Mirror of https://github.com/MODSetter/SurfSense.git (synced 2025-09-04 11:39:19 +00:00)
Commit b4d29ba3a0 (parent 756a429159): add coderabbit suggestions
6 changed files with 839 additions and 497 deletions
node_modules/.cache/prettier/.prettier-caches/a2ecb2962bf19c1099cfe708e42daa0097f94976.json (generated, vendored, new file, 1 addition)
@@ -0,0 +1 @@
+{"2d0ec64d93969318101ee479b664221b32241665":{"files":{"surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx":["EHKKvlOK0vfy0GgHwlG/J2Bx5rw=",true]},"modified":1753426633288}}

File diff suppressed because it is too large.
@@ -6,7 +6,6 @@ Allows fetching issue lists and their comments, projects and more.
 """

 import base64
-import json
 from datetime import datetime
 from typing import Any, Dict, List, Optional

@@ -119,8 +118,6 @@ class JiraConnector:

         response = requests.get(url, headers=headers, params=params, timeout=500)

-        print(json.dumps(response.json(), indent=2))
-
         if response.status_code == 200:
             return response.json()
         else:
@@ -227,6 +224,7 @@ class JiraConnector:
         date_filter = (
             f"(createdDate >= '{start_date}' AND createdDate <= '{end_date}')"
         )
+        # TODO : This JQL needs some improvement to work as expected

         jql = f"{date_filter}"
         if project_key:
@@ -252,7 +250,7 @@ class JiraConnector:
             fields.append("comment")

         params = {
-            "jql": "",
+            "jql": "",  # TODO : Add a JQL query to filter from a date range
             "fields": ",".join(fields),
             "maxResults": 100,
             "startAt": 0,
@@ -263,10 +261,8 @@ class JiraConnector:

         while True:
             params["startAt"] = start_at
-            print(json.dumps(params, indent=2))
-            result = self.make_api_request("search", params)

-            print(json.dumps(result, indent=2))
+            result = self.make_api_request("search", params)

             if not isinstance(result, dict) or "issues" not in result:
                 return [], "Invalid response from Jira API"
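Both TODOs in this file point at the same gap: a createdDate filter is built, but the search request still ships an empty "jql". As a minimal sketch only (not part of this commit, and assuming Jira's standard /rest/api/2/search parameters plus the params shape shown in the hunks above), the filter could be threaded into one paginated call like this:

import requests


def search_issues_page(url: str, headers: dict, start_date: str, end_date: str, start_at: int = 0) -> dict:
    """Hypothetical sketch: one paginated search call using the JQL date filter the TODOs ask for."""
    # Same date filter the connector already constructs.
    date_filter = f"(createdDate >= '{start_date}' AND createdDate <= '{end_date}')"
    params = {
        "jql": date_filter,  # instead of the empty string flagged by the second TODO
        "fields": "summary,status,comment",  # assumed field list; the code above appends "comment" conditionally
        "maxResults": 100,
        "startAt": start_at,
    }
    # Mirrors the request shape shown above; url and headers are built elsewhere in the connector.
    response = requests.get(url, headers=headers, params=params, timeout=500)
    response.raise_for_status()
    return response.json()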
@@ -9,35 +9,58 @@ POST /search-source-connectors/{connector_id}/index - Index content from a connector

 Note: Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR, NOTION_CONNECTOR, GITHUB_CONNECTOR, LINEAR_CONNECTOR, DISCORD_CONNECTOR).
 """
-from fastapi import APIRouter, Depends, HTTPException, Query, BackgroundTasks, Body
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.future import select
-from sqlalchemy.exc import IntegrityError
-from typing import List, Dict, Any
-from app.db import get_async_session, User, SearchSourceConnector, SearchSourceConnectorType, SearchSpace, async_session_maker
-from app.schemas import SearchSourceConnectorCreate, SearchSourceConnectorUpdate, SearchSourceConnectorRead, SearchSourceConnectorBase
+import logging
+from datetime import datetime, timedelta
+from typing import Any, Dict, List
+
+from app.connectors.github_connector import GitHubConnector
+from app.db import (
+    SearchSourceConnector,
+    SearchSourceConnectorType,
+    SearchSpace,
+    User,
+    async_session_maker,
+    get_async_session,
+)
+from app.schemas import (
+    SearchSourceConnectorBase,
+    SearchSourceConnectorCreate,
+    SearchSourceConnectorRead,
+    SearchSourceConnectorUpdate,
+)
+from app.tasks.connectors_indexing_tasks import (
+    index_discord_messages,
+    index_github_repos,
+    index_jira_issues,
+    index_linear_issues,
+    index_notion_pages,
+    index_slack_messages,
+)
 from app.users import current_active_user
 from app.utils.check_ownership import check_ownership
+from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, Query
 from pydantic import BaseModel, Field, ValidationError
-from app.tasks.connectors_indexing_tasks import index_slack_messages, index_notion_pages, index_github_repos, index_linear_issues, index_discord_messages, index_jira_issues
-from app.connectors.github_connector import GitHubConnector
-from datetime import datetime, timedelta
-import logging
+from sqlalchemy.exc import IntegrityError
+from sqlalchemy.ext.asyncio import AsyncSession
+from sqlalchemy.future import select

 # Set up logging
 logger = logging.getLogger(__name__)

 router = APIRouter()


 # Use Pydantic's BaseModel here
 class GitHubPATRequest(BaseModel):
     github_pat: str = Field(..., description="GitHub Personal Access Token")


 # --- New Endpoint to list GitHub Repositories ---
 @router.post("/github/repositories/", response_model=List[Dict[str, Any]])
 async def list_github_repositories(
     pat_request: GitHubPATRequest,
-    user: User = Depends(current_active_user) # Ensure the user is logged in
+    user: User = Depends(current_active_user),  # Ensure the user is logged in
 ):
     """
     Fetches a list of repositories accessible by the provided GitHub PAT.
@@ -54,35 +77,39 @@ async def list_github_repositories(
         logger.error(f"GitHub PAT validation failed for user {user.id}: {str(e)}")
         raise HTTPException(status_code=400, detail=f"Invalid GitHub PAT: {str(e)}")
     except Exception as e:
-        logger.error(f"Failed to fetch GitHub repositories for user {user.id}: {str(e)}")
-        raise HTTPException(status_code=500, detail="Failed to fetch GitHub repositories.")
+        logger.error(
+            f"Failed to fetch GitHub repositories for user {user.id}: {str(e)}"
+        )
+        raise HTTPException(
+            status_code=500, detail="Failed to fetch GitHub repositories."
+        )


 @router.post("/search-source-connectors/", response_model=SearchSourceConnectorRead)
 async def create_search_source_connector(
     connector: SearchSourceConnectorCreate,
     session: AsyncSession = Depends(get_async_session),
-    user: User = Depends(current_active_user)
+    user: User = Depends(current_active_user),
 ):
     """
     Create a new search source connector.

     Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR, etc.).
     The config must contain the appropriate keys for the connector type.
     """
     try:
         # Check if a connector with the same type already exists for this user
         result = await session.execute(
-            select(SearchSourceConnector)
-            .filter(
+            select(SearchSourceConnector).filter(
                 SearchSourceConnector.user_id == user.id,
-                SearchSourceConnector.connector_type == connector.connector_type
+                SearchSourceConnector.connector_type == connector.connector_type,
             )
         )
         existing_connector = result.scalars().first()
         if existing_connector:
             raise HTTPException(
                 status_code=409,
-                detail=f"A connector with type {connector.connector_type} already exists. Each user can have only one connector of each type."
+                detail=f"A connector with type {connector.connector_type} already exists. Each user can have only one connector of each type.",
             )
         db_connector = SearchSourceConnector(**connector.model_dump(), user_id=user.id)
         session.add(db_connector)
@@ -91,15 +118,12 @@ async def create_search_source_connector(
         return db_connector
     except ValidationError as e:
         await session.rollback()
-        raise HTTPException(
-            status_code=422,
-            detail=f"Validation error: {str(e)}"
-        )
+        raise HTTPException(status_code=422, detail=f"Validation error: {str(e)}")
     except IntegrityError as e:
         await session.rollback()
         raise HTTPException(
             status_code=409,
-            detail=f"Integrity error: A connector with this type already exists. {str(e)}"
+            detail=f"Integrity error: A connector with this type already exists. {str(e)}",
         )
     except HTTPException:
         await session.rollback()
@@ -109,38 +133,44 @@ async def create_search_source_connector(
         await session.rollback()
         raise HTTPException(
             status_code=500,
-            detail=f"Failed to create search source connector: {str(e)}"
+            detail=f"Failed to create search source connector: {str(e)}",
         )

-@router.get("/search-source-connectors/", response_model=List[SearchSourceConnectorRead])
+
+@router.get(
+    "/search-source-connectors/", response_model=List[SearchSourceConnectorRead]
+)
 async def read_search_source_connectors(
     skip: int = 0,
     limit: int = 100,
     search_space_id: int = None,
     session: AsyncSession = Depends(get_async_session),
-    user: User = Depends(current_active_user)
+    user: User = Depends(current_active_user),
 ):
     """List all search source connectors for the current user."""
     try:
-        query = select(SearchSourceConnector).filter(SearchSourceConnector.user_id == user.id)
-        # No need to filter by search_space_id as connectors are user-owned, not search space specific
-
-        result = await session.execute(
-            query.offset(skip).limit(limit)
-        )
+        query = select(SearchSourceConnector).filter(
+            SearchSourceConnector.user_id == user.id
+        )
+
+        # No need to filter by search_space_id as connectors are user-owned, not search space specific
+
+        result = await session.execute(query.offset(skip).limit(limit))
         return result.scalars().all()
     except Exception as e:
         raise HTTPException(
             status_code=500,
-            detail=f"Failed to fetch search source connectors: {str(e)}"
+            detail=f"Failed to fetch search source connectors: {str(e)}",
         )

-@router.get("/search-source-connectors/{connector_id}", response_model=SearchSourceConnectorRead)
+
+@router.get(
+    "/search-source-connectors/{connector_id}", response_model=SearchSourceConnectorRead
+)
 async def read_search_source_connector(
     connector_id: int,
     session: AsyncSession = Depends(get_async_session),
-    user: User = Depends(current_active_user)
+    user: User = Depends(current_active_user),
 ):
     """Get a specific search source connector by ID."""
     try:
@@ -149,31 +179,37 @@ async def read_search_source_connector(
         raise
     except Exception as e:
         raise HTTPException(
-            status_code=500,
-            detail=f"Failed to fetch search source connector: {str(e)}"
+            status_code=500, detail=f"Failed to fetch search source connector: {str(e)}"
         )

-@router.put("/search-source-connectors/{connector_id}", response_model=SearchSourceConnectorRead)
+
+@router.put(
+    "/search-source-connectors/{connector_id}", response_model=SearchSourceConnectorRead
+)
 async def update_search_source_connector(
     connector_id: int,
     connector_update: SearchSourceConnectorUpdate,
     session: AsyncSession = Depends(get_async_session),
-    user: User = Depends(current_active_user)
+    user: User = Depends(current_active_user),
 ):
     """
     Update a search source connector.
     Handles partial updates, including merging changes into the 'config' field.
     """
-    db_connector = await check_ownership(session, SearchSourceConnector, connector_id, user)
+    db_connector = await check_ownership(
+        session, SearchSourceConnector, connector_id, user
+    )

     # Convert the sparse update data (only fields present in request) to a dict
     update_data = connector_update.model_dump(exclude_unset=True)

     # Special handling for 'config' field
     if "config" in update_data:
         incoming_config = update_data["config"]  # Config data from the request
-        existing_config = db_connector.config if db_connector.config else {} # Current config from DB
+        existing_config = (
+            db_connector.config if db_connector.config else {}
+        )  # Current config from DB

         # Merge incoming config into existing config
         # This preserves existing keys (like GITHUB_PAT) if they are not in the incoming data
         merged_config = existing_config.copy()
@@ -182,26 +218,29 @@ async def update_search_source_connector(
         # -- Validation after merging --
         # Validate the *merged* config based on the connector type
         # We need the connector type - use the one from the update if provided, else the existing one
-        current_connector_type = connector_update.connector_type if connector_update.connector_type is not None else db_connector.connector_type
+        current_connector_type = (
+            connector_update.connector_type
+            if connector_update.connector_type is not None
+            else db_connector.connector_type
+        )

         try:
             # We can reuse the base validator by creating a temporary base model instance
             # Note: This assumes 'name' and 'is_indexable' are not crucial for config validation itself
             temp_data_for_validation = {
                 "name": db_connector.name,  # Use existing name
                 "connector_type": current_connector_type,
                 "is_indexable": db_connector.is_indexable,  # Use existing value
                 "last_indexed_at": db_connector.last_indexed_at,  # Not used by validator
-                "config": merged_config
+                "config": merged_config,
             }
             SearchSourceConnectorBase.model_validate(temp_data_for_validation)
         except ValidationError as e:
             # Raise specific validation error for the merged config
             raise HTTPException(
-                status_code=422,
-                detail=f"Validation error for merged config: {str(e)}"
+                status_code=422, detail=f"Validation error for merged config: {str(e)}"
             )

         # If validation passes, update the main update_data dict with the merged config
         update_data["config"] = merged_config

@@ -210,20 +249,19 @@ async def update_search_source_connector(
         # Prevent changing connector_type if it causes a duplicate (check moved here)
         if key == "connector_type" and value != db_connector.connector_type:
             result = await session.execute(
-                select(SearchSourceConnector)
-                .filter(
+                select(SearchSourceConnector).filter(
                     SearchSourceConnector.user_id == user.id,
                     SearchSourceConnector.connector_type == value,
-                    SearchSourceConnector.id != connector_id
+                    SearchSourceConnector.id != connector_id,
                 )
             )
             existing_connector = result.scalars().first()
             if existing_connector:
                 raise HTTPException(
                     status_code=409,
-                    detail=f"A connector with type {value} already exists. Each user can have only one connector of each type."
+                    detail=f"A connector with type {value} already exists. Each user can have only one connector of each type.",
                 )

         setattr(db_connector, key, value)

     try:
@@ -234,26 +272,31 @@ async def update_search_source_connector(
         await session.rollback()
         # This might occur if connector_type constraint is violated somehow after the check
         raise HTTPException(
-            status_code=409,
-            detail=f"Database integrity error during update: {str(e)}"
+            status_code=409, detail=f"Database integrity error during update: {str(e)}"
         )
     except Exception as e:
         await session.rollback()
-        logger.error(f"Failed to update search source connector {connector_id}: {e}", exc_info=True)
+        logger.error(
+            f"Failed to update search source connector {connector_id}: {e}",
+            exc_info=True,
+        )
         raise HTTPException(
             status_code=500,
-            detail=f"Failed to update search source connector: {str(e)}"
+            detail=f"Failed to update search source connector: {str(e)}",
         )


 @router.delete("/search-source-connectors/{connector_id}", response_model=dict)
 async def delete_search_source_connector(
     connector_id: int,
     session: AsyncSession = Depends(get_async_session),
-    user: User = Depends(current_active_user)
+    user: User = Depends(current_active_user),
 ):
     """Delete a search source connector."""
     try:
-        db_connector = await check_ownership(session, SearchSourceConnector, connector_id, user)
+        db_connector = await check_ownership(
+            session, SearchSourceConnector, connector_id, user
+        )
         await session.delete(db_connector)
         await session.commit()
         return {"message": "Search source connector deleted successfully"}
@@ -263,22 +306,33 @@ async def delete_search_source_connector(
         await session.rollback()
         raise HTTPException(
             status_code=500,
-            detail=f"Failed to delete search source connector: {str(e)}"
+            detail=f"Failed to delete search source connector: {str(e)}",
         )

-@router.post("/search-source-connectors/{connector_id}/index", response_model=Dict[str, Any])
+
+@router.post(
+    "/search-source-connectors/{connector_id}/index", response_model=Dict[str, Any]
+)
 async def index_connector_content(
     connector_id: int,
-    search_space_id: int = Query(..., description="ID of the search space to store indexed content"),
-    start_date: str = Query(None, description="Start date for indexing (YYYY-MM-DD format). If not provided, uses last_indexed_at or defaults to 365 days ago"),
-    end_date: str = Query(None, description="End date for indexing (YYYY-MM-DD format). If not provided, uses today's date"),
+    search_space_id: int = Query(
+        ..., description="ID of the search space to store indexed content"
+    ),
+    start_date: str = Query(
+        None,
+        description="Start date for indexing (YYYY-MM-DD format). If not provided, uses last_indexed_at or defaults to 365 days ago",
+    ),
+    end_date: str = Query(
+        None,
+        description="End date for indexing (YYYY-MM-DD format). If not provided, uses today's date",
+    ),
     session: AsyncSession = Depends(get_async_session),
     user: User = Depends(current_active_user),
-    background_tasks: BackgroundTasks = None
+    background_tasks: BackgroundTasks = None,
 ):
     """
     Index content from a connector to a search space.

     Currently supports:
     - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels
     - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages
@@ -286,26 +340,30 @@ async def index_connector_content(
     - LINEAR_CONNECTOR: Indexes issues and comments from Linear
     - JIRA_CONNECTOR: Indexes issues and comments from Jira
     - DISCORD_CONNECTOR: Indexes messages from all accessible Discord channels

     Args:
         connector_id: ID of the connector to use
         search_space_id: ID of the search space to store indexed content
         background_tasks: FastAPI background tasks

     Returns:
         Dictionary with indexing status
     """
     try:
         # Check if the connector belongs to the user
-        connector = await check_ownership(session, SearchSourceConnector, connector_id, user)
+        connector = await check_ownership(
+            session, SearchSourceConnector, connector_id, user
+        )

         # Check if the search space belongs to the user
-        search_space = await check_ownership(session, SearchSpace, search_space_id, user)
+        search_space = await check_ownership(
+            session, SearchSpace, search_space_id, user
+        )

         # Handle different connector types
         response_message = ""
         today_str = datetime.now().strftime("%Y-%m-%d")

         # Determine the actual date range to use
         if start_date is None:
             # Use last_indexed_at or default to 365 days ago
@@ -317,10 +375,12 @@ async def index_connector_content(
                 else:
                     indexing_from = connector.last_indexed_at.strftime("%Y-%m-%d")
             else:
-                indexing_from = (datetime.now() - timedelta(days=365)).strftime("%Y-%m-%d")
+                indexing_from = (datetime.now() - timedelta(days=365)).strftime(
+                    "%Y-%m-%d"
+                )
         else:
             indexing_from = start_date

         if end_date is None:
             indexing_to = today_str
         else:
@@ -328,32 +388,77 @@ async def index_connector_content(

         if connector.connector_type == SearchSourceConnectorType.SLACK_CONNECTOR:
             # Run indexing in background
-            logger.info(f"Triggering Slack indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
-            background_tasks.add_task(run_slack_indexing_with_new_session, connector_id, search_space_id, str(user.id), indexing_from, indexing_to)
+            logger.info(
+                f"Triggering Slack indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
+            )
+            background_tasks.add_task(
+                run_slack_indexing_with_new_session,
+                connector_id,
+                search_space_id,
+                str(user.id),
+                indexing_from,
+                indexing_to,
+            )
             response_message = "Slack indexing started in the background."

         elif connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR:
             # Run indexing in background
-            logger.info(f"Triggering Notion indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
-            background_tasks.add_task(run_notion_indexing_with_new_session, connector_id, search_space_id, str(user.id), indexing_from, indexing_to)
+            logger.info(
+                f"Triggering Notion indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
+            )
+            background_tasks.add_task(
+                run_notion_indexing_with_new_session,
+                connector_id,
+                search_space_id,
+                str(user.id),
+                indexing_from,
+                indexing_to,
+            )
             response_message = "Notion indexing started in the background."

         elif connector.connector_type == SearchSourceConnectorType.GITHUB_CONNECTOR:
             # Run indexing in background
-            logger.info(f"Triggering GitHub indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
-            background_tasks.add_task(run_github_indexing_with_new_session, connector_id, search_space_id, str(user.id), indexing_from, indexing_to)
+            logger.info(
+                f"Triggering GitHub indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
+            )
+            background_tasks.add_task(
+                run_github_indexing_with_new_session,
+                connector_id,
+                search_space_id,
+                str(user.id),
+                indexing_from,
+                indexing_to,
+            )
             response_message = "GitHub indexing started in the background."

         elif connector.connector_type == SearchSourceConnectorType.LINEAR_CONNECTOR:
             # Run indexing in background
-            logger.info(f"Triggering Linear indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
-            background_tasks.add_task(run_linear_indexing_with_new_session, connector_id, search_space_id, str(user.id), indexing_from, indexing_to)
+            logger.info(
+                f"Triggering Linear indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
+            )
+            background_tasks.add_task(
+                run_linear_indexing_with_new_session,
+                connector_id,
+                search_space_id,
+                str(user.id),
+                indexing_from,
+                indexing_to,
+            )
             response_message = "Linear indexing started in the background."

         elif connector.connector_type == SearchSourceConnectorType.JIRA_CONNECTOR:
             # Run indexing in background
-            logger.info(f"Triggering Jira indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
-            background_tasks.add_task(run_jira_indexing_with_new_session, connector_id, search_space_id, str(user.id), indexing_from, indexing_to)
+            logger.info(
+                f"Triggering Jira indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
+            )
+            background_tasks.add_task(
+                run_jira_indexing_with_new_session,
+                connector_id,
+                search_space_id,
+                str(user.id),
+                indexing_from,
+                indexing_to,
+            )
             response_message = "Jira indexing started in the background."

         elif connector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR:
@@ -362,71 +467,83 @@ async def index_connector_content(
                 f"Triggering Discord indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
             )
             background_tasks.add_task(
-                run_discord_indexing_with_new_session, connector_id, search_space_id, str(user.id), indexing_from, indexing_to
+                run_discord_indexing_with_new_session,
+                connector_id,
+                search_space_id,
+                str(user.id),
+                indexing_from,
+                indexing_to,
             )
             response_message = "Discord indexing started in the background."

         else:
             raise HTTPException(
                 status_code=400,
-                detail=f"Indexing not supported for connector type: {connector.connector_type}"
+                detail=f"Indexing not supported for connector type: {connector.connector_type}",
             )

         return {
             "message": response_message,
             "connector_id": connector_id,
             "search_space_id": search_space_id,
             "indexing_from": indexing_from,
-            "indexing_to": indexing_to
+            "indexing_to": indexing_to,
         }
     except HTTPException:
         raise
     except Exception as e:
-        logger.error(f"Failed to initiate indexing for connector {connector_id}: {e}", exc_info=True)
-        raise HTTPException(
-            status_code=500,
-            detail=f"Failed to initiate indexing: {str(e)}"
-        )
+        logger.error(
+            f"Failed to initiate indexing for connector {connector_id}: {e}",
+            exc_info=True,
+        )
+        raise HTTPException(
+            status_code=500, detail=f"Failed to initiate indexing: {str(e)}"
+        )

-async def update_connector_last_indexed(
-    session: AsyncSession,
-    connector_id: int
-):
+
+async def update_connector_last_indexed(session: AsyncSession, connector_id: int):
     """
     Update the last_indexed_at timestamp for a connector.

     Args:
         session: Database session
         connector_id: ID of the connector to update
     """
     try:
         result = await session.execute(
-            select(SearchSourceConnector)
-            .filter(SearchSourceConnector.id == connector_id)
+            select(SearchSourceConnector).filter(
+                SearchSourceConnector.id == connector_id
+            )
         )
         connector = result.scalars().first()

         if connector:
             connector.last_indexed_at = datetime.now()
             await session.commit()
             logger.info(f"Updated last_indexed_at for connector {connector_id}")
     except Exception as e:
-        logger.error(f"Failed to update last_indexed_at for connector {connector_id}: {str(e)}")
+        logger.error(
+            f"Failed to update last_indexed_at for connector {connector_id}: {str(e)}"
+        )
         await session.rollback()


 async def run_slack_indexing_with_new_session(
     connector_id: int,
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """
     Create a new session and run the Slack indexing task.
     This prevents session leaks by creating a dedicated session for the background task.
     """
     async with async_session_maker() as session:
-        await run_slack_indexing(session, connector_id, search_space_id, user_id, start_date, end_date)
+        await run_slack_indexing(
+            session, connector_id, search_space_id, user_id, start_date, end_date
+        )


 async def run_slack_indexing(
     session: AsyncSession,
@@ -434,11 +551,11 @@ async def run_slack_indexing(
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """
     Background task to run Slack indexing.

     Args:
         session: Database session
         connector_id: ID of the Slack connector
@@ -456,31 +573,39 @@ async def run_slack_indexing(
             user_id=user_id,
             start_date=start_date,
             end_date=end_date,
-            update_last_indexed=False # Don't update timestamp in the indexing function
+            update_last_indexed=False,  # Don't update timestamp in the indexing function
         )

         # Only update last_indexed_at if indexing was successful (either new docs or updated docs)
         if documents_processed > 0:
             await update_connector_last_indexed(session, connector_id)
-            logger.info(f"Slack indexing completed successfully: {documents_processed} documents processed")
+            logger.info(
+                f"Slack indexing completed successfully: {documents_processed} documents processed"
+            )
         else:
-            logger.error(f"Slack indexing failed or no documents processed: {error_or_warning}")
+            logger.error(
+                f"Slack indexing failed or no documents processed: {error_or_warning}"
+            )
     except Exception as e:
         logger.error(f"Error in background Slack indexing task: {str(e)}")


 async def run_notion_indexing_with_new_session(
     connector_id: int,
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """
     Create a new session and run the Notion indexing task.
     This prevents session leaks by creating a dedicated session for the background task.
     """
     async with async_session_maker() as session:
-        await run_notion_indexing(session, connector_id, search_space_id, user_id, start_date, end_date)
+        await run_notion_indexing(
+            session, connector_id, search_space_id, user_id, start_date, end_date
+        )


 async def run_notion_indexing(
     session: AsyncSession,
@@ -488,11 +613,11 @@ async def run_notion_indexing(
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """
     Background task to run Notion indexing.

     Args:
         session: Database session
         connector_id: ID of the Notion connector
@@ -510,112 +635,158 @@ async def run_notion_indexing(
             user_id=user_id,
             start_date=start_date,
             end_date=end_date,
-            update_last_indexed=False # Don't update timestamp in the indexing function
+            update_last_indexed=False,  # Don't update timestamp in the indexing function
         )

         # Only update last_indexed_at if indexing was successful (either new docs or updated docs)
         if documents_processed > 0:
             await update_connector_last_indexed(session, connector_id)
-            logger.info(f"Notion indexing completed successfully: {documents_processed} documents processed")
+            logger.info(
+                f"Notion indexing completed successfully: {documents_processed} documents processed"
+            )
         else:
-            logger.error(f"Notion indexing failed or no documents processed: {error_or_warning}")
+            logger.error(
+                f"Notion indexing failed or no documents processed: {error_or_warning}"
+            )
     except Exception as e:
         logger.error(f"Error in background Notion indexing task: {str(e)}")


 # Add new helper functions for GitHub indexing
 async def run_github_indexing_with_new_session(
     connector_id: int,
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """Wrapper to run GitHub indexing with its own database session."""
-    logger.info(f"Background task started: Indexing GitHub connector {connector_id} into space {search_space_id} from {start_date} to {end_date}")
+    logger.info(
+        f"Background task started: Indexing GitHub connector {connector_id} into space {search_space_id} from {start_date} to {end_date}"
+    )
     async with async_session_maker() as session:
-        await run_github_indexing(session, connector_id, search_space_id, user_id, start_date, end_date)
+        await run_github_indexing(
+            session, connector_id, search_space_id, user_id, start_date, end_date
+        )
     logger.info(f"Background task finished: Indexing GitHub connector {connector_id}")


 async def run_github_indexing(
     session: AsyncSession,
     connector_id: int,
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """Runs the GitHub indexing task and updates the timestamp."""
     try:
         indexed_count, error_message = await index_github_repos(
-            session, connector_id, search_space_id, user_id, start_date, end_date, update_last_indexed=False
+            session,
+            connector_id,
+            search_space_id,
+            user_id,
+            start_date,
+            end_date,
+            update_last_indexed=False,
         )
         if error_message:
-            logger.error(f"GitHub indexing failed for connector {connector_id}: {error_message}")
+            logger.error(
+                f"GitHub indexing failed for connector {connector_id}: {error_message}"
+            )
             # Optionally update status in DB to indicate failure
         else:
-            logger.info(f"GitHub indexing successful for connector {connector_id}. Indexed {indexed_count} documents.")
+            logger.info(
+                f"GitHub indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
+            )
             # Update the last indexed timestamp only on success
             await update_connector_last_indexed(session, connector_id)
             await session.commit()  # Commit timestamp update
     except Exception as e:
         await session.rollback()
-        logger.error(f"Critical error in run_github_indexing for connector {connector_id}: {e}", exc_info=True)
+        logger.error(
+            f"Critical error in run_github_indexing for connector {connector_id}: {e}",
+            exc_info=True,
+        )
         # Optionally update status in DB to indicate failure


 # Add new helper functions for Linear indexing
 async def run_linear_indexing_with_new_session(
     connector_id: int,
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """Wrapper to run Linear indexing with its own database session."""
-    logger.info(f"Background task started: Indexing Linear connector {connector_id} into space {search_space_id} from {start_date} to {end_date}")
+    logger.info(
+        f"Background task started: Indexing Linear connector {connector_id} into space {search_space_id} from {start_date} to {end_date}"
+    )
    async with async_session_maker() as session:
-        await run_linear_indexing(session, connector_id, search_space_id, user_id, start_date, end_date)
+        await run_linear_indexing(
+            session, connector_id, search_space_id, user_id, start_date, end_date
+        )
     logger.info(f"Background task finished: Indexing Linear connector {connector_id}")


 async def run_linear_indexing(
     session: AsyncSession,
     connector_id: int,
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """Runs the Linear indexing task and updates the timestamp."""
     try:
         indexed_count, error_message = await index_linear_issues(
-            session, connector_id, search_space_id, user_id, start_date, end_date, update_last_indexed=False
+            session,
+            connector_id,
+            search_space_id,
+            user_id,
+            start_date,
+            end_date,
+            update_last_indexed=False,
        )
         if error_message:
-            logger.error(f"Linear indexing failed for connector {connector_id}: {error_message}")
+            logger.error(
+                f"Linear indexing failed for connector {connector_id}: {error_message}"
+            )
             # Optionally update status in DB to indicate failure
         else:
-            logger.info(f"Linear indexing successful for connector {connector_id}. Indexed {indexed_count} documents.")
+            logger.info(
+                f"Linear indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
+            )
             # Update the last indexed timestamp only on success
             await update_connector_last_indexed(session, connector_id)
             await session.commit()  # Commit timestamp update
     except Exception as e:
         await session.rollback()
-        logger.error(f"Critical error in run_linear_indexing for connector {connector_id}: {e}", exc_info=True)
+        logger.error(
+            f"Critical error in run_linear_indexing for connector {connector_id}: {e}",
+            exc_info=True,
+        )
         # Optionally update status in DB to indicate failure


 # Add new helper functions for discord indexing
 async def run_discord_indexing_with_new_session(
     connector_id: int,
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """
     Create a new session and run the Discord indexing task.
     This prevents session leaks by creating a dedicated session for the background task.
     """
     async with async_session_maker() as session:
-        await run_discord_indexing(session, connector_id, search_space_id, user_id, start_date, end_date)
+        await run_discord_indexing(
+            session, connector_id, search_space_id, user_id, start_date, end_date
+        )


 async def run_discord_indexing(
     session: AsyncSession,
@@ -623,7 +794,7 @@ async def run_discord_indexing(
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """
     Background task to run Discord indexing.
@@ -644,15 +815,19 @@ async def run_discord_indexing(
             user_id=user_id,
             start_date=start_date,
             end_date=end_date,
-            update_last_indexed=False # Don't update timestamp in the indexing function
+            update_last_indexed=False,  # Don't update timestamp in the indexing function
         )

         # Only update last_indexed_at if indexing was successful (either new docs or updated docs)
         if documents_processed > 0:
             await update_connector_last_indexed(session, connector_id)
-            logger.info(f"Discord indexing completed successfully: {documents_processed} documents processed")
+            logger.info(
+                f"Discord indexing completed successfully: {documents_processed} documents processed"
+            )
         else:
-            logger.error(f"Discord indexing failed or no documents processed: {error_or_warning}")
+            logger.error(
+                f"Discord indexing failed or no documents processed: {error_or_warning}"
+            )
     except Exception as e:
         logger.error(f"Error in background Discord indexing task: {str(e)}")

@@ -663,36 +838,53 @@ async def run_jira_indexing_with_new_session(
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """Wrapper to run Jira indexing with its own database session."""
-    logger.info(f"Background task started: Indexing Jira connector {connector_id} into space {search_space_id} from {start_date} to {end_date}")
+    logger.info(
+        f"Background task started: Indexing Jira connector {connector_id} into space {search_space_id} from {start_date} to {end_date}"
+    )
     async with async_session_maker() as session:
-        await run_jira_indexing(session, connector_id, search_space_id, user_id, start_date, end_date)
+        await run_jira_indexing(
+            session, connector_id, search_space_id, user_id, start_date, end_date
+        )
     logger.info(f"Background task finished: Indexing Jira connector {connector_id}")


 async def run_jira_indexing(
     session: AsyncSession,
     connector_id: int,
     search_space_id: int,
     user_id: str,
     start_date: str,
-    end_date: str
+    end_date: str,
 ):
     """Runs the Jira indexing task and updates the timestamp."""
     try:
         indexed_count, error_message = await index_jira_issues(
-            session, connector_id, search_space_id, user_id, start_date, end_date, update_last_indexed=False
+            session,
+            connector_id,
+            search_space_id,
+            user_id,
+            start_date,
+            end_date,
+            update_last_indexed=False,
         )
         if error_message:
-            logger.error(f"Jira indexing failed for connector {connector_id}: {error_message}")
+            logger.error(
+                f"Jira indexing failed for connector {connector_id}: {error_message}"
+            )
             # Optionally update status in DB to indicate failure
         else:
-            logger.info(f"Jira indexing successful for connector {connector_id}. Indexed {indexed_count} documents.")
+            logger.info(
+                f"Jira indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
+            )
             # Update the last indexed timestamp only on success
             await update_connector_last_indexed(session, connector_id)
             await session.commit()  # Commit timestamp update
     except Exception as e:
-        await session.rollback()
-        logger.error(f"Critical error in run_jira_indexing for connector {connector_id}: {e}", exc_info=True)
+        logger.error(
+            f"Critical error in run_jira_indexing for connector {connector_id}: {e}",
+            exc_info=True,
+        )
         # Optionally update status in DB to indicate failure

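For reference, the reworked POST /search-source-connectors/{connector_id}/index route keeps its previous query parameters: a required search_space_id plus optional start_date and end_date in YYYY-MM-DD form. A minimal client-side sketch (assumptions not taken from this diff: the API is reachable at http://localhost:8000 with no extra router prefix, and authentication is a bearer token):

import httpx


def trigger_indexing(connector_id: int, search_space_id: int, token: str) -> dict:
    """Hypothetical client call for the indexing endpoint shown above."""
    # start_date / end_date are omitted, so the server falls back to last_indexed_at
    # (or 365 days ago) and to today's date, as the Query descriptions state.
    response = httpx.post(
        f"http://localhost:8000/search-source-connectors/{connector_id}/index",
        params={"search_space_id": search_space_id},
        headers={"Authorization": f"Bearer {token}"},
    )
    response.raise_for_status()
    # Expected keys per the handler's return value: message, connector_id,
    # search_space_id, indexing_from, indexing_to.
    return response.json()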
@@ -992,7 +992,7 @@ class ConnectorService:
         # Early return if no results
         if not jira_chunks:
             return {
-                "id": 10,
+                "id": 30,
                 "name": "Jira Issues",
                 "type": "JIRA_CONNECTOR",
                 "sources": [],
@@ -60,7 +60,7 @@ import {
 	IconBrandSlack,
 	IconBrandYoutube,
 	IconLayoutKanban,
-	IconBrandTrello,
+	IconTicket,
 } from "@tabler/icons-react";
 import {
 	ColumnDef,
@@ -178,7 +178,7 @@ const documentTypeIcons = {
 	YOUTUBE_VIDEO: IconBrandYoutube,
 	GITHUB_CONNECTOR: IconBrandGithub,
 	LINEAR_CONNECTOR: IconLayoutKanban,
-	JIRA_CONNECTOR: IconBrandTrello,
+	JIRA_CONNECTOR: IconTicket,
 	DISCORD_CONNECTOR: IconBrandDiscord,
 } as const;