feat: Added Calendar-Based Indexing.

- This should stabilize manual syncing.
DESKTOP-RTLN3BA\$punk 2025-06-06 18:17:47 -07:00
parent 74b3017c64
commit 99fa03d78b
7 changed files with 690 additions and 184 deletions


@@ -270,6 +270,8 @@ async def delete_search_source_connector(
 async def index_connector_content(
     connector_id: int,
     search_space_id: int = Query(..., description="ID of the search space to store indexed content"),
+    start_date: str = Query(None, description="Start date for indexing (YYYY-MM-DD format). If not provided, uses last_indexed_at or defaults to 365 days ago"),
+    end_date: str = Query(None, description="End date for indexing (YYYY-MM-DD format). If not provided, uses today's date"),
     session: AsyncSession = Depends(get_async_session),
     user: User = Depends(current_active_user),
     background_tasks: BackgroundTasks = None
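
With the two new optional query parameters, a client can scope a manual sync to an explicit window. A minimal sketch of calling the endpoint with httpx; the route path, HTTP method, and auth header are assumptions, not taken from this diff:

```python
# Illustrative call only: the path, method, and auth scheme are guesses.
import asyncio
import httpx

async def trigger_indexing(connector_id: int, search_space_id: int) -> dict:
    async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
        resp = await client.post(
            f"/search-source-connectors/{connector_id}/index",  # hypothetical path
            params={
                "search_space_id": search_space_id,
                "start_date": "2025-01-01",  # optional; omit to fall back to last_indexed_at
                "end_date": "2025-06-01",    # optional; omit to default to today
            },
            headers={"Authorization": "Bearer <token>"},  # current_active_user dependency
        )
        resp.raise_for_status()
        return resp.json()

# asyncio.run(trigger_indexing(1, 1))
```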
@@ -301,105 +303,59 @@ async def index_connector_content(
     # Handle different connector types
     response_message = ""
     indexing_from = None
     indexing_to = None
     today_str = datetime.now().strftime("%Y-%m-%d")
 
-    if connector.connector_type == SearchSourceConnectorType.SLACK_CONNECTOR:
-        # Determine the time range that will be indexed
-        if not connector.last_indexed_at:
-            start_date = "365 days ago" # Or perhaps set a specific date if needed
-        else:
-            # Check if last_indexed_at is today
+    # Determine the actual date range to use
+    if start_date is None:
+        # Use last_indexed_at or default to 365 days ago
+        if connector.last_indexed_at:
             today = datetime.now().date()
             if connector.last_indexed_at.date() == today:
                 # If last indexed today, go back 1 day to ensure we don't miss anything
-                start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
+                indexing_from = (today - timedelta(days=1)).strftime("%Y-%m-%d")
             else:
-                start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
+                indexing_from = connector.last_indexed_at.strftime("%Y-%m-%d")
+        else:
+            indexing_from = (datetime.now() - timedelta(days=365)).strftime("%Y-%m-%d")
+    else:
         indexing_from = start_date
-        indexing_to = today_str
 
+    if end_date is None:
+        indexing_to = today_str
+    else:
+        indexing_to = end_date
+
+    if connector.connector_type == SearchSourceConnectorType.SLACK_CONNECTOR:
         # Run indexing in background
-        logger.info(f"Triggering Slack indexing for connector {connector_id} into search space {search_space_id}")
-        background_tasks.add_task(run_slack_indexing_with_new_session, connector_id, search_space_id)
+        logger.info(f"Triggering Slack indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
+        background_tasks.add_task(run_slack_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to)
         response_message = "Slack indexing started in the background."
 
     elif connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR:
-        # Determine the time range that will be indexed
-        if not connector.last_indexed_at:
-            start_date = "365 days ago" # Or perhaps set a specific date
-        else:
-            # Check if last_indexed_at is today
-            today = datetime.now().date()
-            if connector.last_indexed_at.date() == today:
-                # If last indexed today, go back 1 day to ensure we don't miss anything
-                start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
-            else:
-                start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
-        indexing_from = start_date
-        indexing_to = today_str
-
         # Run indexing in background
-        logger.info(f"Triggering Notion indexing for connector {connector_id} into search space {search_space_id}")
-        background_tasks.add_task(run_notion_indexing_with_new_session, connector_id, search_space_id)
+        logger.info(f"Triggering Notion indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
+        background_tasks.add_task(run_notion_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to)
         response_message = "Notion indexing started in the background."
 
     elif connector.connector_type == SearchSourceConnectorType.GITHUB_CONNECTOR:
-        # GitHub connector likely indexes everything relevant, or uses internal logic
-        # Setting indexing_from to None and indexing_to to today
-        indexing_from = None
-        indexing_to = today_str
-
         # Run indexing in background
-        logger.info(f"Triggering GitHub indexing for connector {connector_id} into search space {search_space_id}")
-        background_tasks.add_task(run_github_indexing_with_new_session, connector_id, search_space_id)
+        logger.info(f"Triggering GitHub indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
+        background_tasks.add_task(run_github_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to)
        response_message = "GitHub indexing started in the background."
 
     elif connector.connector_type == SearchSourceConnectorType.LINEAR_CONNECTOR:
-        # Determine the time range that will be indexed
-        if not connector.last_indexed_at:
-            start_date = "365 days ago"
-        else:
-            # Check if last_indexed_at is today
-            today = datetime.now().date()
-            if connector.last_indexed_at.date() == today:
-                # If last indexed today, go back 1 day to ensure we don't miss anything
-                start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
-            else:
-                start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
-        indexing_from = start_date
-        indexing_to = today_str
-
         # Run indexing in background
-        logger.info(f"Triggering Linear indexing for connector {connector_id} into search space {search_space_id}")
-        background_tasks.add_task(run_linear_indexing_with_new_session, connector_id, search_space_id)
+        logger.info(f"Triggering Linear indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
+        background_tasks.add_task(run_linear_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to)
         response_message = "Linear indexing started in the background."
 
     elif connector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR:
-        # Determine the time range that will be indexed
-        if not connector.last_indexed_at:
-            start_date = "365 days ago"
-        else:
-            today = datetime.now().date()
-            if connector.last_indexed_at.date() == today:
-                # If last indexed today, go back 1 day to ensure we don't miss anything
-                start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
-            else:
-                start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
-        indexing_from = start_date
-        indexing_to = today_str
-
         # Run indexing in background
         logger.info(
-            f"Triggering Discord indexing for connector {connector_id} into search space {search_space_id}"
+            f"Triggering Discord indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
         )
         background_tasks.add_task(
-            run_discord_indexing_with_new_session, connector_id, search_space_id
+            run_discord_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to
         )
         response_message = "Discord indexing started in the background."
@@ -453,19 +409,23 @@ async def update_connector_last_indexed(
 
 async def run_slack_indexing_with_new_session(
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """
     Create a new session and run the Slack indexing task.
     This prevents session leaks by creating a dedicated session for the background task.
     """
     async with async_session_maker() as session:
-        await run_slack_indexing(session, connector_id, search_space_id)
+        await run_slack_indexing(session, connector_id, search_space_id, start_date, end_date)
 
 async def run_slack_indexing(
     session: AsyncSession,
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """
     Background task to run Slack indexing.
@@ -474,6 +434,8 @@ async def run_slack_indexing(
         session: Database session
         connector_id: ID of the Slack connector
         search_space_id: ID of the search space
+        start_date: Start date for indexing
+        end_date: End date for indexing
     """
     try:
         # Index Slack messages without updating last_indexed_at (we'll do it separately)
@@ -481,6 +443,8 @@ async def run_slack_indexing(
             session=session,
             connector_id=connector_id,
             search_space_id=search_space_id,
+            start_date=start_date,
+            end_date=end_date,
             update_last_indexed=False # Don't update timestamp in the indexing function
         )
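
Every connector repeats the same wrapper shape: open a fresh AsyncSession, run the task, let the context manager close it. If the duplication ever becomes a burden, one generic wrapper could serve all five connectors; a hypothetical refactor that assumes the module's async_session_maker, not something this commit does:

```python
from typing import Awaitable, Callable

async def run_with_new_session(
    task: Callable[..., Awaitable[None]],
    *args,
) -> None:
    # One dedicated session per background task, so the request-scoped session
    # from Depends(get_async_session) is never reused after the response returns.
    async with async_session_maker() as session:  # assumed from this module
        await task(session, *args)

# Usage (hypothetical):
# background_tasks.add_task(run_with_new_session, run_slack_indexing,
#                           connector_id, search_space_id, indexing_from, indexing_to)
```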
@@ -495,19 +459,23 @@ async def run_slack_indexing(
 
 async def run_notion_indexing_with_new_session(
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """
     Create a new session and run the Notion indexing task.
     This prevents session leaks by creating a dedicated session for the background task.
     """
     async with async_session_maker() as session:
-        await run_notion_indexing(session, connector_id, search_space_id)
+        await run_notion_indexing(session, connector_id, search_space_id, start_date, end_date)
 
 async def run_notion_indexing(
     session: AsyncSession,
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """
     Background task to run Notion indexing.
@@ -516,6 +484,8 @@ async def run_notion_indexing(
         session: Database session
         connector_id: ID of the Notion connector
         search_space_id: ID of the search space
+        start_date: Start date for indexing
+        end_date: End date for indexing
     """
     try:
         # Index Notion pages without updating last_indexed_at (we'll do it separately)
@@ -523,6 +493,8 @@ async def run_notion_indexing(
             session=session,
             connector_id=connector_id,
             search_space_id=search_space_id,
+            start_date=start_date,
+            end_date=end_date,
             update_last_indexed=False # Don't update timestamp in the indexing function
         )
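
Note that start_date and end_date travel through all of these signatures as plain strings, so a malformed value would only surface deep inside a connector. A guard like the following could reject bad input at the route instead; this validation is not part of the commit:

```python
from datetime import datetime
from fastapi import HTTPException

def parse_ymd(value: str, param_name: str) -> datetime:
    """Reject anything that is not a valid YYYY-MM-DD date."""
    try:
        return datetime.strptime(value, "%Y-%m-%d")
    except ValueError:
        raise HTTPException(
            status_code=422,
            detail=f"{param_name} must be a valid date in YYYY-MM-DD format",
        )

# e.g. at the top of index_connector_content (hypothetical):
# if start_date is not None:
#     parse_ymd(start_date, "start_date")
```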
@@ -538,23 +510,27 @@ async def run_notion_indexing(
 
 # Add new helper functions for GitHub indexing
 async def run_github_indexing_with_new_session(
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """Wrapper to run GitHub indexing with its own database session."""
-    logger.info(f"Background task started: Indexing GitHub connector {connector_id} into space {search_space_id}")
+    logger.info(f"Background task started: Indexing GitHub connector {connector_id} into space {search_space_id} from {start_date} to {end_date}")
     async with async_session_maker() as session:
-        await run_github_indexing(session, connector_id, search_space_id)
+        await run_github_indexing(session, connector_id, search_space_id, start_date, end_date)
     logger.info(f"Background task finished: Indexing GitHub connector {connector_id}")
 
 async def run_github_indexing(
     session: AsyncSession,
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """Runs the GitHub indexing task and updates the timestamp."""
     try:
         indexed_count, error_message = await index_github_repos(
-            session, connector_id, search_space_id, update_last_indexed=False
+            session, connector_id, search_space_id, start_date, end_date, update_last_indexed=False
         )
         if error_message:
             logger.error(f"GitHub indexing failed for connector {connector_id}: {error_message}")
@@ -572,23 +548,27 @@ async def run_github_indexing(
 
 # Add new helper functions for Linear indexing
 async def run_linear_indexing_with_new_session(
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """Wrapper to run Linear indexing with its own database session."""
-    logger.info(f"Background task started: Indexing Linear connector {connector_id} into space {search_space_id}")
+    logger.info(f"Background task started: Indexing Linear connector {connector_id} into space {search_space_id} from {start_date} to {end_date}")
     async with async_session_maker() as session:
-        await run_linear_indexing(session, connector_id, search_space_id)
+        await run_linear_indexing(session, connector_id, search_space_id, start_date, end_date)
     logger.info(f"Background task finished: Indexing Linear connector {connector_id}")
 
 async def run_linear_indexing(
     session: AsyncSession,
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """Runs the Linear indexing task and updates the timestamp."""
     try:
         indexed_count, error_message = await index_linear_issues(
-            session, connector_id, search_space_id, update_last_indexed=False
+            session, connector_id, search_space_id, start_date, end_date, update_last_indexed=False
         )
         if error_message:
             logger.error(f"Linear indexing failed for connector {connector_id}: {error_message}")
@@ -606,19 +586,23 @@ async def run_linear_indexing(
 
 # Add new helper functions for discord indexing
 async def run_discord_indexing_with_new_session(
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """
     Create a new session and run the Discord indexing task.
     This prevents session leaks by creating a dedicated session for the background task.
     """
     async with async_session_maker() as session:
-        await run_discord_indexing(session, connector_id, search_space_id)
+        await run_discord_indexing(session, connector_id, search_space_id, start_date, end_date)
 
 async def run_discord_indexing(
     session: AsyncSession,
     connector_id: int,
-    search_space_id: int
+    search_space_id: int,
+    start_date: str,
+    end_date: str
 ):
     """
     Background task to run Discord indexing.
@@ -626,6 +610,8 @@ async def run_discord_indexing(
         session: Database session
         connector_id: ID of the Discord connector
         search_space_id: ID of the search space
+        start_date: Start date for indexing
+        end_date: End date for indexing
     """
     try:
         # Index Discord messages without updating last_indexed_at (we'll do it separately)
@@ -633,6 +619,8 @@ async def run_discord_indexing(
             session=session,
             connector_id=connector_id,
             search_space_id=search_space_id,
+            start_date=start_date,
+            end_date=end_date,
             update_last_indexed=False # Don't update timestamp in the indexing function
         )
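
For Discord, the date strings eventually need to become timezone-aware datetimes, since message-history APIs compare against UTC timestamps. A small helper of the kind index_discord_messages might use; the name and placement are illustrative:

```python
from datetime import datetime, timezone

def to_utc_bounds(start_date: str, end_date: str) -> tuple[datetime, datetime]:
    """Expand YYYY-MM-DD strings to an inclusive UTC datetime window."""
    start = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=timezone.utc)
    end = datetime.strptime(end_date, "%Y-%m-%d").replace(
        hour=23, minute=59, second=59, tzinfo=timezone.utc
    )
    return start, end

# With discord.py (an assumption about the connector's client library),
# these bounds map onto channel.history(after=start, before=end).
```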