feat: Added calendar-based indexing.

- This should stabilize manual syncing.
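- Indexing can now be scoped to an explicit window via the optional start_date / end_date query parameters (YYYY-MM-DD). Below is a minimal sketch of the request, mirroring the frontend hook added in this commit; the indexConnectorRange name, backendUrl, and token handling are illustrative assumptions rather than part of the change.

  // Minimal sketch: trigger indexing for an explicit date window.
  // Assumes a reachable backend URL and a valid bearer token (both placeholders here).
  async function indexConnectorRange(
    backendUrl: string,
    token: string,
    connectorId: number,
    searchSpaceId: number,
    startDate?: string, // "YYYY-MM-DD"; omitted -> last_indexed_at or 365 days ago
    endDate?: string,   // "YYYY-MM-DD"; omitted -> today
  ): Promise<void> {
    const params = new URLSearchParams({ search_space_id: searchSpaceId.toString() });
    if (startDate) params.append("start_date", startDate);
    if (endDate) params.append("end_date", endDate);

    const response = await fetch(
      `${backendUrl}/api/v1/search-source-connectors/${connectorId}/index?${params.toString()}`,
      { method: "POST", headers: { Authorization: `Bearer ${token}` } },
    );
    if (!response.ok) throw new Error(`Indexing request failed: ${response.status}`);
  }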
DESKTOP-RTLN3BA\$punk 2025-06-06 18:17:47 -07:00
parent 74b3017c64
commit 99fa03d78b
7 changed files with 690 additions and 184 deletions


@@ -270,6 +270,8 @@ async def delete_search_source_connector(
async def index_connector_content(
    connector_id: int,
    search_space_id: int = Query(..., description="ID of the search space to store indexed content"),
    start_date: str = Query(None, description="Start date for indexing (YYYY-MM-DD format). If not provided, uses last_indexed_at or defaults to 365 days ago"),
    end_date: str = Query(None, description="End date for indexing (YYYY-MM-DD format). If not provided, uses today's date"),
    session: AsyncSession = Depends(get_async_session),
    user: User = Depends(current_active_user),
    background_tasks: BackgroundTasks = None
@@ -301,105 +303,59 @@ async def index_connector_content(
    # Handle different connector types
    response_message = ""
    indexing_from = None
    indexing_to = None
    today_str = datetime.now().strftime("%Y-%m-%d")

    # Determine the actual date range to use
    if start_date is None:
        # Use last_indexed_at or default to 365 days ago
        if connector.last_indexed_at:
            today = datetime.now().date()
            if connector.last_indexed_at.date() == today:
                # If last indexed today, go back 1 day to ensure we don't miss anything
                indexing_from = (today - timedelta(days=1)).strftime("%Y-%m-%d")
            else:
                indexing_from = connector.last_indexed_at.strftime("%Y-%m-%d")
        else:
            indexing_from = (datetime.now() - timedelta(days=365)).strftime("%Y-%m-%d")
    else:
        indexing_from = start_date

    if end_date is None:
        indexing_to = today_str
    else:
        indexing_to = end_date

    if connector.connector_type == SearchSourceConnectorType.SLACK_CONNECTOR:
-       # Determine the time range that will be indexed
-       if not connector.last_indexed_at:
-           start_date = "365 days ago" # Or perhaps set a specific date if needed
-       else:
-           # Check if last_indexed_at is today
-           today = datetime.now().date()
-           if connector.last_indexed_at.date() == today:
-               # If last indexed today, go back 1 day to ensure we don't miss anything
-               start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
-           else:
-               start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
-       indexing_from = start_date
-       indexing_to = today_str
        # Run indexing in background
        logger.info(f"Triggering Slack indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
        background_tasks.add_task(run_slack_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to)
        response_message = "Slack indexing started in the background."

    elif connector.connector_type == SearchSourceConnectorType.NOTION_CONNECTOR:
-       # Determine the time range that will be indexed
-       if not connector.last_indexed_at:
-           start_date = "365 days ago" # Or perhaps set a specific date
-       else:
-           # Check if last_indexed_at is today
-           today = datetime.now().date()
-           if connector.last_indexed_at.date() == today:
-               # If last indexed today, go back 1 day to ensure we don't miss anything
-               start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
-           else:
-               start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
-       indexing_from = start_date
-       indexing_to = today_str
        # Run indexing in background
        logger.info(f"Triggering Notion indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
        background_tasks.add_task(run_notion_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to)
        response_message = "Notion indexing started in the background."

    elif connector.connector_type == SearchSourceConnectorType.GITHUB_CONNECTOR:
-       # GitHub connector likely indexes everything relevant, or uses internal logic
-       # Setting indexing_from to None and indexing_to to today
-       indexing_from = None
-       indexing_to = today_str
        # Run indexing in background
        logger.info(f"Triggering GitHub indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
        background_tasks.add_task(run_github_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to)
        response_message = "GitHub indexing started in the background."

    elif connector.connector_type == SearchSourceConnectorType.LINEAR_CONNECTOR:
-       # Determine the time range that will be indexed
-       if not connector.last_indexed_at:
-           start_date = "365 days ago"
-       else:
-           # Check if last_indexed_at is today
-           today = datetime.now().date()
-           if connector.last_indexed_at.date() == today:
-               # If last indexed today, go back 1 day to ensure we don't miss anything
-               start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
-           else:
-               start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
-       indexing_from = start_date
-       indexing_to = today_str
        # Run indexing in background
        logger.info(f"Triggering Linear indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}")
        background_tasks.add_task(run_linear_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to)
        response_message = "Linear indexing started in the background."

    elif connector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR:
-       # Determine the time range that will be indexed
-       if not connector.last_indexed_at:
-           start_date = "365 days ago"
-       else:
-           today = datetime.now().date()
-           if connector.last_indexed_at.date() == today:
-               # If last indexed today, go back 1 day to ensure we don't miss anything
-               start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
-           else:
-               start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
-       indexing_from = start_date
-       indexing_to = today_str
        # Run indexing in background
        logger.info(
            f"Triggering Discord indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
        )
        background_tasks.add_task(
            run_discord_indexing_with_new_session, connector_id, search_space_id, indexing_from, indexing_to
        )
        response_message = "Discord indexing started in the background."
@@ -453,19 +409,23 @@ async def update_connector_last_indexed(
async def run_slack_indexing_with_new_session(
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """
    Create a new session and run the Slack indexing task.
    This prevents session leaks by creating a dedicated session for the background task.
    """
    async with async_session_maker() as session:
        await run_slack_indexing(session, connector_id, search_space_id, start_date, end_date)

async def run_slack_indexing(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """
    Background task to run Slack indexing.

@@ -474,6 +434,8 @@ async def run_slack_indexing(
        session: Database session
        connector_id: ID of the Slack connector
        search_space_id: ID of the search space
        start_date: Start date for indexing
        end_date: End date for indexing
    """
    try:
        # Index Slack messages without updating last_indexed_at (we'll do it separately)
@@ -481,6 +443,8 @@ async def run_slack_indexing(
            session=session,
            connector_id=connector_id,
            search_space_id=search_space_id,
            start_date=start_date,
            end_date=end_date,
            update_last_indexed=False  # Don't update timestamp in the indexing function
        )
@@ -495,19 +459,23 @@ async def run_slack_indexing(
async def run_notion_indexing_with_new_session(
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """
    Create a new session and run the Notion indexing task.
    This prevents session leaks by creating a dedicated session for the background task.
    """
    async with async_session_maker() as session:
        await run_notion_indexing(session, connector_id, search_space_id, start_date, end_date)

async def run_notion_indexing(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """
    Background task to run Notion indexing.

@@ -516,6 +484,8 @@ async def run_notion_indexing(
        session: Database session
        connector_id: ID of the Notion connector
        search_space_id: ID of the search space
        start_date: Start date for indexing
        end_date: End date for indexing
    """
    try:
        # Index Notion pages without updating last_indexed_at (we'll do it separately)
@@ -523,6 +493,8 @@ async def run_notion_indexing(
            session=session,
            connector_id=connector_id,
            search_space_id=search_space_id,
            start_date=start_date,
            end_date=end_date,
            update_last_indexed=False  # Don't update timestamp in the indexing function
        )
@@ -538,23 +510,27 @@ async def run_notion_indexing(
# Add new helper functions for GitHub indexing
async def run_github_indexing_with_new_session(
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """Wrapper to run GitHub indexing with its own database session."""
    logger.info(f"Background task started: Indexing GitHub connector {connector_id} into space {search_space_id} from {start_date} to {end_date}")
    async with async_session_maker() as session:
        await run_github_indexing(session, connector_id, search_space_id, start_date, end_date)
    logger.info(f"Background task finished: Indexing GitHub connector {connector_id}")

async def run_github_indexing(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """Runs the GitHub indexing task and updates the timestamp."""
    try:
        indexed_count, error_message = await index_github_repos(
            session, connector_id, search_space_id, start_date, end_date, update_last_indexed=False
        )
        if error_message:
            logger.error(f"GitHub indexing failed for connector {connector_id}: {error_message}")
@@ -572,23 +548,27 @@ async def run_github_indexing(
# Add new helper functions for Linear indexing
async def run_linear_indexing_with_new_session(
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """Wrapper to run Linear indexing with its own database session."""
    logger.info(f"Background task started: Indexing Linear connector {connector_id} into space {search_space_id} from {start_date} to {end_date}")
    async with async_session_maker() as session:
        await run_linear_indexing(session, connector_id, search_space_id, start_date, end_date)
    logger.info(f"Background task finished: Indexing Linear connector {connector_id}")

async def run_linear_indexing(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """Runs the Linear indexing task and updates the timestamp."""
    try:
        indexed_count, error_message = await index_linear_issues(
            session, connector_id, search_space_id, start_date, end_date, update_last_indexed=False
        )
        if error_message:
            logger.error(f"Linear indexing failed for connector {connector_id}: {error_message}")
@@ -606,19 +586,23 @@ async def run_linear_indexing(
# Add new helper functions for discord indexing
async def run_discord_indexing_with_new_session(
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """
    Create a new session and run the Discord indexing task.
    This prevents session leaks by creating a dedicated session for the background task.
    """
    async with async_session_maker() as session:
        await run_discord_indexing(session, connector_id, search_space_id, start_date, end_date)

async def run_discord_indexing(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str,
    end_date: str
):
    """
    Background task to run Discord indexing.

@@ -626,6 +610,8 @@ async def run_discord_indexing(
        session: Database session
        connector_id: ID of the Discord connector
        search_space_id: ID of the search space
        start_date: Start date for indexing
        end_date: End date for indexing
    """
    try:
        # Index Discord messages without updating last_indexed_at (we'll do it separately)
@@ -633,6 +619,8 @@ async def run_discord_indexing(
            session=session,
            connector_id=connector_id,
            search_space_id=search_space_id,
            start_date=start_date,
            end_date=end_date,
            update_last_indexed=False  # Don't update timestamp in the indexing function
        )


@@ -24,6 +24,8 @@ async def index_slack_messages(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str = None,
    end_date: str = None,
    update_last_indexed: bool = True
) -> Tuple[int, Optional[str]]:
    """
@@ -61,27 +63,35 @@ async def index_slack_messages(
    slack_client = SlackHistory(token=slack_token)

    # Calculate date range
    if start_date is None or end_date is None:
        # Fall back to calculating dates based on last_indexed_at
        calculated_end_date = datetime.now()

        # Use last_indexed_at as start date if available, otherwise use 365 days ago
        if connector.last_indexed_at:
            # Convert dates to be comparable (both timezone-naive)
            last_indexed_naive = connector.last_indexed_at.replace(tzinfo=None) if connector.last_indexed_at.tzinfo else connector.last_indexed_at

            # Check if last_indexed_at is in the future or after end_date
            if last_indexed_naive > calculated_end_date:
                logger.warning(f"Last indexed date ({last_indexed_naive.strftime('%Y-%m-%d')}) is in the future. Using 365 days ago instead.")
                calculated_start_date = calculated_end_date - timedelta(days=365)
            else:
                calculated_start_date = last_indexed_naive
                logger.info(f"Using last_indexed_at ({calculated_start_date.strftime('%Y-%m-%d')}) as start date")
        else:
            calculated_start_date = calculated_end_date - timedelta(days=365)  # Use 365 days as default
            logger.info(f"No last_indexed_at found, using {calculated_start_date.strftime('%Y-%m-%d')} (365 days ago) as start date")

        # Use calculated dates if not provided
        start_date_str = start_date if start_date else calculated_start_date.strftime("%Y-%m-%d")
        end_date_str = end_date if end_date else calculated_end_date.strftime("%Y-%m-%d")
    else:
        # Use provided dates
        start_date_str = start_date
        end_date_str = end_date

    logger.info(f"Indexing Slack messages from {start_date_str} to {end_date_str}")

    # Get all channels
    try:
@@ -279,6 +289,8 @@ async def index_notion_pages(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str = None,
    end_date: str = None,
    update_last_indexed: bool = True
) -> Tuple[int, Optional[str]]:
    """
@@ -317,20 +329,33 @@ async def index_notion_pages(
    notion_client = NotionHistoryConnector(token=notion_token)

    # Calculate date range
    if start_date is None or end_date is None:
        # Fall back to calculating dates
        calculated_end_date = datetime.now()
        calculated_start_date = calculated_end_date - timedelta(days=365)  # Check for last 1 year of pages

        # Use calculated dates if not provided
        if start_date is None:
            start_date_iso = calculated_start_date.strftime("%Y-%m-%dT%H:%M:%SZ")
        else:
            # Convert YYYY-MM-DD to ISO format
            start_date_iso = datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m-%dT%H:%M:%SZ")

        if end_date is None:
            end_date_iso = calculated_end_date.strftime("%Y-%m-%dT%H:%M:%SZ")
        else:
            # Convert YYYY-MM-DD to ISO format
            end_date_iso = datetime.strptime(end_date, "%Y-%m-%d").strftime("%Y-%m-%dT%H:%M:%SZ")
    else:
        # Convert provided dates to ISO format for Notion API
        start_date_iso = datetime.strptime(start_date, "%Y-%m-%d").strftime("%Y-%m-%dT%H:%M:%SZ")
        end_date_iso = datetime.strptime(end_date, "%Y-%m-%d").strftime("%Y-%m-%dT%H:%M:%SZ")

    logger.info(f"Fetching Notion pages from {start_date_iso} to {end_date_iso}")

    # Get all pages
    try:
        pages = notion_client.get_all_pages(start_date=start_date_iso, end_date=end_date_iso)
        logger.info(f"Found {len(pages)} Notion pages")
    except Exception as e:
        logger.error(f"Error fetching Notion pages: {str(e)}", exc_info=True)
@@ -524,6 +549,8 @@ async def index_github_repos(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str = None,
    end_date: str = None,
    update_last_indexed: bool = True
) -> Tuple[int, Optional[str]]:
    """
@@ -575,6 +602,8 @@ async def index_github_repos(
    # For simplicity, we'll proceed with the list provided.
    # If a repo is inaccessible, get_repository_files will likely fail gracefully later.
    logger.info(f"Starting indexing for {len(repo_full_names_to_index)} selected repositories.")
    if start_date and end_date:
        logger.info(f"Date range requested: {start_date} to {end_date} (Note: GitHub indexing processes all files regardless of dates)")

    # 6. Iterate through selected repositories and index files
    for repo_full_name in repo_full_names_to_index:
@@ -688,6 +717,8 @@ async def index_linear_issues(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str = None,
    end_date: str = None,
    update_last_indexed: bool = True
) -> Tuple[int, Optional[str]]:
    """
@@ -725,27 +756,33 @@ async def index_linear_issues(
    linear_client = LinearConnector(token=linear_token)

    # Calculate date range
    if start_date is None or end_date is None:
        # Fall back to calculating dates based on last_indexed_at
        calculated_end_date = datetime.now()

        # Use last_indexed_at as start date if available, otherwise use 365 days ago
        if connector.last_indexed_at:
            # Convert dates to be comparable (both timezone-naive)
            last_indexed_naive = connector.last_indexed_at.replace(tzinfo=None) if connector.last_indexed_at.tzinfo else connector.last_indexed_at

            # Check if last_indexed_at is in the future or after end_date
            if last_indexed_naive > calculated_end_date:
                logger.warning(f"Last indexed date ({last_indexed_naive.strftime('%Y-%m-%d')}) is in the future. Using 365 days ago instead.")
                calculated_start_date = calculated_end_date - timedelta(days=365)
            else:
                calculated_start_date = last_indexed_naive
                logger.info(f"Using last_indexed_at ({calculated_start_date.strftime('%Y-%m-%d')}) as start date")
        else:
            calculated_start_date = calculated_end_date - timedelta(days=365)  # Use 365 days as default
            logger.info(f"No last_indexed_at found, using {calculated_start_date.strftime('%Y-%m-%d')} (365 days ago) as start date")

        # Use calculated dates if not provided
        start_date_str = start_date if start_date else calculated_start_date.strftime("%Y-%m-%d")
        end_date_str = end_date if end_date else calculated_end_date.strftime("%Y-%m-%d")
    else:
        # Use provided dates
        start_date_str = start_date
        end_date_str = end_date

    logger.info(f"Fetching Linear issues from {start_date_str} to {end_date_str}")
@@ -918,6 +955,8 @@ async def index_discord_messages(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    start_date: str = None,
    end_date: str = None,
    update_last_indexed: bool = True
) -> Tuple[int, Optional[str]]:
    """
@@ -957,19 +996,36 @@ async def index_discord_messages(
    discord_client = DiscordConnector(token=discord_token)

    # Calculate date range
    if start_date is None or end_date is None:
        # Fall back to calculating dates based on last_indexed_at
        calculated_end_date = datetime.now(timezone.utc)

        # Use last_indexed_at as start date if available, otherwise use 365 days ago
        if connector.last_indexed_at:
            calculated_start_date = connector.last_indexed_at.replace(tzinfo=timezone.utc)
            logger.info(f"Using last_indexed_at ({calculated_start_date.strftime('%Y-%m-%d')}) as start date")
        else:
            calculated_start_date = calculated_end_date - timedelta(days=365)
            logger.info(f"No last_indexed_at found, using {calculated_start_date.strftime('%Y-%m-%d')} (365 days ago) as start date")

        # Use calculated dates if not provided, convert to ISO format for Discord API
        if start_date is None:
            start_date_iso = calculated_start_date.isoformat()
        else:
            # Convert YYYY-MM-DD to ISO format
            start_date_iso = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=timezone.utc).isoformat()

        if end_date is None:
            end_date_iso = calculated_end_date.isoformat()
        else:
            # Convert YYYY-MM-DD to ISO format
            end_date_iso = datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=timezone.utc).isoformat()
    else:
        # Convert provided dates to ISO format for Discord API
        start_date_iso = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=timezone.utc).isoformat()
        end_date_iso = datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=timezone.utc).isoformat()

    logger.info(f"Indexing Discord messages from {start_date_iso} to {end_date_iso}")

    documents_indexed = 0
    documents_skipped = 0
@@ -1012,8 +1068,8 @@ async def index_discord_messages(
            try:
                messages = await discord_client.get_channel_history(
                    channel_id=channel_id,
                    start_date=start_date_iso,
                    end_date=end_date_iso,
                )
            except Exception as e:
                logger.error(f"Failed to get messages for channel {channel_name}: {str(e)}")
@@ -1122,8 +1178,8 @@ async def index_discord_messages(
                    "channel_name": channel_name,
                    "channel_id": channel_id,
                    "message_count": len(formatted_messages),
                    "start_date": start_date_iso,
                    "end_date": end_date_iso,
                    "indexed_at": datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
                },
                content=summary_content,


@@ -9,6 +9,7 @@ import {
  Plus,
  Trash2,
  RefreshCw,
  Calendar as CalendarIcon,
} from "lucide-react";

import { useSearchSourceConnectors } from "@/hooks/useSearchSourceConnectors";
@@ -45,7 +46,21 @@ import {
  TooltipProvider,
  TooltipTrigger,
} from "@/components/ui/tooltip";
import {
  Dialog,
  DialogContent,
  DialogDescription,
  DialogFooter,
  DialogHeader,
  DialogTitle,
  DialogTrigger,
} from "@/components/ui/dialog";
import { Calendar } from "@/components/ui/calendar";
import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover";
import { Label } from "@/components/ui/label";
import { getConnectorIcon } from "@/components/chat";
import { cn } from "@/lib/utils";
import { format } from "date-fns";

// Helper function to get connector type display name
const getConnectorTypeDisplay = (type: string): string => {
@@ -89,6 +104,10 @@ export default function ConnectorsPage() {
  const [indexingConnectorId, setIndexingConnectorId] = useState<number | null>(
    null,
  );
  const [datePickerOpen, setDatePickerOpen] = useState(false);
  const [selectedConnectorForIndexing, setSelectedConnectorForIndexing] = useState<number | null>(null);
  const [startDate, setStartDate] = useState<Date | undefined>(undefined);
  const [endDate, setEndDate] = useState<Date | undefined>(undefined);

  useEffect(() => {
    if (error) {
@@ -112,8 +131,42 @@ export default function ConnectorsPage() {
    }
  };

- // Handle connector indexing
- const handleIndexConnector = async (connectorId: number) => {
  // Handle opening date picker for indexing
  const handleOpenDatePicker = (connectorId: number) => {
    setSelectedConnectorForIndexing(connectorId);
    setDatePickerOpen(true);
  };

  // Handle connector indexing with dates
  const handleIndexConnector = async () => {
    if (selectedConnectorForIndexing === null) return;

    setIndexingConnectorId(selectedConnectorForIndexing);
    setDatePickerOpen(false);

    try {
      const startDateStr = startDate ? format(startDate, "yyyy-MM-dd") : undefined;
      const endDateStr = endDate ? format(endDate, "yyyy-MM-dd") : undefined;

      await indexConnector(selectedConnectorForIndexing, searchSpaceId, startDateStr, endDateStr);
      toast.success("Connector content indexed successfully");
    } catch (error) {
      console.error("Error indexing connector content:", error);
      toast.error(
        error instanceof Error
          ? error.message
          : "Failed to index connector content",
      );
    } finally {
      setIndexingConnectorId(null);
      setSelectedConnectorForIndexing(null);
      setStartDate(undefined);
      setEndDate(undefined);
    }
  };

  // Handle indexing without date picker (for quick indexing)
  const handleQuickIndexConnector = async (connectorId: number) => {
    setIndexingConnectorId(connectorId);
    try {
      await indexConnector(connectorId, searchSpaceId);
@@ -213,34 +266,64 @@ export default function ConnectorsPage() {
                  <TableCell className="text-right">
                    <div className="flex justify-end gap-2">
                      {connector.is_indexable && (
-                       <TooltipProvider>
-                         <Tooltip>
-                           <TooltipTrigger asChild>
-                             <Button
-                               variant="outline"
-                               size="sm"
-                               onClick={() =>
-                                 handleIndexConnector(connector.id)
-                               }
-                               disabled={
-                                 indexingConnectorId === connector.id
-                               }
-                             >
-                               {indexingConnectorId === connector.id ? (
-                                 <RefreshCw className="h-4 w-4 animate-spin" />
-                               ) : (
-                                 <RefreshCw className="h-4 w-4" />
-                               )}
-                               <span className="sr-only">
-                                 Index Content
-                               </span>
-                             </Button>
-                           </TooltipTrigger>
-                           <TooltipContent>
-                             <p>Index Content</p>
-                           </TooltipContent>
-                         </Tooltip>
-                       </TooltipProvider>
                        <div className="flex gap-1">
                          <TooltipProvider>
                            <Tooltip>
                              <TooltipTrigger asChild>
                                <Button
                                  variant="outline"
                                  size="sm"
                                  onClick={() =>
                                    handleOpenDatePicker(connector.id)
                                  }
                                  disabled={
                                    indexingConnectorId === connector.id
                                  }
                                >
                                  {indexingConnectorId === connector.id ? (
                                    <RefreshCw className="h-4 w-4 animate-spin" />
                                  ) : (
                                    <CalendarIcon className="h-4 w-4" />
                                  )}
                                  <span className="sr-only">
                                    Index with Date Range
                                  </span>
                                </Button>
                              </TooltipTrigger>
                              <TooltipContent>
                                <p>Index with Date Range</p>
                              </TooltipContent>
                            </Tooltip>
                          </TooltipProvider>
                          <TooltipProvider>
                            <Tooltip>
                              <TooltipTrigger asChild>
                                <Button
                                  variant="outline"
                                  size="sm"
                                  onClick={() =>
                                    handleQuickIndexConnector(connector.id)
                                  }
                                  disabled={
                                    indexingConnectorId === connector.id
                                  }
                                >
                                  {indexingConnectorId === connector.id ? (
                                    <RefreshCw className="h-4 w-4 animate-spin" />
                                  ) : (
                                    <RefreshCw className="h-4 w-4" />
                                  )}
                                  <span className="sr-only">
                                    Quick Index
                                  </span>
                                </Button>
                              </TooltipTrigger>
                              <TooltipContent>
                                <p>Quick Index (Auto Date Range)</p>
                              </TooltipContent>
                            </Tooltip>
                          </TooltipProvider>
                        </div>
                      )}
                      <Button
                        variant="outline"
@@ -303,6 +386,134 @@ export default function ConnectorsPage() {
          )}
        </CardContent>
      </Card>

{/* Date Picker Dialog */}
<Dialog open={datePickerOpen} onOpenChange={setDatePickerOpen}>
<DialogContent className="sm:max-w-[500px]">
<DialogHeader>
<DialogTitle>Select Date Range for Indexing</DialogTitle>
<DialogDescription>
Choose the start and end dates for indexing content. Leave empty to use default range.
</DialogDescription>
</DialogHeader>
<div className="grid gap-4 py-4">
<div className="grid grid-cols-2 gap-4">
<div className="space-y-2">
<Label htmlFor="start-date">Start Date</Label>
<Popover>
<PopoverTrigger asChild>
<Button
id="start-date"
variant="outline"
className={cn(
"w-full justify-start text-left font-normal",
!startDate && "text-muted-foreground"
)}
>
<CalendarIcon className="mr-2 h-4 w-4" />
{startDate ? format(startDate, "PPP") : "Pick a date"}
</Button>
</PopoverTrigger>
<PopoverContent className="w-auto p-0" align="start">
<Calendar
mode="single"
selected={startDate}
onSelect={setStartDate}
disabled={(date) =>
date > new Date() || (endDate ? date > endDate : false)
}
initialFocus
/>
</PopoverContent>
</Popover>
</div>
<div className="space-y-2">
<Label htmlFor="end-date">End Date</Label>
<Popover>
<PopoverTrigger asChild>
<Button
id="end-date"
variant="outline"
className={cn(
"w-full justify-start text-left font-normal",
!endDate && "text-muted-foreground"
)}
>
<CalendarIcon className="mr-2 h-4 w-4" />
{endDate ? format(endDate, "PPP") : "Pick a date"}
</Button>
</PopoverTrigger>
<PopoverContent className="w-auto p-0" align="start">
<Calendar
mode="single"
selected={endDate}
onSelect={setEndDate}
disabled={(date) =>
date > new Date() || (startDate ? date < startDate : false)
}
initialFocus
/>
</PopoverContent>
</Popover>
</div>
</div>
<div className="flex gap-2">
<Button
variant="outline"
size="sm"
onClick={() => {
setStartDate(undefined);
setEndDate(undefined);
}}
>
Clear Dates
</Button>
<Button
variant="outline"
size="sm"
onClick={() => {
const today = new Date();
const thirtyDaysAgo = new Date(today);
thirtyDaysAgo.setDate(today.getDate() - 30);
setStartDate(thirtyDaysAgo);
setEndDate(today);
}}
>
Last 30 Days
</Button>
<Button
variant="outline"
size="sm"
onClick={() => {
const today = new Date();
const yearAgo = new Date(today);
yearAgo.setFullYear(today.getFullYear() - 1);
setStartDate(yearAgo);
setEndDate(today);
}}
>
Last Year
</Button>
</div>
</div>
<DialogFooter>
<Button
variant="outline"
onClick={() => {
setDatePickerOpen(false);
setSelectedConnectorForIndexing(null);
setStartDate(undefined);
setEndDate(undefined);
}}
>
Cancel
</Button>
<Button onClick={handleIndexConnector}>
Start Indexing
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
    </div>
  );
}


@@ -0,0 +1,210 @@
"use client"
import * as React from "react"
import {
ChevronDownIcon,
ChevronLeftIcon,
ChevronRightIcon,
} from "lucide-react"
import { DayButton, DayPicker, getDefaultClassNames } from "react-day-picker"
import { cn } from "@/lib/utils"
import { Button, buttonVariants } from "@/components/ui/button"
function Calendar({
className,
classNames,
showOutsideDays = true,
captionLayout = "label",
buttonVariant = "ghost",
formatters,
components,
...props
}: React.ComponentProps<typeof DayPicker> & {
buttonVariant?: React.ComponentProps<typeof Button>["variant"]
}) {
const defaultClassNames = getDefaultClassNames()
return (
<DayPicker
showOutsideDays={showOutsideDays}
className={cn(
"bg-background group/calendar p-3 [--cell-size:--spacing(8)] [[data-slot=card-content]_&]:bg-transparent [[data-slot=popover-content]_&]:bg-transparent",
String.raw`rtl:**:[.rdp-button\_next>svg]:rotate-180`,
String.raw`rtl:**:[.rdp-button\_previous>svg]:rotate-180`,
className
)}
captionLayout={captionLayout}
formatters={{
formatMonthDropdown: (date) =>
date.toLocaleString("default", { month: "short" }),
...formatters,
}}
classNames={{
root: cn("w-fit", defaultClassNames.root),
months: cn(
"flex gap-4 flex-col md:flex-row relative",
defaultClassNames.months
),
month: cn("flex flex-col w-full gap-4", defaultClassNames.month),
nav: cn(
"flex items-center gap-1 w-full absolute top-0 inset-x-0 justify-between",
defaultClassNames.nav
),
button_previous: cn(
buttonVariants({ variant: buttonVariant }),
"size-(--cell-size) aria-disabled:opacity-50 p-0 select-none",
defaultClassNames.button_previous
),
button_next: cn(
buttonVariants({ variant: buttonVariant }),
"size-(--cell-size) aria-disabled:opacity-50 p-0 select-none",
defaultClassNames.button_next
),
month_caption: cn(
"flex items-center justify-center h-(--cell-size) w-full px-(--cell-size)",
defaultClassNames.month_caption
),
dropdowns: cn(
"w-full flex items-center text-sm font-medium justify-center h-(--cell-size) gap-1.5",
defaultClassNames.dropdowns
),
dropdown_root: cn(
"relative has-focus:border-ring border border-input shadow-xs has-focus:ring-ring/50 has-focus:ring-[3px] rounded-md",
defaultClassNames.dropdown_root
),
dropdown: cn("absolute inset-0 opacity-0", defaultClassNames.dropdown),
caption_label: cn(
"select-none font-medium",
captionLayout === "label"
? "text-sm"
: "rounded-md pl-2 pr-1 flex items-center gap-1 text-sm h-8 [&>svg]:text-muted-foreground [&>svg]:size-3.5",
defaultClassNames.caption_label
),
table: "w-full border-collapse",
weekdays: cn("flex", defaultClassNames.weekdays),
weekday: cn(
"text-muted-foreground rounded-md flex-1 font-normal text-[0.8rem] select-none",
defaultClassNames.weekday
),
week: cn("flex w-full mt-2", defaultClassNames.week),
week_number_header: cn(
"select-none w-(--cell-size)",
defaultClassNames.week_number_header
),
week_number: cn(
"text-[0.8rem] select-none text-muted-foreground",
defaultClassNames.week_number
),
day: cn(
"relative w-full h-full p-0 text-center [&:first-child[data-selected=true]_button]:rounded-l-md [&:last-child[data-selected=true]_button]:rounded-r-md group/day aspect-square select-none",
defaultClassNames.day
),
range_start: cn(
"rounded-l-md bg-accent",
defaultClassNames.range_start
),
range_middle: cn("rounded-none", defaultClassNames.range_middle),
range_end: cn("rounded-r-md bg-accent", defaultClassNames.range_end),
today: cn(
"bg-accent text-accent-foreground rounded-md data-[selected=true]:rounded-none",
defaultClassNames.today
),
outside: cn(
"text-muted-foreground aria-selected:text-muted-foreground",
defaultClassNames.outside
),
disabled: cn(
"text-muted-foreground opacity-50",
defaultClassNames.disabled
),
hidden: cn("invisible", defaultClassNames.hidden),
...classNames,
}}
components={{
Root: ({ className, rootRef, ...props }) => {
return (
<div
data-slot="calendar"
ref={rootRef}
className={cn(className)}
{...props}
/>
)
},
Chevron: ({ className, orientation, ...props }) => {
if (orientation === "left") {
return (
<ChevronLeftIcon className={cn("size-4", className)} {...props} />
)
}
if (orientation === "right") {
return (
<ChevronRightIcon
className={cn("size-4", className)}
{...props}
/>
)
}
return (
<ChevronDownIcon className={cn("size-4", className)} {...props} />
)
},
DayButton: CalendarDayButton,
WeekNumber: ({ children, ...props }) => {
return (
<td {...props}>
<div className="flex size-(--cell-size) items-center justify-center text-center">
{children}
</div>
</td>
)
},
...components,
}}
{...props}
/>
)
}
function CalendarDayButton({
className,
day,
modifiers,
...props
}: React.ComponentProps<typeof DayButton>) {
const defaultClassNames = getDefaultClassNames()
const ref = React.useRef<HTMLButtonElement>(null)
React.useEffect(() => {
if (modifiers.focused) ref.current?.focus()
}, [modifiers.focused])
return (
<Button
ref={ref}
variant="ghost"
size="icon"
data-day={day.date.toLocaleDateString()}
data-selected-single={
modifiers.selected &&
!modifiers.range_start &&
!modifiers.range_end &&
!modifiers.range_middle
}
data-range-start={modifiers.range_start}
data-range-end={modifiers.range_end}
data-range-middle={modifiers.range_middle}
className={cn(
"data-[selected-single=true]:bg-primary data-[selected-single=true]:text-primary-foreground data-[range-middle=true]:bg-accent data-[range-middle=true]:text-accent-foreground data-[range-start=true]:bg-primary data-[range-start=true]:text-primary-foreground data-[range-end=true]:bg-primary data-[range-end=true]:text-primary-foreground group-data-[focused=true]/day:border-ring group-data-[focused=true]/day:ring-ring/50 dark:hover:text-accent-foreground flex aspect-square size-auto w-full min-w-(--cell-size) flex-col gap-1 leading-none font-normal group-data-[focused=true]/day:relative group-data-[focused=true]/day:z-10 group-data-[focused=true]/day:ring-[3px] data-[range-end=true]:rounded-md data-[range-end=true]:rounded-r-md data-[range-middle=true]:rounded-none data-[range-start=true]:rounded-md data-[range-start=true]:rounded-l-md [&>span]:text-xs [&>span]:opacity-70",
defaultClassNames.day,
className
)}
{...props}
/>
)
}
export { Calendar, CalendarDayButton }


@@ -253,7 +253,12 @@ export const useSearchSourceConnectors = () => {
  /**
   * Index content from a connector to a search space
   */
- const indexConnector = async (connectorId: number, searchSpaceId: string | number) => {
  const indexConnector = async (
    connectorId: number,
    searchSpaceId: string | number,
    startDate?: string,
    endDate?: string
  ) => {
    try {
      const token = localStorage.getItem('surfsense_bearer_token');
@@ -261,8 +266,17 @@
        throw new Error('No authentication token found');
      }

      // Build query parameters
      const params = new URLSearchParams({ search_space_id: searchSpaceId.toString() });
      if (startDate) {
        params.append('start_date', startDate);
      }
      if (endDate) {
        params.append('end_date', endDate);
      }

      const response = await fetch(
-       `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}/index?search_space_id=${searchSpaceId}`,
        `${process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL}/api/v1/search-source-connectors/${connectorId}/index?${params.toString()}`,
        {
          method: 'POST',
          headers: {


@@ -50,6 +50,7 @@
    "next": "15.2.3",
    "next-themes": "^0.4.4",
    "react": "^19.0.0",
    "react-day-picker": "^9.7.0",
    "react-dom": "^19.0.0",
    "react-dropzone": "^14.3.8",
    "react-hook-form": "^7.54.2",


@@ -113,6 +113,9 @@ importers:
      react:
        specifier: ^19.0.0
        version: 19.0.0
      react-day-picker:
        specifier: ^9.7.0
        version: 9.7.0(react@19.0.0)
      react-dom:
        specifier: ^19.0.0
        version: 19.0.0(react@19.0.0)
@@ -234,6 +237,9 @@ packages:
    resolution: {integrity: sha512-aA63XwOkcl4xxQa3HjPMqOP6LiK0ZDv3mUPYEFXkpHbaFjtGggE1A61FjFzJnB+p7/oy2gA8E+rcBNl/zC1tMg==}
    engines: {node: '>=6.9.0'}

  '@date-fns/tz@1.2.0':
    resolution: {integrity: sha512-LBrd7MiJZ9McsOgxqWX7AaxrDjcFVjWH/tIKJd7pnR7McaslGYOP1QmmiBXdJH/H/yLCT+rcQ7FaPBUxRGUtrg==}

  '@emnapi/runtime@1.3.1':
    resolution: {integrity: sha512-kEBmG8KyqtxJZv+ygbEim+KCGtIq1fC22Ms3S4ziXmYKm8uyoLX0MHONVKwp+9opg390VaKRNt4a7A9NwmpNhw==}
@@ -2456,6 +2462,9 @@ packages:
    resolution: {integrity: sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==}
    engines: {node: '>= 0.4'}

  date-fns-jalali@4.1.0-0:
    resolution: {integrity: sha512-hTIP/z+t+qKwBDcmmsnmjWTduxCg+5KfdqWQvb2X/8C9+knYY6epN/pfxdDuyVlSVeFz0sM5eEfwIUQ70U4ckg==}

  date-fns@4.1.0:
    resolution: {integrity: sha512-Ukq0owbQXxa/U3EGtsdVBkR1w7KOQ5gIBqdH2hkvknzZPYvBxb/aa6E8L7tmjFtkwZBu3UXBbjIgPo/Ez4xaNg==}
@@ -3889,6 +3898,12 @@ packages:
  react-base16-styling@0.6.0:
    resolution: {integrity: sha512-yvh/7CArceR/jNATXOKDlvTnPKPmGZz7zsenQ3jUwLzHkNUR0CvY3yGYJbWJ/nnxsL8Sgmt5cO3/SILVuPO6TQ==}

  react-day-picker@9.7.0:
    resolution: {integrity: sha512-urlK4C9XJZVpQ81tmVgd2O7lZ0VQldZeHzNejbwLWZSkzHH498KnArT0EHNfKBOWwKc935iMLGZdxXPRISzUxQ==}
    engines: {node: '>=18'}
    peerDependencies:
      react: '>=16.8.0'

  react-dom@19.0.0:
    resolution: {integrity: sha512-4GV5sHFG0e/0AD4X+ySy6UJd3jVl1iNsNHdpad0qhABJ11twS3TTBnseqsKurKcsNqCEFeGL3uLpVChpIO3QfQ==}
    peerDependencies:
@@ -4660,6 +4675,8 @@ snapshots:
    dependencies:
      regenerator-runtime: 0.14.1

  '@date-fns/tz@1.2.0': {}

  '@emnapi/runtime@1.3.1':
    dependencies:
      tslib: 2.8.1
@@ -6780,6 +6797,8 @@ snapshots:
      es-errors: 1.3.0
      is-data-view: 1.0.2

  date-fns-jalali@4.1.0-0: {}

  date-fns@4.1.0: {}

  debug@3.2.7:
@@ -8790,6 +8809,13 @@ snapshots:
      lodash.flow: 3.5.0
      pure-color: 1.3.0

  react-day-picker@9.7.0(react@19.0.0):
    dependencies:
      '@date-fns/tz': 1.2.0
      date-fns: 4.1.0
      date-fns-jalali: 4.1.0-0
      react: 19.0.0

  react-dom@19.0.0(react@19.0.0):
    dependencies:
      react: 19.0.0