From 65b6177ef0baf6f48539c1493c28aa9d1fa03e60 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Tue, 29 Jul 2025 21:23:59 +0200 Subject: [PATCH 01/15] add clickup connector --- .../app/connectors/clickup_connector.py | 250 ++++++++++++++++++ 1 file changed, 250 insertions(+) create mode 100644 surfsense_backend/app/connectors/clickup_connector.py diff --git a/surfsense_backend/app/connectors/clickup_connector.py b/surfsense_backend/app/connectors/clickup_connector.py new file mode 100644 index 0000000..9b68bda --- /dev/null +++ b/surfsense_backend/app/connectors/clickup_connector.py @@ -0,0 +1,250 @@ +""" +ClickUp Connector Module + +A module for retrieving data from ClickUp. +Allows fetching tasks from workspaces and lists. +""" + +from datetime import datetime +from typing import Any + +import requests + + +class ClickUpConnector: + """Class for retrieving data from ClickUp.""" + + def __init__(self, api_token: str | None = None): + """ + Initialize the ClickUpConnector class. + + Args: + api_token: ClickUp API token (optional) + """ + self.api_token = api_token + self.base_url = "https://api.clickup.com/api/v2" + + def set_api_token(self, api_token: str) -> None: + """ + Set the ClickUp API token. + + Args: + api_token: ClickUp API token + """ + self.api_token = api_token + + def get_headers(self) -> dict[str, str]: + """ + Get headers for ClickUp API requests. + + Returns: + Dictionary of headers + + Raises: + ValueError: If api_token has not been set + """ + if not self.api_token: + raise ValueError( + "ClickUp API token not initialized. Call set_api_token() first." + ) + + return { + "Content-Type": "application/json", + "Authorization": self.api_token, + } + + def make_api_request( + self, endpoint: str, params: dict[str, Any] | None = None + ) -> dict[str, Any]: + """ + Make a request to the ClickUp API. + + Args: + endpoint: API endpoint (without base URL) + params: Query parameters for the request (optional) + + Returns: + Response data from the API + + Raises: + ValueError: If api_token has not been set + Exception: If the API request fails + """ + if not self.api_token: + raise ValueError( + "ClickUp API token not initialized. Call set_api_token() first." + ) + + url = f"{self.base_url}/{endpoint}" + headers = self.get_headers() + + response = requests.get(url, headers=headers, params=params, timeout=500) + + if response.status_code == 200: + return response.json() + else: + raise Exception( + f"API request failed with status code {response.status_code}: {response.text}" + ) + + def get_authorized_workspaces(self) -> dict[str, Any]: + """ + Fetch authorized workspaces (teams) from ClickUp. + + Returns: + Dictionary containing teams data + + Raises: + ValueError: If credentials have not been set + Exception: If the API request fails + """ + return self.make_api_request("team") + + def get_workspace_tasks( + self, workspace_id: str, include_closed: bool = False + ) -> list[dict[str, Any]]: + """ + Fetch all tasks from a ClickUp workspace using the filtered team tasks endpoint. 
+ + Args: + workspace_id: ClickUp workspace (team) ID + include_closed: Whether to include closed tasks (default: False) + + Returns: + List of task objects + + Raises: + ValueError: If credentials have not been set + Exception: If the API request fails + """ + params = { + "page": 0, + "order_by": "created", + "reverse": "true", + "subtasks": "true", + "include_closed": str(include_closed).lower(), + } + + all_tasks = [] + page = 0 + + while True: + params["page"] = page + result = self.make_api_request(f"team/{workspace_id}/task", params) + + if not isinstance(result, dict) or "tasks" not in result: + break + + tasks = result["tasks"] + if not tasks: + break + + all_tasks.extend(tasks) + + # Check if there are more pages + if len(tasks) < 100: # ClickUp returns max 100 tasks per page + break + + page += 1 + + return all_tasks + + def get_tasks_in_date_range( + self, + workspace_id: str, + start_date: str, + end_date: str, + include_closed: bool = False, + ) -> tuple[list[dict[str, Any]], str | None]: + """ + Fetch tasks from ClickUp within a specific date range. + + Args: + workspace_id: ClickUp workspace (team) ID + start_date: Start date in YYYY-MM-DD format + end_date: End date in YYYY-MM-DD format + include_closed: Whether to include closed tasks (default: False) + + Returns: + Tuple containing (tasks list, error message or None) + """ + try: + # Convert dates to Unix timestamps (milliseconds) + start_timestamp = int( + datetime.strptime(start_date, "%Y-%m-%d").timestamp() * 1000 + ) + end_timestamp = int( + datetime.strptime(end_date, "%Y-%m-%d").timestamp() * 1000 + ) + + params = { + "page": 0, + "order_by": "created", + "reverse": "true", + "subtasks": "true", + "include_closed": str(include_closed).lower(), + "date_created_gt": start_timestamp, + "date_created_lt": end_timestamp, + } + + all_tasks = [] + page = 0 + + while True: + params["page"] = page + result = self.make_api_request(f"team/{workspace_id}/task", params) + + if not isinstance(result, dict) or "tasks" not in result: + return [], "Invalid response from ClickUp API" + + tasks = result["tasks"] + if not tasks: + break + + all_tasks.extend(tasks) + + # Check if there are more pages + if len(tasks) < 100: # ClickUp returns max 100 tasks per page + break + + page += 1 + + if not all_tasks: + return [], "No tasks found in the specified date range." + + return all_tasks, None + + except Exception as e: + return [], f"Error fetching tasks: {e!s}" + + def get_task_details(self, task_id: str) -> dict[str, Any]: + """ + Fetch detailed information about a specific task. + + Args: + task_id: ClickUp task ID + + Returns: + Task details + + Raises: + ValueError: If credentials have not been set + Exception: If the API request fails + """ + return self.make_api_request(f"task/{task_id}") + + def get_task_comments(self, task_id: str) -> dict[str, Any]: + """ + Fetch comments for a specific task. 
+
+        Args:
+            task_id: ClickUp task ID
+
+        Returns:
+            Task comments
+
+        Raises:
+            ValueError: If credentials have not been set
+            Exception: If the API request fails
+        """
+        return self.make_api_request(f"task/{task_id}/comment")

From ede3dce9af4ae40685adff38f68ed72d4fe3b900 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Wed, 30 Jul 2025 21:28:31 +0200
Subject: [PATCH 02/15] update clickup connector

---
 surfsense_backend/app/connectors/clickup_connector.py | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/surfsense_backend/app/connectors/clickup_connector.py b/surfsense_backend/app/connectors/clickup_connector.py
index 9b68bda..7e2d4ca 100644
--- a/surfsense_backend/app/connectors/clickup_connector.py
+++ b/surfsense_backend/app/connectors/clickup_connector.py
@@ -177,14 +177,13 @@ class ClickUpConnector:
                 datetime.strptime(end_date, "%Y-%m-%d").timestamp() * 1000
             )
 
+            # TODO : Include date range in api request
+
             params = {
                 "page": 0,
                 "order_by": "created",
                 "reverse": "true",
                 "subtasks": "true",
-                "include_closed": str(include_closed).lower(),
-                "date_created_gt": start_timestamp,
-                "date_created_lt": end_timestamp,
             }
 
             all_tasks = []

From 0348aff483c00a8988c0020cacd2c11e5c1bb5b6 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Wed, 30 Jul 2025 21:30:56 +0200
Subject: [PATCH 03/15] support clickup in researcher agents

---
 .../app/agents/researcher/nodes.py            | 26 +++++++++++++++++++
 .../agents/researcher/qna_agent/prompts.py    |  1 +
 2 files changed, 27 insertions(+)

diff --git a/surfsense_backend/app/agents/researcher/nodes.py b/surfsense_backend/app/agents/researcher/nodes.py
index 6f036b7..7882fed 100644
--- a/surfsense_backend/app/agents/researcher/nodes.py
+++ b/surfsense_backend/app/agents/researcher/nodes.py
@@ -945,6 +945,32 @@ async def fetch_relevant_documents(
                         )
                     }
                 )
+            elif connector == "CLICKUP_CONNECTOR":
+                (
+                    source_object,
+                    clickup_chunks,
+                ) = await connector_service.search_clickup(
+                    user_query=reformulated_query,
+                    user_id=user_id,
+                    search_space_id=search_space_id,
+                    top_k=top_k,
+                    search_mode=search_mode,
+                )
+
+                # Add to sources and raw documents
+                if source_object:
+                    all_sources.append(source_object)
+                all_raw_documents.extend(clickup_chunks)
+
+                # Stream found document count
+                if streaming_service and writer:
+                    writer(
+                        {
+                            "yield_value": streaming_service.format_terminal_info_delta(
+                                f"📋 Found {len(clickup_chunks)} ClickUp tasks related to your query"
+                            )
+                        }
+                    )
 
         except Exception as e:
             error_message = f"Error searching connector {connector}: {e!s}"
diff --git a/surfsense_backend/app/agents/researcher/qna_agent/prompts.py b/surfsense_backend/app/agents/researcher/qna_agent/prompts.py
index bcd799c..608c165 100644
--- a/surfsense_backend/app/agents/researcher/qna_agent/prompts.py
+++ b/surfsense_backend/app/agents/researcher/qna_agent/prompts.py
@@ -17,6 +17,7 @@ You are SurfSense, an advanced AI research assistant that provides detailed, wel
 - LINEAR_CONNECTOR: "Linear project issues and discussions" (personal project management)
 - JIRA_CONNECTOR: "Jira project issues, tickets, and comments" (personal project tracking)
 - CONFLUENCE_CONNECTOR: "Confluence pages and comments" (personal project documentation)
+- CLICKUP_CONNECTOR: "ClickUp tasks and project data" (personal task management)
 - DISCORD_CONNECTOR: "Discord server conversations and shared content" (personal community communications)
 - TAVILY_API: "Tavily search API results" (personalized search results)
 - LINKUP_API: "Linkup search API results" (personalized search results)
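
Taken together, patches 01-03 give the backend a self-contained ClickUp client. A minimal sketch of exercising it outside the app follows; the token value and printed fields are illustrative assumptions, not part of the patches:

    from app.connectors.clickup_connector import ClickUpConnector

    # Hypothetical personal API token; real tokens come from ClickUp's "Apps" settings page.
    connector = ClickUpConnector(api_token="pk_example_token")

    # get_authorized_workspaces() wraps GET /team and returns {"teams": [...]}.
    for team in connector.get_authorized_workspaces().get("teams", []):
        # get_workspace_tasks() pages through team/{id}/task, 100 tasks per page.
        tasks = connector.get_workspace_tasks(team["id"], include_closed=False)
        print(team.get("name"), len(tasks))
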
From b1204f8ac077d98b38eeea1255312d3ab0b1660f Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Wed, 30 Jul 2025 21:31:52 +0200
Subject: [PATCH 04/15] add clickup indexing

---
 .../app/tasks/connectors_indexing_tasks.py | 327 ++++++++++++++++++
 1 file changed, 327 insertions(+)

diff --git a/surfsense_backend/app/tasks/connectors_indexing_tasks.py b/surfsense_backend/app/tasks/connectors_indexing_tasks.py
index 6829136..02959c1 100644
--- a/surfsense_backend/app/tasks/connectors_indexing_tasks.py
+++ b/surfsense_backend/app/tasks/connectors_indexing_tasks.py
@@ -1,13 +1,16 @@
 import asyncio
+import hashlib
 import logging
 from datetime import UTC, datetime, timedelta
 
 from slack_sdk.errors import SlackApiError
+from sqlalchemy import func
 from sqlalchemy.exc import SQLAlchemyError
 from sqlalchemy.ext.asyncio import AsyncSession
 from sqlalchemy.future import select
 
 from app.config import config
+from app.connectors.clickup_connector import ClickUpConnector
 from app.connectors.confluence_connector import ConfluenceConnector
 from app.connectors.discord_connector import DiscordConnector
 from app.connectors.github_connector import GitHubConnector
@@ -2698,3 +2701,327 @@ async def index_confluence_pages(
         )
         logger.error(f"Failed to index Confluence pages: {e!s}", exc_info=True)
         return 0, f"Failed to index Confluence pages: {e!s}"
+
+
+async def index_clickup_tasks(
+    session: AsyncSession,
+    connector_id: int,
+    search_space_id: int,
+    user_id: str,
+    start_date: str | None = None,
+    end_date: str | None = None,
+    update_last_indexed: bool = True,
+) -> tuple[int, str | None]:
+    """
+    Index tasks from ClickUp workspace.
+
+    Args:
+        session: Database session
+        connector_id: ID of the ClickUp connector
+        search_space_id: ID of the search space
+        user_id: ID of the user
+        start_date: Start date for filtering tasks (YYYY-MM-DD format)
+        end_date: End date for filtering tasks (YYYY-MM-DD format)
+        update_last_indexed: Whether to update the last_indexed_at timestamp
+
+    Returns:
+        Tuple of (number of indexed tasks, error message if any)
+    """
+    task_logger = TaskLoggingService(session, search_space_id)
+
+    # Log task start
+    log_entry = await task_logger.log_task_start(
+        task_name="clickup_tasks_indexing",
+        source="connector_indexing_task",
+        message=f"Starting ClickUp tasks indexing for connector {connector_id}",
+        metadata={
+            "connector_id": connector_id,
+            "start_date": start_date,
+            "end_date": end_date,
+        },
+    )
+
+    try:
+
+        # Get connector configuration
+        result = await session.execute(
+            select(SearchSourceConnector).filter(
+                SearchSourceConnector.id == connector_id
+            )
+        )
+        connector = result.scalars().first()
+
+        if not connector:
+            error_msg = f"ClickUp connector with ID {connector_id} not found"
+            await task_logger.log_task_failure(
+                log_entry,
+                f"Connector with ID {connector_id} not found or is not a ClickUp connector",
+                "Connector not found",
+                {"error_type": "ConnectorNotFound"},
+            )
+            return 0, error_msg
+
+        # Extract ClickUp configuration
+        clickup_api_token = connector.config.get("CLICKUP_API_TOKEN")
+
+        if not clickup_api_token:
+            error_msg = "ClickUp API token not found in connector configuration"
+            await task_logger.log_task_failure(
+                log_entry,
+                f"ClickUp API token not found in connector config for connector {connector_id}",
+                "Missing ClickUp token",
+                {"error_type": "MissingToken"},
+            )
+            return 0, error_msg
+
+        await task_logger.log_task_progress(
+            log_entry,
+            f"Initializing ClickUp client for connector {connector_id}",
+            {"stage": "client_initialization"},
+        )
+
+        clickup_client = ClickUpConnector(api_token=clickup_api_token)
+
+        # Get authorized workspaces
+        await task_logger.log_task_progress(
+            log_entry,
+            "Fetching authorized ClickUp workspaces",
+            {"stage": "workspace_fetching"},
+        )
+
+        workspaces_response = clickup_client.get_authorized_workspaces()
+        workspaces = workspaces_response.get("teams", [])
+
+        if not workspaces:
+            error_msg = "No authorized ClickUp workspaces found"
+            await task_logger.log_task_failure(
+                log_entry,
+                f"No authorized ClickUp workspaces found for connector {connector_id}",
+                "No workspaces found",
+                {"error_type": "NoWorkspacesFound"},
+            )
+            return 0, error_msg
+
+        # Process and index each task
+        documents_indexed = 0
+        documents_skipped = 0
+
+        for workspace in workspaces:
+            workspace_id = workspace.get("id")
+            workspace_name = workspace.get("name", "Unknown Workspace")
+
+            if not workspace_id:
+                continue
+
+            await task_logger.log_task_progress(
+                log_entry,
+                f"Processing workspace: {workspace_name}",
+                {"stage": "workspace_processing", "workspace_id": workspace_id},
+            )
+
+            # Fetch tasks from workspace
+            if start_date and end_date:
+                tasks, error = clickup_client.get_tasks_in_date_range(
+                    workspace_id=workspace_id,
+                    start_date=start_date,
+                    end_date=end_date,
+                    include_closed=True,
+                )
+                if error:
+                    logger.warning(f"Error fetching tasks from workspace {workspace_name}: {error}")
+                    continue
+            else:
+                tasks = clickup_client.get_workspace_tasks(
+                    workspace_id=workspace_id, include_closed=True
+                )
+
+            await task_logger.log_task_progress(
+                log_entry,
+                f"Found {len(tasks)} tasks in workspace {workspace_name}",
+                {"stage": "tasks_found", "task_count": len(tasks)},
+            )
+
+            # Process each task
+            for task in tasks:
+                try:
+                    task_id = task.get("id")
+                    task_name = task.get("name", "Untitled Task")
+                    task_description = task.get("description", "")
+                    task_status = task.get("status", {}).get("status", "Unknown")
+                    task_priority = task.get("priority", {}).get("priority", "Unknown") if task.get("priority") else "None"
+                    task_assignees = task.get("assignees", [])
+                    task_due_date = task.get("due_date")
+                    task_created = task.get("date_created")
+                    task_updated = task.get("date_updated")
+                    task_url = task.get("url", "")
+
+                    # Get list and space information
+                    task_list = task.get("list", {})
+                    task_list_name = task_list.get("name", "Unknown List")
+                    task_space = task.get("space", {})
+                    task_space_name = task_space.get("name", "Unknown Space")
+
+                    # Create task content
+                    content_parts = [f"Task: {task_name}"]
+
+                    if task_description:
+                        content_parts.append(f"Description: {task_description}")
+
+                    content_parts.extend([
+                        f"Status: {task_status}",
+                        f"Priority: {task_priority}",
+                        f"List: {task_list_name}",
+                        f"Space: {task_space_name}",
+                    ])
+
+                    if task_assignees:
+                        assignee_names = [assignee.get("username", "Unknown") for assignee in task_assignees]
+                        content_parts.append(f"Assignees: {', '.join(assignee_names)}")
+
+                    if task_due_date:
+                        content_parts.append(f"Due Date: {task_due_date}")
+
+                    task_content = "\n".join(content_parts)
+
+                    if not task_content.strip():
+                        logger.warning(f"Skipping task with no content: {task_name}")
+                        continue
+
+                    # Create document metadata
+                    document_metadata = {
+                        "task_id": task_id,
+                        "task_name": task_name,
+                        "task_url": task_url,
+                        "task_status": task_status,
+                        "task_priority": task_priority,
+                        "task_assignees": task_assignees,
+                        "task_due_date": task_due_date,
+                        "task_created": task_created,
+                        "task_updated": task_updated,
+                        "task_list_name": task_list_name,
+                        "task_space_name": task_space_name,
+                        "workspace_id": workspace_id,
+                        "workspace_name": workspace_name,
+                        "connector_id": connector_id,
+                        "source": "CLICKUP_CONNECTOR",
+                    }
+
+                    # Generate content hash
+                    content_hash = generate_content_hash(task_content, search_space_id)
+
+                    # Check if document already exists
+                    existing_doc_by_hash_result = await session.execute(
+                            select(Document).where(Document.content_hash == content_hash)
+                    )
+                    existing_document_by_hash = (
+                            existing_doc_by_hash_result.scalars().first()
+                    )
+
+                    if existing_document_by_hash:
+                        logger.info(
+                                f"Document with content hash {content_hash} already exists for task {task_name}. Skipping processing."
+                        )
+                        documents_skipped += 1
+                        continue
+
+                    # Generate embedding for the summary
+                    summary_embedding = config.embedding_model_instance.embed(
+                            task_content
+                    )
+
+                    # Process chunks from the task content
+                    chunks = [
+                        Chunk(
+                                content=chunk.text,
+                                embedding=config.embedding_model_instance.embed(chunk.text),
+                        )
+                        for chunk in config.chunker_instance.chunk(task_content)
+                    ]
+
+                    # Create and store new document
+                    logger.info(f"Creating new document for task {task_name}")
+
+                    document = Document(
+                            search_space_id=search_space_id,
+                            title=f"Task - {task_name}",
+                            document_type=DocumentType.CLICKUP_CONNECTOR,
+                            document_metadata={
+                                "task_id": task_id,
+                                "task_name": task_name,
+                                "task_status": task_status,
+                                "task_priority": task_priority,
+                                "task_assignees": task_assignees,
+                                "task_due_date": task_due_date,
+                                "task_created": task_created,
+                                "task_updated": task_updated,
+                                "indexed_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+                            },
+                            content=task_content,
+                            content_hash=content_hash,
+                            embedding=summary_embedding,
+                            chunks=chunks,
+                    )
+
+                    session.add(document)
+                    documents_indexed += 1
+                    logger.info(f"Successfully indexed new task {task_name}")
+
+                except Exception as e:
+                    logger.error(
+                            f"Error processing task {task.get('name', 'Unknown')}: {e!s}",
+                            exc_info=True,
+                    )
+                    documents_skipped += 1
+
+
+        # Update the last_indexed_at timestamp for the connector only if requested
+        total_processed = documents_indexed
+        if update_last_indexed:
+            connector.last_indexed_at = datetime.now()
+            logger.info(f"Updated last_indexed_at to {connector.last_indexed_at}")
+
+        # Commit all changes
+        await session.commit()
+        logger.info(
+            "Successfully committed all clickup document changes to database"
+        )
+
+        # Log success
+        await task_logger.log_task_success(
+            log_entry,
+            f"Successfully completed clickup indexing for connector {connector_id}",
+            {
+                "pages_processed": total_processed,
+                "documents_indexed": documents_indexed,
+                "documents_skipped": documents_skipped,
+            },
+        )
+
+        logger.info(
+            f"clickup indexing completed: {documents_indexed} new tasks, {documents_skipped} skipped"
+        )
+        return (
+            total_processed,
+            None,
+        )  # Return None as the error message to indicate success
+
+    except SQLAlchemyError as db_error:
+            await session.rollback()
+            await task_logger.log_task_failure(
+                log_entry,
+                f"Database error during ClickUp indexing for connector {connector_id}",
+                str(db_error),
+                {"error_type": "SQLAlchemyError"},
+            )
+            logger.error(f"Database error: {db_error!s}", exc_info=True)
+            return 0, f"Database error: {db_error!s}"
+    except Exception as e:
+            await session.rollback()
+            await task_logger.log_task_failure(
+                log_entry,
+                f"Failed to index ClickUp tasks for connector {connector_id}",
+                str(e),
+                {"error_type": type(e).__name__},
+            )
+            logger.error(f"Failed to index ClickUp tasks: {e!s}", exc_info=True)
+            return 0, f"Failed to index ClickUp tasks: {e!s}"
\ No newline at end of file
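
For reference, a minimal sketch of driving the indexer above from a standalone async script. It relies only on the signature added in this patch; the session-factory import path and the IDs are assumptions for illustration:

    import asyncio

    from app.db import async_session_maker  # assumed location of the session factory
    from app.tasks.connectors_indexing_tasks import index_clickup_tasks

    async def main() -> None:
        async with async_session_maker() as session:
            # Returns (indexed_count, error_message_or_None), as documented above.
            indexed, error = await index_clickup_tasks(
                session,
                connector_id=1,       # hypothetical connector row ID
                search_space_id=1,    # hypothetical search space ID
                user_id="00000000-0000-0000-0000-000000000000",
                start_date="2025-07-01",
                end_date="2025-07-31",
            )
            print(f"indexed={indexed}, error={error}")

    asyncio.run(main())
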

From 89589703206f3a3ab2f0bfaef23344c374545297 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Wed, 30 Jul 2025 21:32:33 +0200
Subject: [PATCH 05/15] update the connector service

---
 .../app/services/connector_service.py | 114 ++++++++++++++++++
 1 file changed, 114 insertions(+)

diff --git a/surfsense_backend/app/services/connector_service.py b/surfsense_backend/app/services/connector_service.py
index fb87a7b..74f7c5a 100644
--- a/surfsense_backend/app/services/connector_service.py
+++ b/surfsense_backend/app/services/connector_service.py
@@ -1170,6 +1170,120 @@ class ConnectorService:
 
         return result_object, confluence_chunks
 
+    async def search_clickup(
+        self,
+        user_query: str,
+        user_id: str,
+        search_space_id: int,
+        top_k: int = 20,
+        search_mode: SearchMode = SearchMode.CHUNKS,
+    ) -> tuple:
+        """
+        Search for ClickUp tasks and return both the source information and langchain documents
+
+        Args:
+            user_query: The user's query
+            user_id: The user's ID
+            search_space_id: The search space ID to search in
+            top_k: Maximum number of results to return
+            search_mode: Search mode (CHUNKS or DOCUMENTS)
+
+        Returns:
+            tuple: (sources_info, langchain_documents)
+        """
+        if search_mode == SearchMode.CHUNKS:
+            clickup_chunks = await self.chunk_retriever.hybrid_search(
+                query_text=user_query,
+                top_k=top_k,
+                user_id=user_id,
+                search_space_id=search_space_id,
+                document_type="CLICKUP_CONNECTOR",
+            )
+        elif search_mode == SearchMode.DOCUMENTS:
+            clickup_chunks = await self.document_retriever.hybrid_search(
+                query_text=user_query,
+                top_k=top_k,
+                user_id=user_id,
+                search_space_id=search_space_id,
+                document_type="CLICKUP_CONNECTOR",
+            )
+            # Transform document retriever results to match expected format
+            clickup_chunks = self._transform_document_results(clickup_chunks)
+
+        # Early return if no results
+        if not clickup_chunks:
+            return {
+                "id": 31,
+                "name": "ClickUp Tasks",
+                "type": "CLICKUP_CONNECTOR",
+                "sources": [],
+            }, []
+
+        sources_list = []
+
+        for chunk in clickup_chunks:
+            if hasattr(chunk, "metadata") and chunk.metadata:
+                document = chunk.metadata
+            else:
+                # Handle case where chunk is a dict (from document retriever)
+                document = chunk
+
+            # Extract ClickUp task information from metadata
+            task_name = document.get("task_name", "Unknown Task")
+            task_id = document.get("task_id", "")
+            task_url = document.get("task_url", "")
+            task_status = document.get("task_status", "Unknown")
+            task_priority = document.get("task_priority", "Unknown")
+            task_assignees = document.get("task_assignees", [])
+            task_due_date = document.get("task_due_date", "")
+            task_list_name = document.get("task_list_name", "")
+            task_space_name = document.get("task_space_name", "")
+
+            # Create description from task details
+            description_parts = []
+            if task_status:
+                description_parts.append(f"Status: {task_status}")
+            if task_priority:
+                description_parts.append(f"Priority: {task_priority}")
+            if task_assignees:
+                assignee_names = [assignee.get("username", "Unknown") for assignee in task_assignees]
+                description_parts.append(f"Assignees: {', '.join(assignee_names)}")
+            if task_due_date:
+                description_parts.append(f"Due: {task_due_date}")
+            if task_list_name:
+                description_parts.append(f"List: {task_list_name}")
+            if task_space_name:
+                description_parts.append(f"Space: {task_space_name}")
+
+            description = " | ".join(description_parts) if description_parts else "ClickUp Task"
+
+            source = {
+                "id": document.get("id", self.source_id_counter),
+                "title": task_name,
+                "description": description,
"url": task_url, + "task_id": task_id, + "status": task_status, + "priority": task_priority, + "assignees": task_assignees, + "due_date": task_due_date, + "list_name": task_list_name, + "space_name": task_space_name, + } + + self.source_id_counter += 1 + sources_list.append(source) + + # Create result object + result_object = { + "id": 31, # Assign a unique ID for the ClickUp connector + "name": "ClickUp Tasks", + "type": "CLICKUP_CONNECTOR", + "sources": sources_list, + } + + return result_object, clickup_chunks + async def search_linkup( self, user_query: str, user_id: str, mode: str = "standard" ) -> tuple: From 161d9c7b91062f59b8eedcb7e0fe452813e3f280 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 30 Jul 2025 21:33:38 +0200 Subject: [PATCH 06/15] update seach source connector schema --- .../app/schemas/search_source_connector.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/surfsense_backend/app/schemas/search_source_connector.py b/surfsense_backend/app/schemas/search_source_connector.py index a49d332..28d5425 100644 --- a/surfsense_backend/app/schemas/search_source_connector.py +++ b/surfsense_backend/app/schemas/search_source_connector.py @@ -167,6 +167,18 @@ class SearchSourceConnectorBase(BaseModel): if not config.get("CONFLUENCE_BASE_URL"): raise ValueError("CONFLUENCE_BASE_URL cannot be empty") + elif connector_type == SearchSourceConnectorType.CLICKUP_CONNECTOR: + # For CLICKUP_CONNECTOR, only allow CLICKUP_API_TOKEN + allowed_keys = ["CLICKUP_API_TOKEN"] + if set(config.keys()) != set(allowed_keys): + raise ValueError( + f"For CLICKUP_CONNECTOR connector type, config must only contain these keys: {allowed_keys}" + ) + + # Ensure the API token is not empty + if not config.get("CLICKUP_API_TOKEN"): + raise ValueError("CLICKUP_API_TOKEN cannot be empty") + return config From d92b7ee5386fe167afcf27e2dcc01c8c6ebdcb64 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 30 Jul 2025 21:34:08 +0200 Subject: [PATCH 07/15] update seach source connector routes --- .../routes/search_source_connectors_routes.py | 76 +++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index 6a10910..f46a62f 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -36,6 +36,7 @@ from app.schemas import ( SearchSourceConnectorUpdate, ) from app.tasks.connectors_indexing_tasks import ( + index_clickup_tasks, index_confluence_pages, index_discord_messages, index_github_repos, @@ -473,6 +474,21 @@ async def index_connector_content( ) response_message = "Confluence indexing started in the background." + elif connector.connector_type == SearchSourceConnectorType.CLICKUP_CONNECTOR: + # Run indexing in background + logger.info( + f"Triggering ClickUp indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}" + ) + background_tasks.add_task( + run_clickup_indexing_with_new_session, + connector_id, + search_space_id, + str(user.id), + indexing_from, + indexing_to, + ) + response_message = "ClickUp indexing started in the background." 
+ elif connector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR: # Run indexing in background logger.info( @@ -960,3 +976,63 @@ async def run_confluence_indexing( exc_info=True, ) # Optionally update status in DB to indicate failure + + +# Add new helper functions for ClickUp indexing +async def run_clickup_indexing_with_new_session( + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """Wrapper to run ClickUp indexing with its own database session.""" + logger.info( + f"Background task started: Indexing ClickUp connector {connector_id} into space {search_space_id} from {start_date} to {end_date}" + ) + async with async_session_maker() as session: + await run_clickup_indexing( + session, connector_id, search_space_id, user_id, start_date, end_date + ) + logger.info( + f"Background task finished: Indexing ClickUp connector {connector_id}" + ) + + +async def run_clickup_indexing( + session: AsyncSession, + connector_id: int, + search_space_id: int, + user_id: str, + start_date: str, + end_date: str, +): + """Runs the ClickUp indexing task and updates the timestamp.""" + try: + indexed_count, error_message = await index_clickup_tasks( + session, + connector_id, + search_space_id, + user_id, + start_date, + end_date, + update_last_indexed=False, + ) + if error_message: + logger.error( + f"ClickUp indexing failed for connector {connector_id}: {error_message}" + ) + # Optionally update status in DB to indicate failure + else: + logger.info( + f"ClickUp indexing successful for connector {connector_id}. Indexed {indexed_count} tasks." + ) + # Update the last indexed timestamp only on success + await update_connector_last_indexed(session, connector_id) + await session.commit() # Commit timestamp update + except Exception as e: + logger.error( + f"Critical error in run_clickup_indexing for connector {connector_id}: {e}", + exc_info=True, + ) + # Optionally update status in DB to indicate failure From 442417b8082390689aa2f0598e24cb8e588a5eb7 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 30 Jul 2025 21:35:27 +0200 Subject: [PATCH 08/15] add migration file --- .../15_add_clickup_connector_enums.py | 63 +++++++++++++++++++ surfsense_backend/app/db.py | 2 + 2 files changed, 65 insertions(+) create mode 100644 surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py diff --git a/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py b/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py new file mode 100644 index 0000000..4f339c0 --- /dev/null +++ b/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py @@ -0,0 +1,63 @@ +"""Add ClickUp connector enums + +Revision ID: 15_add_clickup_connector_enums +Revises: 14_add_confluence_connector_enums +Create Date: 2025-07-29 12:00:00.000000 + +""" + +from typing import Sequence, Union + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "15_add_clickup_connector_enums" +down_revision: Union[str, None] = "14_add_confluence_connector_enums" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Safely add 'CLICKUP_CONNECTOR' to enum types if missing.""" + + # Add to searchsourceconnectortype enum + op.execute( + """ + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_type t + JOIN pg_enum e ON t.oid = e.enumtypid + WHERE t.typname = 'searchsourceconnectortype' AND e.enumlabel = 'CLICKUP_CONNECTOR' + ) THEN + ALTER TYPE searchsourceconnectortype ADD VALUE 'CLICKUP_CONNECTOR'; + END IF; + END + $$; + """ + ) + + # Add to documenttype enum + op.execute( + """ + DO $$ + BEGIN + IF NOT EXISTS ( + SELECT 1 FROM pg_type t + JOIN pg_enum e ON t.oid = e.enumtypid + WHERE t.typname = 'documenttype' AND e.enumlabel = 'CLICKUP_CONNECTOR' + ) THEN + ALTER TYPE documenttype ADD VALUE 'CLICKUP_CONNECTOR'; + END IF; + END + $$; + """ + ) + + +def downgrade() -> None: + """Remove 'CLICKUP_CONNECTOR' from enum types.""" + # Note: PostgreSQL doesn't support removing enum values directly + # This would require recreating the enum type, which is complex + # For now, we'll leave the enum values in place + pass diff --git a/surfsense_backend/app/db.py b/surfsense_backend/app/db.py index b067752..1b7da7e 100644 --- a/surfsense_backend/app/db.py +++ b/surfsense_backend/app/db.py @@ -44,6 +44,7 @@ class DocumentType(str, Enum): DISCORD_CONNECTOR = "DISCORD_CONNECTOR" JIRA_CONNECTOR = "JIRA_CONNECTOR" CONFLUENCE_CONNECTOR = "CONFLUENCE_CONNECTOR" + CLICKUP_CONNECTOR = "CLICKUP_CONNECTOR" class SearchSourceConnectorType(str, Enum): @@ -57,6 +58,7 @@ class SearchSourceConnectorType(str, Enum): DISCORD_CONNECTOR = "DISCORD_CONNECTOR" JIRA_CONNECTOR = "JIRA_CONNECTOR" CONFLUENCE_CONNECTOR = "CONFLUENCE_CONNECTOR" + CLICKUP_CONNECTOR = "CLICKUP_CONNECTOR" class ChatType(str, Enum): From 9a98742f8139b1ec864809a5c0f1d0eb28ad2f00 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 30 Jul 2025 21:37:12 +0200 Subject: [PATCH 09/15] Add web implementation for clickup connector --- .../15_add_clickup_connector_enums.py | 8 +- .../app/services/connector_service.py | 8 +- .../connectors/[connector_id]/edit/page.tsx | 11 + .../connectors/add/clickup-connector/page.tsx | 232 ++++++++++++++++++ .../[search_space_id]/connectors/add/page.tsx | 8 + .../documents/(manage)/page.tsx | 2 + 6 files changed, 263 insertions(+), 6 deletions(-) create mode 100644 surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx diff --git a/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py b/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py index 4f339c0..8882cfc 100644 --- a/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py +++ b/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py @@ -6,15 +6,15 @@ Create Date: 2025-07-29 12:00:00.000000 """ -from typing import Sequence, Union +from collections.abc import Sequence from alembic import op # revision identifiers, used by Alembic. 
 revision: str = "15_add_clickup_connector_enums"
-down_revision: Union[str, None] = "14_add_confluence_connector_enums"
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
+down_revision: str | None = "14_add_confluence_connector_enums"
+branch_labels: str | Sequence[str] | None = None
+depends_on: str | Sequence[str] | None = None
 
 
 def upgrade() -> None:
diff --git a/surfsense_backend/app/services/connector_service.py b/surfsense_backend/app/services/connector_service.py
index 74f7c5a..de9c4c2 100644
--- a/surfsense_backend/app/services/connector_service.py
+++ b/surfsense_backend/app/services/connector_service.py
@@ -1246,7 +1246,9 @@ class ConnectorService:
             if task_priority:
                 description_parts.append(f"Priority: {task_priority}")
             if task_assignees:
-                assignee_names = [assignee.get("username", "Unknown") for assignee in task_assignees]
+                assignee_names = [
+                    assignee.get("username", "Unknown") for assignee in task_assignees
+                ]
                 description_parts.append(f"Assignees: {', '.join(assignee_names)}")
             if task_due_date:
                 description_parts.append(f"Due: {task_due_date}")
@@ -1255,7 +1257,9 @@ class ConnectorService:
             if task_space_name:
                 description_parts.append(f"Space: {task_space_name}")
 
-            description = " | ".join(description_parts) if description_parts else "ClickUp Task"
+            description = (
+                " | ".join(description_parts) if description_parts else "ClickUp Task"
+            )
 
             source = {
                 "id": document.get("id", self.source_id_counter),
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx
index a535319..abd223a 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/[connector_id]/edit/page.tsx
@@ -228,6 +228,17 @@ export default function EditConnectorPage() {
 				)}
 
+				{/* == ClickUp == */}
+				{connector.connector_type === "CLICKUP_CONNECTOR" && (
+					<FormField
+						control={form.control}
+						name="CLICKUP_API_TOKEN"
+						render={({ field }) => (
+							<Input type="password" placeholder="ClickUp API Token" {...field} />
+						)}
+					/>
+				)}
+
 				{/* == Linkup == */}
 				{connector.connector_type === "LINKUP_API" && (
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx
new file mode 100644
index 0000000..5f6eee5
--- /dev/null
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx
@@ -0,0 +1,232 @@
+"use client";
+
+import { useState } from "react";
+import { useRouter } from "next/navigation";
+import { zodResolver } from "@hookform/resolvers/zod";
+import { useForm } from "react-hook-form";
+import * as z from "zod";
+import { Button } from "@/components/ui/button";
+import {
+	Form,
+	FormControl,
+	FormDescription,
+	FormField,
+	FormItem,
+	FormLabel,
+	FormMessage,
+} from "@/components/ui/form";
+import { Input } from "@/components/ui/input";
+import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
+import { ArrowLeft, ExternalLink, Eye, EyeOff } from "lucide-react";
+import Link from "next/link";
+import { toast } from "sonner";
+import { useSearchSourceConnectors } from "@/hooks/useSearchSourceConnectors";
+
+interface ClickUpConnectorPageProps {
+	params: {
+		search_space_id: string;
+	};
+}
+
+// Define the form schema with Zod
+const clickupConnectorFormSchema = z.object({
+	name: z.string().min(3, {
+		message: "Connector name must be at least 3 characters.",
+	}),
+	api_token: z.string().min(1, {
+		message: "ClickUp API token is required.",
+	}),
+});
+
+// Define the type for the form values
+type ClickUpConnectorFormValues = z.infer<typeof clickupConnectorFormSchema>;
+
+export default function ClickUpConnectorPage({ params }: ClickUpConnectorPageProps) {
+	const router = useRouter();
+	const { createConnector } = useSearchSourceConnectors();
+	const [isLoading, setIsLoading] = useState(false);
+	const [showApiToken, setShowApiToken] = useState(false);
+
+	// Initialize the form with react-hook-form and zod validation
+	const form = useForm<ClickUpConnectorFormValues>({
+		resolver: zodResolver(clickupConnectorFormSchema),
+		defaultValues: {
+			name: "ClickUp Connector",
+			api_token: "",
+		},
+	});
+
+	// Handle form submission
+	async function onSubmit(values: ClickUpConnectorFormValues) {
+		setIsLoading(true);
+
+		try {
+			const connectorData = {
+				name: values.name,
+				connector_type: "CLICKUP_CONNECTOR",
+				is_indexable: true,
+				config: {
+					CLICKUP_API_TOKEN: values.api_token,
+				},
+				last_indexed_at: null,
+			};
+
+			await createConnector(connectorData);
+
+			toast.success("ClickUp connector created successfully!");
+			router.push(`/dashboard/${params.search_space_id}/connectors`);
+		} catch (error) {
+			console.error("Error creating ClickUp connector:", error);
+			toast.error("Failed to create ClickUp connector. Please try again.");
+		} finally {
+			setIsLoading(false);
+		}
+	}
+
+	return (
+		<div className="container mx-auto py-8 max-w-2xl">
+			<div className="mb-8">
+				<Link
+					href={`/dashboard/${params.search_space_id}/connectors/add`}
+					className="inline-flex items-center text-sm text-muted-foreground hover:text-foreground mb-4"
+				>
+					<ArrowLeft className="mr-2 h-4 w-4" />
+					Back to connectors
+				</Link>
+				<h1 className="text-3xl font-bold">Add ClickUp Connector</h1>
+				<p className="text-muted-foreground mt-2">
+					Connect your ClickUp workspace to search and retrieve tasks.
+				</p>
+			</div>
+
+			<Card>
+				<CardHeader>
+					<CardTitle>ClickUp Configuration</CardTitle>
+					<CardDescription>
+						Enter your ClickUp API token to connect your workspace. You can generate a personal API token from your ClickUp settings.
+					</CardDescription>
+				</CardHeader>
+				<CardContent>
+					<Form {...form}>
+						<form onSubmit={form.handleSubmit(onSubmit)} className="space-y-6">
+							<FormField
+								control={form.control}
+								name="name"
+								render={({ field }) => (
+									<FormItem>
+										<FormLabel>Connector Name</FormLabel>
+										<FormControl>
+											<Input
+												placeholder="ClickUp Connector"
+												{...field}
+											/>
+										</FormControl>
+										<FormDescription>
+											A friendly name to identify this ClickUp connector.
+										</FormDescription>
+										<FormMessage />
+									</FormItem>
+								)}
+							/>
+
+							<FormField
+								control={form.control}
+								name="api_token"
+								render={({ field }) => (
+									<FormItem>
+										<FormLabel>ClickUp API Token</FormLabel>
+										<FormControl>
+											<div className="relative">
+												<Input
+													type={showApiToken ? "text" : "password"}
+													{...field}
+												/>
+												<Button
+													type="button"
+													variant="ghost"
+													size="sm"
+													className="absolute right-0 top-0 h-full px-3"
+													onClick={() => setShowApiToken(!showApiToken)}
+												>
+													{showApiToken ? <EyeOff className="h-4 w-4" /> : <Eye className="h-4 w-4" />}
+												</Button>
+											</div>
+										</FormControl>
+										<FormDescription>
+											Your ClickUp personal API token. You can generate one in your{" "}
+											<a
+												href="https://app.clickup.com/settings/apps"
+												target="_blank"
+												rel="noopener noreferrer"
+												className="text-primary underline"
+											>
+												ClickUp settings
+												<ExternalLink className="ml-1 inline h-3 w-3" />
+											</a>
+											.
+										</FormDescription>
+										<FormMessage />
+									</FormItem>
+								)}
+							/>
+
+							<div className="flex justify-end gap-4">
+								<Button
+									type="button"
+									variant="outline"
+									onClick={() => router.back()}
+								>
+									Cancel
+								</Button>
+								<Button type="submit" disabled={isLoading}>
+									{isLoading ? "Creating..." : "Create Connector"}
+								</Button>
+							</div>
+						</form>
+					</Form>
+				</CardContent>
+			</Card>
+
+			<Card className="mt-8">
+				<CardHeader>
+					<CardTitle>How to get your ClickUp API Token</CardTitle>
+				</CardHeader>
+				<CardContent className="space-y-4">
+					<div className="space-y-2 text-sm text-muted-foreground">
+						<p>
+							1. Log in to your ClickUp account
+						</p>
+						<p>
+							2. Click your avatar in the upper-right corner and select "Settings"
+						</p>
+						<p>
+							3. In the sidebar, click "Apps"
+						</p>
+						<p>
+							4. Under "API Token", click "Generate" or "Regenerate"
+						</p>
+						<p>
+							5. Copy the generated token and paste it above
+						</p>
+					</div>
+					<Button variant="outline" asChild>
+						<a
+							href="https://app.clickup.com/settings/apps"
+							target="_blank"
+							rel="noopener noreferrer"
+						>
+							Go to ClickUp API Settings
+							<ExternalLink className="ml-2 h-4 w-4" />
+						</a>
+					</Button>
+				</CardContent>
+			</Card>
+		</div>
+	);
+}
diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/page.tsx
index f6dbd51..a1be455 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/page.tsx
@@ -15,6 +15,7 @@ import {
 	IconMail,
 	IconTicket,
 	IconWorldWww,
+	IconChecklist,
 } from "@tabler/icons-react";
 import { AnimatePresence, motion, type Variants } from "framer-motion";
 import Link from "next/link";
@@ -107,6 +108,13 @@ const connectorCategories: ConnectorCategory[] = [
 			icon: <IconLayoutKanban className="h-6 w-6" />,
 			status: "available",
 		},
+		{
+			id: "clickup-connector",
+			title: "ClickUp",
+			description: "Connect to ClickUp to search tasks, comments and project data.",
+			icon: <IconChecklist className="h-6 w-6" />,
+			status: "available",
+		},
 	],
 },
 {
diff --git a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx
index e9f49c9..727a769 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx
@@ -7,6 +7,7 @@ import {
 	IconBrandNotion,
 	IconBrandSlack,
 	IconBrandYoutube,
+	IconChecklist,
 	IconLayoutKanban,
 	IconTicket,
 } from "@tabler/icons-react";
@@ -146,6 +147,7 @@ const documentTypeIcons = {
 	JIRA_CONNECTOR: IconTicket,
 	DISCORD_CONNECTOR: IconBrandDiscord,
 	CONFLUENCE_CONNECTOR: IconBook,
+	CLICKUP_CONNECTOR: IconChecklist,
 } as const;
 
 const columns: ColumnDef<Document>[] = [

From 4cb00735ac790325c99e8a030db51fe4348c9bfd Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Wed, 30 Jul 2025 22:25:47 +0200
Subject: [PATCH 10/15] add coderabbit suggestions

---
 .../app/connectors/clickup_connector.py       |   9 --
 .../app/tasks/connectors_indexing_tasks.py    | 141 ++++++++----------
 .../connectors/add/clickup-connector/page.tsx |  28 ++--
 .../[search_space_id]/connectors/add/page.tsx |   2 +-
 4 files changed, 76 insertions(+), 104 deletions(-)

diff --git a/surfsense_backend/app/connectors/clickup_connector.py b/surfsense_backend/app/connectors/clickup_connector.py
index 7e2d4ca..b10e01e 100644
--- a/surfsense_backend/app/connectors/clickup_connector.py
+++ b/surfsense_backend/app/connectors/clickup_connector.py
@@ -5,7 +5,6 @@ A module for retrieving data from ClickUp.
 Allows fetching tasks from workspaces and lists.
""" -from datetime import datetime from typing import Any import requests @@ -169,14 +168,6 @@ class ClickUpConnector: Tuple containing (tasks list, error message or None) """ try: - # Convert dates to Unix timestamps (milliseconds) - start_timestamp = int( - datetime.strptime(start_date, "%Y-%m-%d").timestamp() * 1000 - ) - end_timestamp = int( - datetime.strptime(end_date, "%Y-%m-%d").timestamp() * 1000 - ) - # TODO : Include date range in api request params = { diff --git a/surfsense_backend/app/tasks/connectors_indexing_tasks.py b/surfsense_backend/app/tasks/connectors_indexing_tasks.py index 02959c1..5f257fb 100644 --- a/surfsense_backend/app/tasks/connectors_indexing_tasks.py +++ b/surfsense_backend/app/tasks/connectors_indexing_tasks.py @@ -1,10 +1,8 @@ import asyncio -import hashlib import logging from datetime import UTC, datetime, timedelta from slack_sdk.errors import SlackApiError -from sqlalchemy import func from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.future import select @@ -2742,7 +2740,6 @@ async def index_clickup_tasks( ) try: - # Get connector configuration result = await session.execute( select(SearchSourceConnector).filter( @@ -2828,7 +2825,9 @@ async def index_clickup_tasks( include_closed=True, ) if error: - logger.warning(f"Error fetching tasks from workspace {workspace_name}: {error}") + logger.warning( + f"Error fetching tasks from workspace {workspace_name}: {error}" + ) continue else: tasks = clickup_client.get_workspace_tasks( @@ -2848,12 +2847,15 @@ async def index_clickup_tasks( task_name = task.get("name", "Untitled Task") task_description = task.get("description", "") task_status = task.get("status", {}).get("status", "Unknown") - task_priority = task.get("priority", {}).get("priority", "Unknown") if task.get("priority") else "None" + task_priority = ( + task.get("priority", {}).get("priority", "Unknown") + if task.get("priority") + else "None" + ) task_assignees = task.get("assignees", []) task_due_date = task.get("due_date") task_created = task.get("date_created") task_updated = task.get("date_updated") - task_url = task.get("url", "") # Get list and space information task_list = task.get("list", {}) @@ -2867,15 +2869,20 @@ async def index_clickup_tasks( if task_description: content_parts.append(f"Description: {task_description}") - content_parts.extend([ - f"Status: {task_status}", - f"Priority: {task_priority}", - f"List: {task_list_name}", - f"Space: {task_space_name}", - ]) + content_parts.extend( + [ + f"Status: {task_status}", + f"Priority: {task_priority}", + f"List: {task_list_name}", + f"Space: {task_space_name}", + ] + ) if task_assignees: - assignee_names = [assignee.get("username", "Unknown") for assignee in task_assignees] + assignee_names = [ + assignee.get("username", "Unknown") + for assignee in task_assignees + ] content_parts.append(f"Assignees: {', '.join(assignee_names)}") if task_due_date: @@ -2887,53 +2894,34 @@ async def index_clickup_tasks( logger.warning(f"Skipping task with no content: {task_name}") continue - # Create document metadata - document_metadata = { - "task_id": task_id, - "task_name": task_name, - "task_url": task_url, - "task_status": task_status, - "task_priority": task_priority, - "task_assignees": task_assignees, - "task_due_date": task_due_date, - "task_created": task_created, - "task_updated": task_updated, - "task_list_name": task_list_name, - "task_space_name": task_space_name, - "workspace_id": workspace_id, - "workspace_name": workspace_name, - 
"connector_id": connector_id, - "source": "CLICKUP_CONNECTOR", - } - # Generate content hash content_hash = generate_content_hash(task_content, search_space_id) # Check if document already exists existing_doc_by_hash_result = await session.execute( - select(Document).where(Document.content_hash == content_hash) + select(Document).where(Document.content_hash == content_hash) ) existing_document_by_hash = ( - existing_doc_by_hash_result.scalars().first() + existing_doc_by_hash_result.scalars().first() ) if existing_document_by_hash: logger.info( - f"Document with content hash {content_hash} already exists for task {task_name}. Skipping processing." + f"Document with content hash {content_hash} already exists for task {task_name}. Skipping processing." ) documents_skipped += 1 continue # Generate embedding for the summary summary_embedding = config.embedding_model_instance.embed( - task_content + task_content ) # Process chunks - using the full page content with comments chunks = [ Chunk( - content=chunk.text, - embedding=config.embedding_model_instance.embed(chunk.text), + content=chunk.text, + embedding=config.embedding_model_instance.embed(chunk.text), ) for chunk in config.chunker_instance.chunk(task_content) ] @@ -2942,24 +2930,24 @@ async def index_clickup_tasks( logger.info(f"Creating new document for task {task_name}") document = Document( - search_space_id=search_space_id, - title=f"Task - {task_name}", - document_type=DocumentType.CLICKUP_CONNECTOR, - document_metadata={ - "task_id": task_id, - "task_name": task_name, - "task_status": task_status, - "task_priority": task_priority, - "task_assignees": task_assignees, - "task_due_date": task_due_date, - "task_created": task_created, - "task_updated": task_updated, - "indexed_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), - }, - content=task_content, - content_hash=content_hash, - embedding=summary_embedding, - chunks=chunks, + search_space_id=search_space_id, + title=f"Task - {task_name}", + document_type=DocumentType.CLICKUP_CONNECTOR, + document_metadata={ + "task_id": task_id, + "task_name": task_name, + "task_status": task_status, + "task_priority": task_priority, + "task_assignees": task_assignees, + "task_due_date": task_due_date, + "task_created": task_created, + "task_updated": task_updated, + "indexed_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + }, + content=task_content, + content_hash=content_hash, + embedding=summary_embedding, + chunks=chunks, ) session.add(document) @@ -2968,12 +2956,11 @@ async def index_clickup_tasks( except Exception as e: logger.error( - f"Error processing task {task.get('name', 'Unknown')}: {e!s}", - exc_info=True, + f"Error processing task {task.get('name', 'Unknown')}: {e!s}", + exc_info=True, ) documents_skipped += 1 - # Update the last_indexed_at timestamp for the connector only if requested total_processed = documents_indexed if update_last_indexed: @@ -3006,22 +2993,22 @@ async def index_clickup_tasks( ) # Return None as the error message to indicate success except SQLAlchemyError as db_error: - await session.rollback() - await task_logger.log_task_failure( - log_entry, - f"Database error during Cickup indexing for connector {connector_id}", - str(db_error), - {"error_type": "SQLAlchemyError"}, - ) - logger.error(f"Database error: {db_error!s}", exc_info=True) - return 0, f"Database error: {db_error!s}" + await session.rollback() + await task_logger.log_task_failure( + log_entry, + f"Database error during Cickup indexing for connector {connector_id}", + str(db_error), + 
{"error_type": "SQLAlchemyError"}, + ) + logger.error(f"Database error: {db_error!s}", exc_info=True) + return 0, f"Database error: {db_error!s}" except Exception as e: - await session.rollback() - await task_logger.log_task_failure( - log_entry, - f"Failed to index ClickUp tasks for connector {connector_id}", - str(e), - {"error_type": type(e).__name__}, - ) - logger.error(f"Failed to index ClickUp tasks: {e!s}", exc_info=True) - return 0, f"Failed to index ClickUp tasks: {e!s}" \ No newline at end of file + await session.rollback() + await task_logger.log_task_failure( + log_entry, + f"Failed to index ClickUp tasks for connector {connector_id}", + str(e), + {"error_type": type(e).__name__}, + ) + logger.error(f"Failed to index ClickUp tasks: {e!s}", exc_info=True) + return 0, f"Failed to index ClickUp tasks: {e!s}" diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx index 5f6eee5..5e337ac 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx @@ -1,11 +1,15 @@ "use client"; -import { useState } from "react"; -import { useRouter } from "next/navigation"; import { zodResolver } from "@hookform/resolvers/zod"; +import { ArrowLeft, ExternalLink, Eye, EyeOff } from "lucide-react"; +import Link from "next/link"; +import { useRouter } from "next/navigation"; +import { useState } from "react"; import { useForm } from "react-hook-form"; +import { toast } from "sonner"; import * as z from "zod"; import { Button } from "@/components/ui/button"; +import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"; import { Form, FormControl, @@ -16,10 +20,6 @@ import { FormMessage, } from "@/components/ui/form"; import { Input } from "@/components/ui/input"; -import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"; -import { ArrowLeft, ExternalLink, Eye, EyeOff } from "lucide-react"; -import Link from "next/link"; -import { toast } from "sonner"; import { useSearchSourceConnectors } from "@/hooks/useSearchSourceConnectors"; interface ClickUpConnectorPageProps { @@ -103,7 +103,8 @@ export default function ClickUpConnectorPage({ params }: ClickUpConnectorPagePro ClickUp Configuration - Enter your ClickUp API token to connect your workspace. You can generate a personal API token from your ClickUp settings. + Enter your ClickUp API token to connect your workspace. You can generate a personal API + token from your ClickUp settings. @@ -116,10 +117,7 @@ export default function ClickUpConnectorPage({ params }: ClickUpConnectorPagePro Connector Name - + A friendly name to identify this ClickUp connector. @@ -199,15 +197,11 @@ export default function ClickUpConnectorPage({ params }: ClickUpConnectorPagePro
-

- 1. Log in to your ClickUp account -

+

1. Log in to your ClickUp account

2. Click your avatar in the upper-right corner and select "Settings"

-

- 3. In the sidebar, click "Apps" -

+

3. In the sidebar, click "Apps"

4. Under "API Token", click "Generate" or "Regenerate"

diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/page.tsx index a1be455..7f79bf8 100644 --- a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/page.tsx +++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/page.tsx @@ -8,6 +8,7 @@ import { IconBrandSlack, IconBrandWindows, IconBrandZoom, + IconChecklist, IconChevronDown, IconChevronRight, IconLayoutKanban, @@ -15,7 +16,6 @@ import { IconMail, IconTicket, IconWorldWww, - IconChecklist, } from "@tabler/icons-react"; import { AnimatePresence, motion, type Variants } from "framer-motion"; import Link from "next/link"; From eb6830c4fa71665df2ba42fbdd9d0c681bf17696 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Wed, 30 Jul 2025 22:30:50 +0200 Subject: [PATCH 11/15] fix ruff issues --- .../app/routes/search_source_connectors_routes.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/surfsense_backend/app/routes/search_source_connectors_routes.py b/surfsense_backend/app/routes/search_source_connectors_routes.py index f46a62f..f91bd9f 100644 --- a/surfsense_backend/app/routes/search_source_connectors_routes.py +++ b/surfsense_backend/app/routes/search_source_connectors_routes.py @@ -994,9 +994,7 @@ async def run_clickup_indexing_with_new_session( await run_clickup_indexing( session, connector_id, search_space_id, user_id, start_date, end_date ) - logger.info( - f"Background task finished: Indexing ClickUp connector {connector_id}" - ) + logger.info(f"Background task finished: Indexing ClickUp connector {connector_id}") async def run_clickup_indexing( From 9c2408d0264e3eb6caee5744252ce05da5e19932 Mon Sep 17 00:00:00 2001 From: CREDO23 Date: Thu, 31 Jul 2025 23:21:01 +0200 Subject: [PATCH 12/15] fix source generation for clickup tasks --- .../15_add_clickup_connector_enums.py | 8 +++--- .../app/services/connector_service.py | 28 +++++++++---------- 2 files changed, 18 insertions(+), 18 deletions(-) diff --git a/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py b/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py index 8882cfc..4c5a625 100644 --- a/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py +++ b/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py @@ -1,7 +1,7 @@ """Add ClickUp connector enums -Revision ID: 15_add_clickup_connector_enums -Revises: 14_add_confluence_connector_enums +Revision ID: '15' +Revises: '14' Create Date: 2025-07-29 12:00:00.000000 """ @@ -11,8 +11,8 @@ from collections.abc import Sequence from alembic import op # revision identifiers, used by Alembic. 
-revision: str = "15_add_clickup_connector_enums"
-down_revision: str | None = "14_add_confluence_connector_enums"
+revision: str = '15'
+down_revision: str | None = '14'
 branch_labels: str | Sequence[str] | None = None
 depends_on: str | Sequence[str] | None = None
 
diff --git a/surfsense_backend/app/services/connector_service.py b/surfsense_backend/app/services/connector_service.py
index de9c4c2..9389807 100644
--- a/surfsense_backend/app/services/connector_service.py
+++ b/surfsense_backend/app/services/connector_service.py
@@ -1,4 +1,5 @@
 import asyncio
+import json
 from typing import Any
 
 from linkup import LinkupClient
@@ -1222,22 +1223,21 @@ class ConnectorService:
         sources_list = []
 
         for chunk in clickup_chunks:
-            if hasattr(chunk, "metadata") and chunk.metadata:
-                document = chunk.metadata
-            else:
-                # Handle case where chunk is a dict (from document retriever)
-                document = chunk
+
+            # Extract document metadata
+            document = chunk.get("document", {})
+            metadata = document.get("metadata", {})
 
             # Extract ClickUp task information from metadata
-            task_name = document.get("task_name", "Unknown Task")
-            task_id = document.get("task_id", "")
-            task_url = document.get("task_url", "")
-            task_status = document.get("task_status", "Unknown")
-            task_priority = document.get("task_priority", "Unknown")
-            task_assignees = document.get("task_assignees", [])
-            task_due_date = document.get("task_due_date", "")
-            task_list_name = document.get("task_list_name", "")
-            task_space_name = document.get("task_space_name", "")
+            task_name = metadata.get("task_name", "Unknown Task")
+            task_id = metadata.get("task_id", "")
+            task_url = metadata.get("task_url", "")
+            task_status = metadata.get("task_status", "Unknown")
+            task_priority = metadata.get("task_priority", "Unknown")
+            task_assignees = metadata.get("task_assignees", [])
+            task_due_date = metadata.get("task_due_date", "")
+            task_list_name = metadata.get("task_list_name", "")
+            task_space_name = metadata.get("task_space_name", "")
 
             # Create description from task details
             description_parts = []

From 644f356430ad5848d808ea2dd49270e3b73aa57b Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Fri, 1 Aug 2025 00:06:17 +0200
Subject: [PATCH 13/15] fix type filtering in document table

---
 .../documents/(manage)/page.tsx | 18 +++++++++++++++---
 1 file changed, 15 insertions(+), 3 deletions(-)

diff --git a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx
index 727a769..b7eb2ec 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx
@@ -224,6 +224,7 @@ const columns: ColumnDef<Document>[] = [
 		);
 	},
 	size: 180,
+	enableColumnFilter: true,
 },
 {
 	header: "Content Summary",
@@ -404,25 +405,29 @@ export default function DocumentsTable() {
 	// Get unique status values
 	const uniqueStatusValues = useMemo(() => {
 		const statusColumn = table.getColumn("document_type");
+		if (!data.length) return []; // Don't compute until data is present
 		if (!statusColumn) return [];
 		const values = Array.from(statusColumn.getFacetedUniqueValues().keys());
 		return values.sort();
-	}, [table.getColumn]);
+	}, [table.getColumn, data]);
 
 	// Get counts for each status
 	const statusCounts = useMemo(() => {
 		const statusColumn = table.getColumn("document_type");
+		if (!data.length) return new Map(); // Don't compute until data is present
 		if (!statusColumn) return new Map();
 		return statusColumn.getFacetedUniqueValues();
-	}, [table.getColumn]);
+	}, [table.getColumn, data, columnFilters]);
 
 	const selectedStatuses = useMemo(() => {
 		const filterValue = table.getColumn("document_type")?.getFilterValue() as string[];
+		if (!data.length) return []; // Don't compute until data is present
+
 		return filterValue ?? [];
-	}, [table.getColumn]);
+	}, [table.getColumn, data, columnFilters]);
 
 	const handleStatusChange = (checked: boolean, value: string) => {
 		const filterValue = table.getColumn("document_type")?.getFilterValue() as string[];
@@ -437,6 +442,13 @@ export default function DocumentsTable() {
 			}
 		}
 
+		setColumnFilters([
+			{
+				id: "document_type",
+				value: newFilterValue,
+			},
+		]);
+
 		table
 			.getColumn("document_type")
 			?.setFilterValue(newFilterValue.length ? newFilterValue : undefined);

From dabb30c0434264df8b8eecf4001a002517be58cd Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Fri, 1 Aug 2025 00:21:28 +0200
Subject: [PATCH 14/15] fix issues in add clickup connector page

---
 .../connectors/add/clickup-connector/page.tsx | 35 +++++++------------
 1 file changed, 13 insertions(+), 22 deletions(-)

diff --git a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx
index 5e337ac..cfe0cb3 100644
--- a/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx
+++ b/surfsense_web/app/dashboard/[search_space_id]/connectors/add/clickup-connector/page.tsx
@@ -3,7 +3,7 @@
 import { zodResolver } from "@hookform/resolvers/zod";
 import { ArrowLeft, ExternalLink, Eye, EyeOff } from "lucide-react";
 import Link from "next/link";
-import { useRouter } from "next/navigation";
+import { useParams, useRouter } from "next/navigation";
 import { useState } from "react";
 import { useForm } from "react-hook-form";
 import { toast } from "sonner";
@@ -22,12 +22,6 @@ import {
 import { Input } from "@/components/ui/input";
 import { useSearchSourceConnectors } from "@/hooks/useSearchSourceConnectors";
 
-interface ClickUpConnectorPageProps {
-	params: {
-		search_space_id: string;
-	};
-}
-
 // Define the form schema with Zod
 const clickupConnectorFormSchema = z.object({
 	name: z.string().min(3, {
@@ -41,8 +35,10 @@ const clickupConnectorFormSchema = z.object({
 // Define the type for the form values
 type ClickUpConnectorFormValues = z.infer<typeof clickupConnectorFormSchema>;
 
-export default function ClickUpConnectorPage({ params }: ClickUpConnectorPageProps) {
+export default function ClickUpConnectorPage() {
 	const router = useRouter();
+	const params = useParams();
+	const searchSpaceId = params.search_space_id as string;
 	const { createConnector } = useSearchSourceConnectors();
 	const [isLoading, setIsLoading] = useState(false);
 	const [showApiToken, setShowApiToken] = useState(false);
@@ -74,7 +70,7 @@ export default function ClickUpConnectorPage() {
 			await createConnector(connectorData);
 
 			toast.success("ClickUp connector created successfully!");
-			router.push(`/dashboard/${params.search_space_id}/connectors`);
+			router.push(`/dashboard/${searchSpaceId}/connectors`);
 		} catch (error) {
 			console.error("Error creating ClickUp connector:", error);
 			toast.error("Failed to create ClickUp connector. Please try again.");
@@ -85,19 +81,14 @@ export default function ClickUpConnectorPage() {
 	return (
 		<div className="container mx-auto py-8 max-w-2xl">
-			<div className="mb-8">
-				<Link
-					href={`/dashboard/${params.search_space_id}/connectors/add`}
-					className="inline-flex items-center text-sm text-muted-foreground hover:text-foreground mb-4"
-				>
-					<ArrowLeft className="mr-2 h-4 w-4" />
-					Back to connectors
-				</Link>
-				<h1 className="text-3xl font-bold">Add ClickUp Connector</h1>
-				<p className="text-muted-foreground mt-2">
-					Connect your ClickUp workspace to search and retrieve tasks.
-				</p>
-			</div>
+			<Link
+				href={`/dashboard/${searchSpaceId}/connectors/add`}
+				className="inline-flex items-center text-sm text-muted-foreground hover:text-foreground mb-4"
+			>
+				<ArrowLeft className="mr-2 h-4 w-4" />
+				Back to connectors
+			</Link>
+			<h1 className="text-3xl font-bold">Add ClickUp Connector</h1>
+			<p className="text-muted-foreground mt-2 mb-8">
+				Connect your ClickUp workspace to search and retrieve tasks.
+			</p>
 
 			<Card>

From e96590ff86f261bbc3120c4446e8a5a57ecac444 Mon Sep 17 00:00:00 2001
From: CREDO23
Date: Fri, 1 Aug 2025 00:27:50 +0200
Subject: [PATCH 15/15] fix ruff issues

---
 .../alembic/versions/15_add_clickup_connector_enums.py | 4 ++--
 surfsense_backend/app/services/connector_service.py    | 2 --
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py b/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py
index 4c5a625..7e41e62 100644
--- a/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py
+++ b/surfsense_backend/alembic/versions/15_add_clickup_connector_enums.py
@@ -11,8 +11,8 @@ from collections.abc import Sequence
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision: str = '15'
-down_revision: str | None = '14'
+revision: str = "15"
+down_revision: str | None = "14"
 branch_labels: str | Sequence[str] | None = None
 depends_on: str | Sequence[str] | None = None
 
diff --git a/surfsense_backend/app/services/connector_service.py b/surfsense_backend/app/services/connector_service.py
index 9389807..3b3cce7 100644
--- a/surfsense_backend/app/services/connector_service.py
+++ b/surfsense_backend/app/services/connector_service.py
@@ -1,5 +1,4 @@
 import asyncio
-import json
 from typing import Any
 
 from linkup import LinkupClient
@@ -1223,7 +1222,6 @@ class ConnectorService:
         sources_list = []
 
         for chunk in clickup_chunks:
-
             # Extract document metadata
             document = chunk.get("document", {})
             metadata = document.get("metadata", {})
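
With patches 12 and 15 applied, search_clickup reads task fields from chunk["document"]["metadata"]. A small sketch of that mapping in isolation, using a fabricated chunk payload shaped like the retriever output assumed above:

    # Fabricated example payload; real chunks come from the hybrid retrievers.
    sample_chunk = {
        "document": {
            "metadata": {
                "task_name": "Write release notes",
                "task_status": "in progress",
                "task_priority": "high",
                "task_list_name": "Docs",
            }
        }
    }

    metadata = sample_chunk.get("document", {}).get("metadata", {})
    description_parts = [
        f"Status: {metadata.get('task_status', 'Unknown')}",
        f"Priority: {metadata.get('task_priority', 'Unknown')}",
        f"List: {metadata.get('task_list_name', '')}",
    ]
    # Prints: Status: in progress | Priority: high | List: Docs
    print(" | ".join(description_parts))
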