mirror of https://github.com/MODSetter/SurfSense.git
synced 2025-09-01 18:19:08 +00:00

feat: Added Linear Connector

This commit is contained in:
parent 2b7a1b1082
commit e0eb9d4b8b

16 changed files with 1419 additions and 38 deletions

@@ -6,7 +6,7 @@
 # SurfSense
 
-While tools like NotebookLM and Perplexity are impressive and highly effective for conducting research on any topic/query, SurfSense elevates this capability by integrating with your personal knowledge base. It is a highly customizable AI research agent, connected to external sources such as search engines (Tavily), Slack, Notion, YouTube, GitHub and more to come.
+While tools like NotebookLM and Perplexity are impressive and highly effective for conducting research on any topic/query, SurfSense elevates this capability by integrating with your personal knowledge base. It is a highly customizable AI research agent, connected to external sources such as search engines (Tavily), Slack, Linear, Notion, YouTube, GitHub and more to come.
 
 # Video

@@ -44,6 +44,7 @@ Open source and easy to deploy locally.
 #### ℹ️ **External Sources**
 - Search Engines (Tavily)
 - Slack
+- Linear
 - Notion
 - Youtube Videos
 - GitHub
@@ -0,0 +1,45 @@
"""Add LINEAR_CONNECTOR to SearchSourceConnectorType enum

Revision ID: 2
Revises: e55302644c51
Create Date: 2025-04-16 10:00:00.000000

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '2'
down_revision: Union[str, None] = 'e55302644c51'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###

    # Manually add the command to add the enum value
    op.execute("ALTER TYPE searchsourceconnectortype ADD VALUE 'LINEAR_CONNECTOR'")

    # Pass for the rest, as autogenerate didn't run to add other schema details
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###

    # Downgrading removal of an enum value requires recreating the type
    op.execute("ALTER TYPE searchsourceconnectortype RENAME TO searchsourceconnectortype_old")
    op.execute("CREATE TYPE searchsourceconnectortype AS ENUM('SERPER_API', 'TAVILY_API', 'SLACK_CONNECTOR', 'NOTION_CONNECTOR', 'GITHUB_CONNECTOR')")
    op.execute((
        "ALTER TABLE search_source_connectors ALTER COLUMN connector_type TYPE searchsourceconnectortype USING "
        "connector_type::text::searchsourceconnectortype"
    ))
    op.execute("DROP TYPE searchsourceconnectortype_old")

    pass
    # ### end Alembic commands ###
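
Two caveats worth flagging for this migration. First, `ALTER TYPE ... ADD VALUE` cannot run inside the transaction Alembic normally opens on PostgreSQL versions before 12, so older servers may need the statement issued with autocommit. Second, unlike the documenttype migration below, this downgrade never deletes rows whose connector_type is 'LINEAR_CONNECTOR', so the USING cast will fail if any such connector still exists. A minimal sketch for verifying the upgrade landed — the connection string is a placeholder, not part of this commit:

import sqlalchemy as sa

engine = sa.create_engine("postgresql://user:pass@localhost/surfsense")  # hypothetical DSN
with engine.connect() as conn:
    values = conn.execute(
        sa.text("SELECT unnest(enum_range(NULL::searchsourceconnectortype))::text")
    ).scalars().all()
assert "LINEAR_CONNECTOR" in values  # present after upgrade()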

@@ -0,0 +1,71 @@
"""Add LINEAR_CONNECTOR to DocumentType enum

Revision ID: 3
Revises: 2
Create Date: 2025-04-16 10:05:00.059921

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '3'
down_revision: Union[str, None] = '2'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

# Define the ENUM type name and the new value
ENUM_NAME = 'documenttype'  # Make sure this matches the name in your DB (usually lowercase class name)
NEW_VALUE = 'LINEAR_CONNECTOR'


def upgrade() -> None:
    """Upgrade schema."""
    op.execute(f"ALTER TYPE {ENUM_NAME} ADD VALUE '{NEW_VALUE}'")


# Warning: This will delete all rows with the new value
def downgrade() -> None:
    """Downgrade schema - remove LINEAR_CONNECTOR from enum."""

    # The old type name
    old_enum_name = f"{ENUM_NAME}_old"

    # Enum values *before* LINEAR_CONNECTOR was added
    old_values = (
        'EXTENSION',
        'CRAWLED_URL',
        'FILE',
        'SLACK_CONNECTOR',
        'NOTION_CONNECTOR',
        'YOUTUBE_VIDEO',
        'GITHUB_CONNECTOR'
    )
    old_values_sql = ", ".join([f"'{v}'" for v in old_values])

    # Table and column names (adjust if different)
    table_name = 'documents'
    column_name = 'document_type'

    # 1. Rename the current enum type
    op.execute(f"ALTER TYPE {ENUM_NAME} RENAME TO {old_enum_name}")

    # 2. Create the new enum type with the old values
    op.execute(f"CREATE TYPE {ENUM_NAME} AS ENUM({old_values_sql})")

    # 3. Delete rows that still use the value being removed
    op.execute(
        f"DELETE FROM {table_name} WHERE {column_name}::text = '{NEW_VALUE}'"
    )

    # 4. Alter the column to use the new enum type (casting old values)
    op.execute(
        f"ALTER TABLE {table_name} ALTER COLUMN {column_name} "
        f"TYPE {ENUM_NAME} USING {column_name}::text::{ENUM_NAME}"
    )

    # 5. Drop the old enum type
    op.execute(f"DROP TYPE {old_enum_name}")
    # ### end Alembic commands ###
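
Because this downgrade deletes every documents row that carries the new value, it is worth rehearsing against a disposable copy of the database first. A sketch of driving both enum migrations programmatically, assuming the stock alembic.ini in the backend root:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")   # assumption: run from surfsense_backend/
command.upgrade(cfg, "3")     # applies revisions 2 and 3
command.downgrade(cfg, "2")   # WARNING: deletes documents typed LINEAR_CONNECTOR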

surfsense_backend/app/connectors/linear_connector.py (new file, 454 lines)

@@ -0,0 +1,454 @@
"""
|
||||
Linear Connector Module
|
||||
|
||||
A module for retrieving issues and comments from Linear.
|
||||
Allows fetching issue lists and their comments with date range filtering.
|
||||
"""
|
||||
|
||||
import requests
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional, Tuple, Any, Union
|
||||
|
||||
|
||||
class LinearConnector:
|
||||
"""Class for retrieving issues and comments from Linear."""
|
||||
|
||||
def __init__(self, token: str = None):
|
||||
"""
|
||||
Initialize the LinearConnector class.
|
||||
|
||||
Args:
|
||||
token: Linear API token (optional, can be set later with set_token)
|
||||
"""
|
||||
self.token = token
|
||||
self.api_url = "https://api.linear.app/graphql"
|
||||
|
||||
def set_token(self, token: str) -> None:
|
||||
"""
|
||||
Set the Linear API token.
|
||||
|
||||
Args:
|
||||
token: Linear API token
|
||||
"""
|
||||
self.token = token
|
||||
|
||||
def get_headers(self) -> Dict[str, str]:
|
||||
"""
|
||||
Get headers for Linear API requests.
|
||||
|
||||
Returns:
|
||||
Dictionary of headers
|
||||
|
||||
Raises:
|
||||
ValueError: If no Linear token has been set
|
||||
"""
|
||||
if not self.token:
|
||||
raise ValueError("Linear token not initialized. Call set_token() first.")
|
||||
|
||||
return {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': self.token
|
||||
}
|
||||
|
||||
def execute_graphql_query(self, query: str, variables: Dict[str, Any] = None) -> Dict[str, Any]:
|
||||
"""
|
||||
Execute a GraphQL query against the Linear API.
|
||||
|
||||
Args:
|
||||
query: GraphQL query string
|
||||
variables: Variables for the GraphQL query (optional)
|
||||
|
||||
Returns:
|
||||
Response data from the API
|
||||
|
||||
Raises:
|
||||
ValueError: If no Linear token has been set
|
||||
Exception: If the API request fails
|
||||
"""
|
||||
if not self.token:
|
||||
raise ValueError("Linear token not initialized. Call set_token() first.")
|
||||
|
||||
headers = self.get_headers()
|
||||
payload = {'query': query}
|
||||
|
||||
if variables:
|
||||
payload['variables'] = variables
|
||||
|
||||
response = requests.post(
|
||||
self.api_url,
|
||||
headers=headers,
|
||||
json=payload
|
||||
)
|
||||
|
||||
if response.status_code == 200:
|
||||
return response.json()
|
||||
else:
|
||||
raise Exception(f"Query failed with status code {response.status_code}: {response.text}")

    def get_all_issues(self, include_comments: bool = True) -> List[Dict[str, Any]]:
        """
        Fetch all issues from Linear.

        Args:
            include_comments: Whether to include comments in the response

        Returns:
            List of issue objects

        Raises:
            ValueError: If no Linear token has been set
            Exception: If the API request fails
        """
        comments_query = ""
        if include_comments:
            comments_query = """
                comments {
                    nodes {
                        id
                        body
                        user {
                            id
                            name
                            email
                        }
                        createdAt
                        updatedAt
                    }
                }
            """

        query = f"""
        query {{
            issues {{
                nodes {{
                    id
                    identifier
                    title
                    description
                    state {{
                        id
                        name
                        type
                    }}
                    assignee {{
                        id
                        name
                        email
                    }}
                    creator {{
                        id
                        name
                        email
                    }}
                    createdAt
                    updatedAt
                    {comments_query}
                }}
            }}
        }}
        """

        result = self.execute_graphql_query(query)

        # Extract issues from the response
        if "data" in result and "issues" in result["data"] and "nodes" in result["data"]["issues"]:
            return result["data"]["issues"]["nodes"]

        return []

    def get_issues_by_date_range(
        self,
        start_date: str,
        end_date: str,
        include_comments: bool = True
    ) -> Tuple[List[Dict[str, Any]], Optional[str]]:
        """
        Fetch issues within a date range.

        Args:
            start_date: Start date in YYYY-MM-DD format
            end_date: End date in YYYY-MM-DD format (inclusive)
            include_comments: Whether to include comments in the response

        Returns:
            Tuple containing (issues list, error message or None)
        """
        # Convert date strings to ISO format
        try:
            # For Linear API: we need to use a more specific format for the filter
            # Instead of DateTime, use a string in the filter for DateTimeOrDuration
            comments_query = ""
            if include_comments:
                comments_query = """
                    comments {
                        nodes {
                            id
                            body
                            user {
                                id
                                name
                                email
                            }
                            createdAt
                            updatedAt
                        }
                    }
                """

            # Query issues that were either created OR updated within the date range
            # This ensures we catch both new issues and updated existing issues
            query = f"""
            query IssuesByDateRange($after: String) {{
                issues(
                    first: 100,
                    after: $after,
                    filter: {{
                        or: [
                            {{
                                createdAt: {{
                                    gte: "{start_date}T00:00:00Z"
                                    lte: "{end_date}T23:59:59Z"
                                }}
                            }},
                            {{
                                updatedAt: {{
                                    gte: "{start_date}T00:00:00Z"
                                    lte: "{end_date}T23:59:59Z"
                                }}
                            }}
                        ]
                    }}
                ) {{
                    nodes {{
                        id
                        identifier
                        title
                        description
                        state {{
                            id
                            name
                            type
                        }}
                        assignee {{
                            id
                            name
                            email
                        }}
                        creator {{
                            id
                            name
                            email
                        }}
                        createdAt
                        updatedAt
                        {comments_query}
                    }}
                    pageInfo {{
                        hasNextPage
                        endCursor
                    }}
                }}
            }}
            """

            try:
                all_issues = []
                has_next_page = True
                cursor = None

                # Handle pagination to get all issues
                while has_next_page:
                    variables = {"after": cursor} if cursor else {}
                    result = self.execute_graphql_query(query, variables)

                    # Check for errors
                    if "errors" in result:
                        error_message = "; ".join([error.get("message", "Unknown error") for error in result["errors"]])
                        return [], f"GraphQL errors: {error_message}"

                    # Extract issues from the response
                    if "data" in result and "issues" in result["data"]:
                        issues_page = result["data"]["issues"]

                        # Add issues from this page
                        if "nodes" in issues_page:
                            all_issues.extend(issues_page["nodes"])

                        # Check if there are more pages
                        if "pageInfo" in issues_page:
                            page_info = issues_page["pageInfo"]
                            has_next_page = page_info.get("hasNextPage", False)
                            cursor = page_info.get("endCursor") if has_next_page else None
                        else:
                            has_next_page = False
                    else:
                        has_next_page = False

                if not all_issues:
                    return [], "No issues found in the specified date range."

                return all_issues, None

            except Exception as e:
                return [], f"Error fetching issues: {str(e)}"

        except ValueError as e:
            return [], f"Invalid date format: {str(e)}. Please use YYYY-MM-DD."

    def format_issue(self, issue: Dict[str, Any]) -> Dict[str, Any]:
        """
        Format an issue for easier consumption.

        Args:
            issue: The issue object from Linear API

        Returns:
            Formatted issue dictionary
        """
        # Extract basic issue details
        formatted = {
            "id": issue.get("id", ""),
            "identifier": issue.get("identifier", ""),
            "title": issue.get("title", ""),
            "description": issue.get("description", ""),
            "state": issue.get("state", {}).get("name", "Unknown") if issue.get("state") else "Unknown",
            "state_type": issue.get("state", {}).get("type", "Unknown") if issue.get("state") else "Unknown",
            "created_at": issue.get("createdAt", ""),
            "updated_at": issue.get("updatedAt", ""),
            "creator": {
                "id": issue.get("creator", {}).get("id", "") if issue.get("creator") else "",
                "name": issue.get("creator", {}).get("name", "Unknown") if issue.get("creator") else "Unknown",
                "email": issue.get("creator", {}).get("email", "") if issue.get("creator") else ""
            } if issue.get("creator") else {"id": "", "name": "Unknown", "email": ""},
            "assignee": {
                "id": issue.get("assignee", {}).get("id", ""),
                "name": issue.get("assignee", {}).get("name", "Unknown"),
                "email": issue.get("assignee", {}).get("email", "")
            } if issue.get("assignee") else None,
            "comments": []
        }

        # Extract comments if available
        if "comments" in issue and "nodes" in issue["comments"]:
            for comment in issue["comments"]["nodes"]:
                formatted_comment = {
                    "id": comment.get("id", ""),
                    "body": comment.get("body", ""),
                    "created_at": comment.get("createdAt", ""),
                    "updated_at": comment.get("updatedAt", ""),
                    "user": {
                        "id": comment.get("user", {}).get("id", "") if comment.get("user") else "",
                        "name": comment.get("user", {}).get("name", "Unknown") if comment.get("user") else "Unknown",
                        "email": comment.get("user", {}).get("email", "") if comment.get("user") else ""
                    } if comment.get("user") else {"id": "", "name": "Unknown", "email": ""}
                }
                formatted["comments"].append(formatted_comment)

        return formatted

    def format_issue_to_markdown(self, issue: Dict[str, Any]) -> str:
        """
        Convert an issue to markdown format.

        Args:
            issue: The issue object (either raw or formatted)

        Returns:
            Markdown string representation of the issue
        """
        # Format the issue if it's not already formatted
        if "identifier" not in issue:
            issue = self.format_issue(issue)

        # Build the markdown content
        markdown = f"# {issue.get('identifier', 'No ID')}: {issue.get('title', 'No Title')}\n\n"

        if issue.get('state'):
            markdown += f"**Status:** {issue['state']}\n\n"

        if issue.get('assignee') and issue['assignee'].get('name'):
            markdown += f"**Assignee:** {issue['assignee']['name']}\n"

        if issue.get('creator') and issue['creator'].get('name'):
            markdown += f"**Created by:** {issue['creator']['name']}\n"

        if issue.get('created_at'):
            created_date = self.format_date(issue['created_at'])
            markdown += f"**Created:** {created_date}\n"

        if issue.get('updated_at'):
            updated_date = self.format_date(issue['updated_at'])
            markdown += f"**Updated:** {updated_date}\n\n"

        if issue.get('description'):
            markdown += f"## Description\n\n{issue['description']}\n\n"

        if issue.get('comments'):
            markdown += f"## Comments ({len(issue['comments'])})\n\n"

            for comment in issue['comments']:
                user_name = "Unknown"
                if comment.get('user') and comment['user'].get('name'):
                    user_name = comment['user']['name']

                comment_date = "Unknown date"
                if comment.get('created_at'):
                    comment_date = self.format_date(comment['created_at'])

                markdown += f"### {user_name} ({comment_date})\n\n{comment.get('body', '')}\n\n---\n\n"

        return markdown

    @staticmethod
    def format_date(iso_date: str) -> str:
        """
        Format an ISO date string to a more readable format.

        Args:
            iso_date: ISO format date string

        Returns:
            Formatted date string
        """
        if not iso_date or not isinstance(iso_date, str):
            return "Unknown date"

        try:
            dt = datetime.fromisoformat(iso_date.replace('Z', '+00:00'))
            return dt.strftime('%Y-%m-%d %H:%M:%S')
        except ValueError:
            return iso_date


# Example usage (uncomment to use):
"""
if __name__ == "__main__":
    # Set your token here
    token = "YOUR_LINEAR_API_KEY"

    linear = LinearConnector(token)

    try:
        # Get all issues with comments
        issues = linear.get_all_issues()
        print(f"Retrieved {len(issues)} issues")

        # Format and print the first issue as markdown
        if issues:
            issue_md = linear.format_issue_to_markdown(issues[0])
            print("\nSample Issue in Markdown:\n")
            print(issue_md)

        # Get issues by date range
        start_date = "2023-01-01"
        end_date = "2023-01-31"
        date_issues, error = linear.get_issues_by_date_range(start_date, end_date)

        if error:
            print(f"Error: {error}")
        else:
            print(f"\nRetrieved {len(date_issues)} issues from {start_date} to {end_date}")

    except Exception as e:
        print(f"Error: {e}")
"""

@@ -41,6 +41,7 @@ class DocumentType(str, Enum):
     NOTION_CONNECTOR = "NOTION_CONNECTOR"
     YOUTUBE_VIDEO = "YOUTUBE_VIDEO"
     GITHUB_CONNECTOR = "GITHUB_CONNECTOR"
+    LINEAR_CONNECTOR = "LINEAR_CONNECTOR"
 
 class SearchSourceConnectorType(str, Enum):
     SERPER_API = "SERPER_API"

@@ -48,6 +49,7 @@ class SearchSourceConnectorType(str, Enum):
     SLACK_CONNECTOR = "SLACK_CONNECTOR"
     NOTION_CONNECTOR = "NOTION_CONNECTOR"
     GITHUB_CONNECTOR = "GITHUB_CONNECTOR"
+    LINEAR_CONNECTOR = "LINEAR_CONNECTOR"
 
 class ChatType(str, Enum):
     GENERAL = "GENERAL"

@@ -7,7 +7,7 @@ PUT /search-source-connectors/{connector_id} - Update a specific connector
 DELETE /search-source-connectors/{connector_id} - Delete a specific connector
 POST /search-source-connectors/{connector_id}/index - Index content from a connector to a search space
 
-Note: Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR, NOTION_CONNECTOR).
+Note: Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR, NOTION_CONNECTOR, GITHUB_CONNECTOR, LINEAR_CONNECTOR).
 """
 from fastapi import APIRouter, Depends, HTTPException, Query, BackgroundTasks
 from sqlalchemy.ext.asyncio import AsyncSession

@@ -19,8 +19,8 @@ from app.schemas import SearchSourceConnectorCreate, SearchSourceConnectorUpdate
 from app.users import current_active_user
 from app.utils.check_ownership import check_ownership
 from pydantic import ValidationError
-from app.tasks.connectors_indexing_tasks import index_slack_messages, index_notion_pages, index_github_repos
-from datetime import datetime, timezone
+from app.tasks.connectors_indexing_tasks import index_slack_messages, index_notion_pages, index_github_repos, index_linear_issues
+from datetime import datetime, timezone, timedelta
 import logging
 
 # Set up logging

@@ -37,7 +37,7 @@ async def create_search_source_connector(
     """
     Create a new search source connector.
 
-    Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR).
+    Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR, etc.).
     The config must contain the appropriate keys for the connector type.
     """
     try:

@@ -131,7 +131,7 @@ async def update_search_source_connector(
     """
     Update a search source connector.
 
-    Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR).
+    Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR, etc.).
    The config must contain the appropriate keys for the connector type.
     """
     try:
@@ -216,10 +216,10 @@ async def index_connector_content(
     Index content from a connector to a search space.
 
     Currently supports:
-    - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels since the last indexing
-      (or the last 365 days if never indexed before)
-    - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages since the last indexing
-      (or the last 365 days if never indexed before)
+    - SLACK_CONNECTOR: Indexes messages from all accessible Slack channels
+    - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages
+    - GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories
+    - LINEAR_CONNECTOR: Indexes issues and comments from Linear
 
     Args:
         connector_id: ID of the connector to use

@@ -251,7 +251,7 @@ async def index_connector_content(
             today = datetime.now().date()
             if connector.last_indexed_at.date() == today:
                 # If last indexed today, go back 1 day to ensure we don't miss anything
-                start_date = (today - datetime.timedelta(days=1)).strftime("%Y-%m-%d")
+                start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
             else:
                 start_date = connector.last_indexed_at.strftime("%Y-%m-%d")

@@ -272,7 +272,7 @@ async def index_connector_content(
             today = datetime.now().date()
             if connector.last_indexed_at.date() == today:
                 # If last indexed today, go back 1 day to ensure we don't miss anything
-                start_date = (today - datetime.timedelta(days=1)).strftime("%Y-%m-%d")
+                start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
             else:
                 start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
@@ -294,6 +294,27 @@ async def index_connector_content(
             logger.info(f"Triggering GitHub indexing for connector {connector_id} into search space {search_space_id}")
             background_tasks.add_task(run_github_indexing_with_new_session, connector_id, search_space_id)
             response_message = "GitHub indexing started in the background."
 
+        elif connector.connector_type == SearchSourceConnectorType.LINEAR_CONNECTOR:
+            # Determine the time range that will be indexed
+            if not connector.last_indexed_at:
+                start_date = "365 days ago"
+            else:
+                # Check if last_indexed_at is today
+                today = datetime.now().date()
+                if connector.last_indexed_at.date() == today:
+                    # If last indexed today, go back 1 day to ensure we don't miss anything
+                    start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
+                else:
+                    start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
+
+            indexing_from = start_date
+            indexing_to = today_str
+
+            # Run indexing in background
+            logger.info(f"Triggering Linear indexing for connector {connector_id} into search space {search_space_id}")
+            background_tasks.add_task(run_linear_indexing_with_new_session, connector_id, search_space_id)
+            response_message = "Linear indexing started in the background."
+
         else:
             raise HTTPException(
@@ -460,3 +481,37 @@ async def run_github_indexing(
         await session.rollback()
         logger.error(f"Critical error in run_github_indexing for connector {connector_id}: {e}", exc_info=True)
         # Optionally update status in DB to indicate failure
+
+# Add new helper functions for Linear indexing
+async def run_linear_indexing_with_new_session(
+    connector_id: int,
+    search_space_id: int
+):
+    """Wrapper to run Linear indexing with its own database session."""
+    logger.info(f"Background task started: Indexing Linear connector {connector_id} into space {search_space_id}")
+    async with async_session_maker() as session:
+        await run_linear_indexing(session, connector_id, search_space_id)
+    logger.info(f"Background task finished: Indexing Linear connector {connector_id}")
+
+async def run_linear_indexing(
+    session: AsyncSession,
+    connector_id: int,
+    search_space_id: int
+):
+    """Runs the Linear indexing task and updates the timestamp."""
+    try:
+        indexed_count, error_message = await index_linear_issues(
+            session, connector_id, search_space_id, update_last_indexed=False
+        )
+        if error_message:
+            logger.error(f"Linear indexing failed for connector {connector_id}: {error_message}")
+            # Optionally update status in DB to indicate failure
+        else:
+            logger.info(f"Linear indexing successful for connector {connector_id}. Indexed {indexed_count} documents.")
+            # Update the last indexed timestamp only on success
+            await update_connector_last_indexed(session, connector_id)
+            await session.commit()  # Commit timestamp update
+    except Exception as e:
+        await session.rollback()
+        logger.error(f"Critical error in run_linear_indexing for connector {connector_id}: {e}", exc_info=True)
+        # Optionally update status in DB to indicate failure
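
These helpers are kicked off by the POST /search-source-connectors/{connector_id}/index endpoint described at the top of this file. A sketch of triggering Linear indexing from a script — the host, the search_space_id query parameter, and the bearer-token scheme are assumptions about a typical deployment, not guarantees from this diff:

import requests

resp = requests.post(
    "http://localhost:8000/search-source-connectors/42/index",  # 42: hypothetical connector ID
    params={"search_space_id": 7},                              # hypothetical search space
    headers={"Authorization": "Bearer <access-token>"},
)
resp.raise_for_status()
print(resp.json())  # e.g. {"message": "Linear indexing started in the background."}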

@@ -67,6 +67,16 @@ class SearchSourceConnectorBase(BaseModel):
             # Ensure the token is not empty
             if not config.get("GITHUB_PAT"):
                 raise ValueError("GITHUB_PAT cannot be empty")
+
+        elif connector_type == SearchSourceConnectorType.LINEAR_CONNECTOR:
+            # For LINEAR_CONNECTOR, only allow LINEAR_API_KEY
+            allowed_keys = ["LINEAR_API_KEY"]
+            if set(config.keys()) != set(allowed_keys):
+                raise ValueError(f"For LINEAR_CONNECTOR connector type, config must only contain these keys: {allowed_keys}")
+
+            # Ensure the token is not empty
+            if not config.get("LINEAR_API_KEY"):
+                raise ValueError("LINEAR_API_KEY cannot be empty")
 
         return config
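
Concretely, the only payload shape this validator accepts for a Linear connector is a config with exactly one non-empty key. A sketch — the name is arbitrary and the key value a placeholder:

payload = {
    "name": "My Linear Connector",
    "connector_type": "LINEAR_CONNECTOR",
    "config": {
        "LINEAR_API_KEY": "lin_api_...",  # must be the only key, and non-empty
    },
}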

@@ -10,6 +10,7 @@ from app.prompts import SUMMARY_PROMPT_TEMPLATE
 from app.connectors.slack_history import SlackHistory
 from app.connectors.notion_history import NotionHistoryConnector
 from app.connectors.github_connector import GitHubConnector
+from app.connectors.linear_connector import LinearConnector
 from slack_sdk.errors import SlackApiError
 import logging

@@ -60,8 +61,20 @@ async def index_slack_messages(
         end_date = datetime.now()
 
         # Use last_indexed_at as start date if available, otherwise use 365 days ago
-
-        start_date = end_date - timedelta(days=365)
+        if connector.last_indexed_at:
+            # Convert dates to be comparable (both timezone-naive)
+            last_indexed_naive = connector.last_indexed_at.replace(tzinfo=None) if connector.last_indexed_at.tzinfo else connector.last_indexed_at
+
+            # Check if last_indexed_at is in the future or after end_date
+            if last_indexed_naive > end_date:
+                logger.warning(f"Last indexed date ({last_indexed_naive.strftime('%Y-%m-%d')}) is in the future. Using 30 days ago instead.")
+                start_date = end_date - timedelta(days=30)
+            else:
+                start_date = last_indexed_naive
+                logger.info(f"Using last_indexed_at ({start_date.strftime('%Y-%m-%d')}) as start date")
+        else:
+            start_date = end_date - timedelta(days=30)  # Use 30 days instead of 365 to catch recent issues
+            logger.info(f"No last_indexed_at found, using {start_date.strftime('%Y-%m-%d')} (30 days ago) as start date")
 
         # Format dates for Slack API
         start_date_str = start_date.strftime("%Y-%m-%d")
@@ -782,3 +795,280 @@ async def index_github_repos(
 
     error_message = "; ".join(errors) if errors else None
     return documents_processed, error_message
+
+async def index_linear_issues(
+    session: AsyncSession,
+    connector_id: int,
+    search_space_id: int,
+    update_last_indexed: bool = True
+) -> Tuple[int, Optional[str]]:
+    """
+    Index Linear issues and comments.
+
+    Args:
+        session: Database session
+        connector_id: ID of the Linear connector
+        search_space_id: ID of the search space to store documents in
+        update_last_indexed: Whether to update the last_indexed_at timestamp (default: True)
+
+    Returns:
+        Tuple containing (number of documents indexed, error message or None)
+    """
+    try:
+        # Get the connector
+        result = await session.execute(
+            select(SearchSourceConnector)
+            .filter(
+                SearchSourceConnector.id == connector_id,
+                SearchSourceConnector.connector_type == SearchSourceConnectorType.LINEAR_CONNECTOR
+            )
+        )
+        connector = result.scalars().first()
+
+        if not connector:
+            return 0, f"Connector with ID {connector_id} not found or is not a Linear connector"
+
+        # Get the Linear token from the connector config
+        linear_token = connector.config.get("LINEAR_API_KEY")
+        if not linear_token:
+            return 0, "Linear API token not found in connector config"
+
+        # Initialize Linear client
+        linear_client = LinearConnector(token=linear_token)
+
+        # Calculate date range
+        end_date = datetime.now()
+
+        # Use last_indexed_at as start date if available, otherwise use 365 days ago
+        if connector.last_indexed_at:
+            # Convert dates to be comparable (both timezone-naive)
+            last_indexed_naive = connector.last_indexed_at.replace(tzinfo=None) if connector.last_indexed_at.tzinfo else connector.last_indexed_at
+
+            # Check if last_indexed_at is in the future or after end_date
+            if last_indexed_naive > end_date:
+                logger.warning(f"Last indexed date ({last_indexed_naive.strftime('%Y-%m-%d')}) is in the future. Using 30 days ago instead.")
+                start_date = end_date - timedelta(days=30)
+            else:
+                start_date = last_indexed_naive
+                logger.info(f"Using last_indexed_at ({start_date.strftime('%Y-%m-%d')}) as start date")
+        else:
+            start_date = end_date - timedelta(days=30)  # Use 30 days instead of 365 to catch recent issues
+            logger.info(f"No last_indexed_at found, using {start_date.strftime('%Y-%m-%d')} (30 days ago) as start date")
+
+        # Format dates for Linear API
+        start_date_str = start_date.strftime("%Y-%m-%d")
+        end_date_str = end_date.strftime("%Y-%m-%d")
+
+        logger.info(f"Fetching Linear issues from {start_date_str} to {end_date_str}")
+
+        # Get issues within date range
+        try:
+            issues, error = linear_client.get_issues_by_date_range(
+                start_date=start_date_str,
+                end_date=end_date_str,
+                include_comments=True
+            )
+
+            if error:
+                logger.error(f"Failed to get Linear issues: {error}")
+
+                # Don't treat "No issues found" as an error that should stop indexing
+                if "No issues found" in error:
+                    logger.info("No issues found is not a critical error, continuing with update")
+                    if update_last_indexed:
+                        connector.last_indexed_at = datetime.now()
+                        await session.commit()
+                        logger.info(f"Updated last_indexed_at to {connector.last_indexed_at} despite no issues found")
+                    return 0, None
+                else:
+                    return 0, f"Failed to get Linear issues: {error}"
+
+            logger.info(f"Retrieved {len(issues)} issues from Linear API")
+
+        except Exception as e:
+            logger.error(f"Exception when calling Linear API: {str(e)}", exc_info=True)
+            return 0, f"Failed to get Linear issues: {str(e)}"
+
+        if not issues:
+            logger.info("No Linear issues found for the specified date range")
+            if update_last_indexed:
+                connector.last_indexed_at = datetime.now()
+                await session.commit()
+                logger.info(f"Updated last_indexed_at to {connector.last_indexed_at} despite no issues found")
+            return 0, None  # Return None instead of error message when no issues found
+
+        # Log issue IDs and titles for debugging
+        logger.info("Issues retrieved from Linear API:")
+        for idx, issue in enumerate(issues[:10]):  # Log first 10 issues
+            logger.info(f"  {idx+1}. {issue.get('identifier', 'Unknown')} - {issue.get('title', 'Unknown')} - Created: {issue.get('createdAt', 'Unknown')} - Updated: {issue.get('updatedAt', 'Unknown')}")
+        if len(issues) > 10:
+            logger.info(f"  ...and {len(issues) - 10} more issues")
+
+        # Get existing documents for this search space and connector type to prevent duplicates
+        existing_docs_result = await session.execute(
+            select(Document)
+            .filter(
+                Document.search_space_id == search_space_id,
+                Document.document_type == DocumentType.LINEAR_CONNECTOR
+            )
+        )
+        existing_docs = existing_docs_result.scalars().all()
+
+        # Create a lookup dictionary of existing documents by issue_id
+        existing_docs_by_issue_id = {}
+        for doc in existing_docs:
+            if "issue_id" in doc.document_metadata:
+                existing_docs_by_issue_id[doc.document_metadata["issue_id"]] = doc
+
+        logger.info(f"Found {len(existing_docs_by_issue_id)} existing Linear documents in database")
+
+        # Log existing document IDs for debugging
+        if existing_docs_by_issue_id:
+            logger.info("Existing Linear document issue IDs in database:")
+            for idx, (issue_id, doc) in enumerate(list(existing_docs_by_issue_id.items())[:10]):  # Log first 10
+                logger.info(f"  {idx+1}. {issue_id} - {doc.document_metadata.get('issue_identifier', 'Unknown')} - {doc.document_metadata.get('issue_title', 'Unknown')}")
+            if len(existing_docs_by_issue_id) > 10:
+                logger.info(f"  ...and {len(existing_docs_by_issue_id) - 10} more existing documents")
+
+        # Track the number of documents indexed
+        documents_indexed = 0
+        documents_updated = 0
+        documents_skipped = 0
+        skipped_issues = []
+
+        # Process each issue
+        for issue in issues:
+            try:
+                issue_id = issue.get("id")
+                issue_identifier = issue.get("identifier", "")
+                issue_title = issue.get("title", "")
+
+                if not issue_id or not issue_title:
+                    logger.warning(f"Skipping issue with missing ID or title: {issue_id or 'Unknown'}")
+                    skipped_issues.append(f"{issue_identifier or 'Unknown'} (missing data)")
+                    documents_skipped += 1
+                    continue
+
+                # Format the issue first to get well-structured data
+                formatted_issue = linear_client.format_issue(issue)
+
+                # Convert issue to markdown format
+                issue_content = linear_client.format_issue_to_markdown(formatted_issue)
+
+                if not issue_content:
+                    logger.warning(f"Skipping issue with no content: {issue_identifier} - {issue_title}")
+                    skipped_issues.append(f"{issue_identifier} (no content)")
+                    documents_skipped += 1
+                    continue
+
+                # Create a short summary for the embedding
+                # This avoids using the LLM and just uses the issue data directly
+                state = formatted_issue.get("state", "Unknown")
+                description = formatted_issue.get("description", "")
+                # Truncate description if it's too long for the summary
+                if description and len(description) > 500:
+                    description = description[:497] + "..."
+
+                # Create a simple summary from the issue data
+                summary_content = f"Linear Issue {issue_identifier}: {issue_title}\n\nStatus: {state}\n\n"
+                if description:
+                    summary_content += f"Description: {description}\n\n"
+
+                # Add comment count
+                comment_count = len(formatted_issue.get("comments", []))
+                summary_content += f"Comments: {comment_count}"
+
+                # Generate embedding for the summary
+                summary_embedding = config.embedding_model_instance.embed(summary_content)
+
+                # Process chunks - using the full issue content with comments
+                chunks = [
+                    Chunk(content=chunk.text, embedding=chunk.embedding)
+                    for chunk in config.chunker_instance.chunk(issue_content)
+                ]
+
+                # Check if this issue already exists in our database
+                existing_document = existing_docs_by_issue_id.get(issue_id)
+
+                if existing_document:
+                    # Update existing document instead of creating a new one
+                    logger.info(f"Updating existing document for issue {issue_identifier} - {issue_title}")
+
+                    # Update document fields
+                    existing_document.title = f"Linear - {issue_identifier}: {issue_title}"
+                    existing_document.document_metadata = {
+                        "issue_id": issue_id,
+                        "issue_identifier": issue_identifier,
+                        "issue_title": issue_title,
+                        "state": state,
+                        "comment_count": comment_count,
+                        "indexed_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+                        "last_updated": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+                    }
+                    existing_document.content = summary_content
+                    existing_document.embedding = summary_embedding
+
+                    # Delete existing chunks and add new ones
+                    await session.execute(
+                        delete(Chunk)
+                        .where(Chunk.document_id == existing_document.id)
+                    )
+
+                    # Assign new chunks to existing document
+                    for chunk in chunks:
+                        chunk.document_id = existing_document.id
+                        session.add(chunk)
+
+                    documents_updated += 1
+                else:
+                    # Create and store new document
+                    logger.info(f"Creating new document for issue {issue_identifier} - {issue_title}")
+                    document = Document(
+                        search_space_id=search_space_id,
+                        title=f"Linear - {issue_identifier}: {issue_title}",
+                        document_type=DocumentType.LINEAR_CONNECTOR,
+                        document_metadata={
+                            "issue_id": issue_id,
+                            "issue_identifier": issue_identifier,
+                            "issue_title": issue_title,
+                            "state": state,
+                            "comment_count": comment_count,
+                            "indexed_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+                        },
+                        content=summary_content,
+                        embedding=summary_embedding,
+                        chunks=chunks
+                    )
+
+                    session.add(document)
+                    documents_indexed += 1
+                    logger.info(f"Successfully indexed new issue {issue_identifier} - {issue_title}")
+
+            except Exception as e:
+                logger.error(f"Error processing issue {issue.get('identifier', 'Unknown')}: {str(e)}", exc_info=True)
+                skipped_issues.append(f"{issue.get('identifier', 'Unknown')} (processing error)")
+                documents_skipped += 1
+                continue  # Skip this issue and continue with others
+
+        # Update the last_indexed_at timestamp for the connector only if requested
+        total_processed = documents_indexed + documents_updated
+        if update_last_indexed:
+            connector.last_indexed_at = datetime.now()
+            logger.info(f"Updated last_indexed_at to {connector.last_indexed_at}")
+
+        # Commit all changes
+        await session.commit()
+        logger.info(f"Successfully committed all Linear document changes to database")
+
+        logger.info(f"Linear indexing completed: {documents_indexed} new issues, {documents_updated} updated, {documents_skipped} skipped")
+        return total_processed, None  # Return None as the error message to indicate success
+
+    except SQLAlchemyError as db_error:
+        await session.rollback()
+        logger.error(f"Database error: {str(db_error)}", exc_info=True)
+        return 0, f"Database error: {str(db_error)}"
+    except Exception as e:
+        await session.rollback()
+        logger.error(f"Failed to index Linear issues: {str(e)}", exc_info=True)
+        return 0, f"Failed to index Linear issues: {str(e)}"
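
For testing outside the API layer, the task can be awaited directly with a fresh session; the session-maker import path below is an assumption, and the IDs are placeholders:

import asyncio

async def main():
    from app.db import async_session_maker  # assumption: actual module path may differ
    async with async_session_maker() as session:
        count, err = await index_linear_issues(session, connector_id=42, search_space_id=7)
        print(f"indexed={count} error={err}")

asyncio.run(main())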

@@ -270,6 +270,32 @@ async def stream_connector_search_results(
             # Add documents to collection
             all_raw_documents.extend(github_chunks)
 
+        # Linear Connector
+        if connector == "LINEAR_CONNECTOR":
+            # Send terminal message about starting search
+            yield streaming_service.add_terminal_message("Starting to search for Linear issues...")
+
+            # Search using Linear API with reformulated query
+            result_object, linear_chunks = await connector_service.search_linear(
+                user_query=reformulated_query,
+                user_id=user_id,
+                search_space_id=search_space_id,
+                top_k=TOP_K
+            )
+
+            # Send terminal message about search results
+            yield streaming_service.add_terminal_message(
+                f"Found {len(result_object['sources'])} relevant results from Linear",
+                "success"
+            )
+
+            # Update sources
+            all_sources.append(result_object)
+            yield streaming_service.update_sources(all_sources)
+
+            # Add documents to collection
+            all_raw_documents.extend(linear_chunks)
+

@@ -559,3 +559,87 @@ class ConnectorService:
         }
 
         return result_object, github_chunks
+
+    async def search_linear(self, user_query: str, user_id: str, search_space_id: int, top_k: int = 20) -> tuple:
+        """
+        Search for Linear issues and comments and return both the source information and langchain documents
+
+        Args:
+            user_query: The user's query
+            user_id: The user's ID
+            search_space_id: The search space ID to search in
+            top_k: Maximum number of results to return
+
+        Returns:
+            tuple: (sources_info, langchain_documents)
+        """
+        linear_chunks = await self.retriever.hybrid_search(
+            query_text=user_query,
+            top_k=top_k,
+            user_id=user_id,
+            search_space_id=search_space_id,
+            document_type="LINEAR_CONNECTOR"
+        )
+
+        # Process each chunk and create sources directly without deduplication
+        sources_list = []
+        for i, chunk in enumerate(linear_chunks):
+            # Fix for UI
+            linear_chunks[i]['document']['id'] = self.source_id_counter
+
+            # Extract document metadata
+            document = chunk.get('document', {})
+            metadata = document.get('metadata', {})
+
+            # Extract Linear-specific metadata
+            issue_identifier = metadata.get('issue_identifier', '')
+            issue_title = metadata.get('issue_title', 'Untitled Issue')
+            issue_state = metadata.get('state', '')
+            comment_count = metadata.get('comment_count', 0)
+
+            # Create a more descriptive title for Linear issues
+            title = f"Linear: {issue_identifier} - {issue_title}"
+            if issue_state:
+                title += f" ({issue_state})"
+
+            # Create a more descriptive description for Linear issues
+            description = chunk.get('content', '')[:100]
+            if len(description) == 100:
+                description += "..."
+
+            # Add comment count info to description
+            if comment_count:
+                if description:
+                    description += f" | Comments: {comment_count}"
+                else:
+                    description = f"Comments: {comment_count}"
+
+            # For URL, we could construct a URL to the Linear issue if we have the workspace info
+            # For now, use a generic placeholder
+            url = ""
+            if issue_identifier:
+                # This is a generic format, may need to be adjusted based on actual Linear workspace
+                url = f"https://linear.app/issue/{issue_identifier}"
+
+            source = {
+                "id": self.source_id_counter,
+                "title": title,
+                "description": description,
+                "url": url,
+                "issue_identifier": issue_identifier,
+                "state": issue_state,
+                "comment_count": comment_count
+            }
+
+            self.source_id_counter += 1
+            sources_list.append(source)
+
+        # Create result object
+        result_object = {
+            "id": 9,  # Assign a unique ID for the Linear connector
+            "name": "Linear Issues",
+            "type": "LINEAR_CONNECTOR",
+            "sources": sources_list,
+        }
+
+        return result_object, linear_chunks

@@ -45,6 +45,7 @@ const getConnectorTypeDisplay = (type: string): string => {
     "SLACK_CONNECTOR": "Slack",
     "NOTION_CONNECTOR": "Notion",
     "GITHUB_CONNECTOR": "GitHub",
+    "LINEAR_CONNECTOR": "Linear",
     // Add other connector types here as needed
   };
   return typeMap[type] || type;
@@ -0,0 +1,321 @@
"use client";

import { useState } from "react";
import { useRouter, useParams } from "next/navigation";
import { motion } from "framer-motion";
import { zodResolver } from "@hookform/resolvers/zod";
import { useForm } from "react-hook-form";
import * as z from "zod";
import { toast } from "sonner";
import { ArrowLeft, Check, Info, Loader2 } from "lucide-react";

import { useSearchSourceConnectors } from "@/hooks/useSearchSourceConnectors";
import {
  Form,
  FormControl,
  FormDescription,
  FormField,
  FormItem,
  FormLabel,
  FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Button } from "@/components/ui/button";
import {
  Card,
  CardContent,
  CardDescription,
  CardFooter,
  CardHeader,
  CardTitle,
} from "@/components/ui/card";
import {
  Alert,
  AlertDescription,
  AlertTitle,
} from "@/components/ui/alert";
import {
  Accordion,
  AccordionContent,
  AccordionItem,
  AccordionTrigger,
} from "@/components/ui/accordion";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";

// Define the form schema with Zod
const linearConnectorFormSchema = z.object({
  name: z.string().min(3, {
    message: "Connector name must be at least 3 characters.",
  }),
  api_key: z.string().min(10, {
    message: "Linear API Key is required and must be valid.",
  }).regex(/^lin_api_/, {
    message: "Linear API Key should start with 'lin_api_'",
  }),
});

// Define the type for the form values
type LinearConnectorFormValues = z.infer<typeof linearConnectorFormSchema>;

export default function LinearConnectorPage() {
  const router = useRouter();
  const params = useParams();
  const searchSpaceId = params.search_space_id as string;
  const [isSubmitting, setIsSubmitting] = useState(false);
  const { createConnector } = useSearchSourceConnectors();

  // Initialize the form
  const form = useForm<LinearConnectorFormValues>({
    resolver: zodResolver(linearConnectorFormSchema),
    defaultValues: {
      name: "Linear Connector",
      api_key: "",
    },
  });

  // Handle form submission
  const onSubmit = async (values: LinearConnectorFormValues) => {
    setIsSubmitting(true);
    try {
      await createConnector({
        name: values.name,
        connector_type: "LINEAR_CONNECTOR",
        config: {
          LINEAR_API_KEY: values.api_key,
        },
        is_indexable: true,
        last_indexed_at: null,
      });

      toast.success("Linear connector created successfully!");

      // Navigate back to connectors page
      router.push(`/dashboard/${searchSpaceId}/connectors`);
    } catch (error) {
      console.error("Error creating connector:", error);
      toast.error(error instanceof Error ? error.message : "Failed to create connector");
    } finally {
      setIsSubmitting(false);
    }
  };

  return (
    <div className="container mx-auto py-8 max-w-3xl">
      <Button
        variant="ghost"
        className="mb-6"
        onClick={() => router.push(`/dashboard/${searchSpaceId}/connectors/add`)}
      >
        <ArrowLeft className="mr-2 h-4 w-4" />
        Back to Connectors
      </Button>

      <motion.div
        initial={{ opacity: 0, y: 20 }}
        animate={{ opacity: 1, y: 0 }}
        transition={{ duration: 0.5 }}
      >
        <Tabs defaultValue="connect" className="w-full">
          <TabsList className="grid w-full grid-cols-2 mb-6">
            <TabsTrigger value="connect">Connect</TabsTrigger>
            <TabsTrigger value="documentation">Documentation</TabsTrigger>
          </TabsList>

          <TabsContent value="connect">
            <Card className="border-2 border-border">
              <CardHeader>
                <CardTitle className="text-2xl font-bold">Connect Linear Workspace</CardTitle>
                <CardDescription>
                  Integrate with Linear to search and retrieve information from your issues and comments. This connector can index your Linear content for search.
                </CardDescription>
              </CardHeader>
              <CardContent>
                <Alert className="mb-6 bg-muted">
                  <Info className="h-4 w-4" />
                  <AlertTitle>Linear API Key Required</AlertTitle>
                  <AlertDescription>
                    You'll need a Linear API Key to use this connector. You can create a Linear API key from{" "}
                    <a
                      href="https://linear.app/settings/api"
                      target="_blank"
                      rel="noopener noreferrer"
                      className="font-medium underline underline-offset-4"
                    >
                      Linear API Settings
                    </a>
                  </AlertDescription>
                </Alert>

                <Form {...form}>
                  <form onSubmit={form.handleSubmit(onSubmit)} className="space-y-6">
                    <FormField
                      control={form.control}
                      name="name"
                      render={({ field }) => (
                        <FormItem>
                          <FormLabel>Connector Name</FormLabel>
                          <FormControl>
                            <Input placeholder="My Linear Connector" {...field} />
                          </FormControl>
                          <FormDescription>
                            A friendly name to identify this connector.
                          </FormDescription>
                          <FormMessage />
                        </FormItem>
                      )}
                    />

                    <FormField
                      control={form.control}
                      name="api_key"
                      render={({ field }) => (
                        <FormItem>
                          <FormLabel>Linear API Key</FormLabel>
                          <FormControl>
                            <Input
                              type="password"
                              placeholder="lin_api_..."
                              {...field}
                            />
                          </FormControl>
                          <FormDescription>
                            Your Linear API Key will be encrypted and stored securely. It typically starts with "lin_api_".
                          </FormDescription>
                          <FormMessage />
                        </FormItem>
                      )}
                    />

                    <div className="flex justify-end">
                      <Button
                        type="submit"
                        disabled={isSubmitting}
                        className="w-full sm:w-auto"
                      >
                        {isSubmitting ? (
                          <>
                            <Loader2 className="mr-2 h-4 w-4 animate-spin" />
                            Connecting...
                          </>
                        ) : (
                          <>
                            <Check className="mr-2 h-4 w-4" />
                            Connect Linear
                          </>
                        )}
                      </Button>
                    </div>
                  </form>
                </Form>
              </CardContent>
              <CardFooter className="flex flex-col items-start border-t bg-muted/50 px-6 py-4">
                <h4 className="text-sm font-medium">What you get with Linear integration:</h4>
                <ul className="mt-2 list-disc pl-5 text-sm text-muted-foreground">
                  <li>Search through all your Linear issues and comments</li>
                  <li>Access issue titles, descriptions, and full discussion threads</li>
                  <li>Connect your team's project management directly to your search space</li>
                  <li>Keep your search results up-to-date with the latest Linear content</li>
                  <li>Index your Linear issues for enhanced search capabilities</li>
                </ul>
              </CardFooter>
            </Card>
          </TabsContent>

          <TabsContent value="documentation">
            <Card className="border-2 border-border">
              <CardHeader>
                <CardTitle className="text-2xl font-bold">Linear Connector Documentation</CardTitle>
                <CardDescription>
                  Learn how to set up and use the Linear connector to index your project management data.
                </CardDescription>
              </CardHeader>
              <CardContent className="space-y-6">
                <div>
                  <h3 className="text-xl font-semibold mb-2">How it works</h3>
                  <p className="text-muted-foreground">
                    The Linear connector uses the Linear GraphQL API to fetch all issues and comments that the API key has access to within a workspace.
                  </p>
                  <ul className="mt-2 list-disc pl-5 text-muted-foreground">
                    <li>For follow-up indexing runs, the connector retrieves issues and comments that have been updated since the last indexing attempt.</li>
                    <li>Indexing is configured to run periodically, so updates should appear in your search results within minutes.</li>
                  </ul>
                </div>

                <Accordion type="single" collapsible className="w-full">
                  <AccordionItem value="authorization">
                    <AccordionTrigger className="text-lg font-medium">Authorization</AccordionTrigger>
                    <AccordionContent className="space-y-4">
                      <Alert className="bg-muted">
                        <Info className="h-4 w-4" />
                        <AlertTitle>Read-Only Access is Sufficient</AlertTitle>
                        <AlertDescription>
                          You only need a read-only API key for this connector to work. This limits the permissions to just reading your Linear data.
                        </AlertDescription>
                      </Alert>

                      <div className="space-y-6">
                        <div>
                          <h4 className="font-medium mb-2">Step 1: Create an API key</h4>
                          <ol className="list-decimal pl-5 space-y-3">
                            <li>Log in to your Linear account</li>
                            <li>Navigate to <a href="https://linear.app/settings/api" target="_blank" rel="noopener noreferrer" className="font-medium underline underline-offset-4">https://linear.app/settings/api</a> in your browser.</li>
                            <li>Alternatively, click on your profile picture → Settings → API</li>
                            <li>Click the <strong>+ New API key</strong> button.</li>
                            <li>Enter a description for your key (like "Search Connector").</li>
                            <li>Select "Read-only" as the permission.</li>
                            <li>Click <strong>Create</strong> to generate the API key.</li>
                            <li>Copy the generated API key that starts with 'lin_api_' as it will only be shown once.</li>
                          </ol>
                        </div>

                        <div>
                          <h4 className="font-medium mb-2">Step 2: Grant necessary access</h4>
                          <p className="text-muted-foreground mb-3">
                            The API key will have access to all issues and comments that your user account can see. If you're creating the key as an admin, it will have access to all issues in the workspace.
                          </p>
                          <Alert className="bg-muted">
                            <Info className="h-4 w-4" />
                            <AlertTitle>Data Privacy</AlertTitle>
                            <AlertDescription>
                              Only issues and comments will be indexed. Linear attachments and linked files are not indexed by this connector.
                            </AlertDescription>
                          </Alert>
                        </div>
                      </div>
                    </AccordionContent>
                  </AccordionItem>

                  <AccordionItem value="indexing">
                    <AccordionTrigger className="text-lg font-medium">Indexing</AccordionTrigger>
                    <AccordionContent className="space-y-4">
                      <ol className="list-decimal pl-5 space-y-3">
                        <li>Navigate to the Connector Dashboard and select the <strong>Linear</strong> Connector.</li>
                        <li>Place the <strong>API Key</strong> in the form field.</li>
                        <li>Click <strong>Connect</strong> to establish the connection.</li>
                        <li>Once connected, your Linear issues will be indexed automatically.</li>
                      </ol>

                      <Alert className="bg-muted">
                        <Info className="h-4 w-4" />
                        <AlertTitle>What Gets Indexed</AlertTitle>
                        <AlertDescription>
                          <p className="mb-2">The Linear connector indexes the following data:</p>
                          <ul className="list-disc pl-5">
                            <li>Issue titles and identifiers (e.g., PROJ-123)</li>
                            <li>Issue descriptions</li>
                            <li>Issue comments</li>
                            <li>Issue status and metadata</li>
                          </ul>
                        </AlertDescription>
                      </Alert>
                    </AccordionContent>
                  </AccordionItem>
                </Accordion>
              </CardContent>
            </Card>
          </TabsContent>
        </Tabs>
      </motion.div>
    </div>
  );
}
|
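For readers implementing or auditing a similar sync, the fetch-and-filter behavior the docs page describes can be sketched against Linear's public GraphQL endpoint. This is a minimal illustration, not SurfSense's actual indexer: the endpoint URL and issue fields come from Linear's public API, while the `since` checkpoint handling and the omission of pagination and rate-limit handling are simplifications.

```ts
// Minimal sketch of the incremental fetch the docs page describes.
// Assumptions: no pagination, no retry/rate-limit handling.
const LINEAR_GRAPHQL_URL = "https://api.linear.app/graphql";

async function fetchIssuesUpdatedSince(apiKey: string, sinceISO: string) {
  // Linear personal API keys (lin_api_...) go directly in the
  // Authorization header; a read-only key is sufficient here.
  const query = `
    query {
      issues(filter: { updatedAt: { gt: "${sinceISO}" } }) {
        nodes {
          identifier   # e.g. PROJ-123
          title
          description
          updatedAt
          comments { nodes { body } }
        }
      }
    }`;

  const res = await fetch(LINEAR_GRAPHQL_URL, {
    method: "POST",
    headers: { "Content-Type": "application/json", Authorization: apiKey },
    body: JSON.stringify({ query }),
  });
  if (!res.ok) throw new Error(`Linear API responded with ${res.status}`);
  const { data, errors } = await res.json();
  if (errors) throw new Error(errors[0].message);
  return data.issues.nodes;
}

// Usage: fetchIssuesUpdatedSince(process.env.LINEAR_API_KEY!, "2025-04-16T00:00:00Z")
```

On the first run, `sinceISO` would simply be set far enough in the past to capture everything the key can see; subsequent runs pass the timestamp of the last successful indexing attempt.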
@@ -14,6 +14,8 @@ import {
  IconChevronRight,
  IconMail,
  IconWorldWww,
  IconTicket,
  IconLayoutKanban,
} from "@tabler/icons-react";
import { AnimatePresence, motion } from "framer-motion";
import Link from "next/link";

@@ -78,6 +80,26 @@ const connectorCategories: ConnectorCategory[] = [
      },
    ],
  },
  {
    id: "project-management",
    title: "Project Management",
    connectors: [
      {
        id: "linear-connector",
        title: "Linear",
        description: "Connect to Linear to search issues, comments and project data.",
        icon: <IconLayoutKanban className="h-6 w-6" />,
        status: "available",
      },
      {
        id: "jira-connector",
        title: "Jira",
        description: "Connect to Jira to search issues, tickets and project data.",
        icon: <IconTicket className="h-6 w-6" />,
        status: "coming-soon",
      },
    ],
  },
  {
    id: "knowledge-bases",
    title: "Knowledge Bases",

@@ -161,7 +183,7 @@
export default function ConnectorsPage() {
  const params = useParams();
  const searchSpaceId = params.search_space_id as string;
  const [expandedCategories, setExpandedCategories] = useState<string[]>(["search-engines", "knowledge-bases"]);
  const [expandedCategories, setExpandedCategories] = useState<string[]>(["search-engines", "knowledge-bases", "project-management"]);

  const toggleCategory = (categoryId: string) => {
    setExpandedCategories(prev =>
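The new category entry above implies a connector-registry shape along these lines. This is a sketch inferred from the fields used in the diff: the `ConnectorCategory` name appears in the hunk header, while the exact field types are assumptions.

```ts
import type { ReactNode } from "react";

// Inferred from usage above; the real declarations in SurfSense may differ.
type Connector = {
  id: string;
  title: string;
  description: string;
  icon: ReactNode;
  status: "available" | "coming-soon";
};

type ConnectorCategory = {
  id: string;
  title: string;
  connectors: Connector[];
};
```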
@@ -1,6 +1,7 @@
"use client";

import { cn } from "@/lib/utils";
import { DocumentViewer } from "@/components/document-viewer";
import { JsonMetadataViewer } from "@/components/json-metadata-viewer";
import {
  AlertDialog,
  AlertDialogAction,

@@ -12,7 +13,6 @@ import {
  AlertDialogTitle,
  AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import { Checkbox } from "@/components/ui/checkbox";
import {

@@ -43,6 +43,9 @@ import {
  TableHeader,
  TableRow,
} from "@/components/ui/table";
import { useDocuments } from "@/hooks/use-documents";
import { cn } from "@/lib/utils";
import { IconBrandGithub, IconBrandNotion, IconBrandSlack, IconBrandYoutube, IconLayoutKanban } from "@tabler/icons-react";
import {
  ColumnDef,
  ColumnFiltersState,

@@ -59,6 +62,7 @@ import {
  getSortedRowModel,
  useReactTable,
} from "@tanstack/react-table";
import { AnimatePresence, motion } from "framer-motion";
import {
  AlertCircle,
  ChevronDown,

@@ -70,31 +74,22 @@ import {
  CircleAlert,
  CircleX,
  Columns3,
  Filter,
  ListFilter,
  Plus,
  FileText,
  Globe,
  MessageSquare,
  FileX,
  File,
  Trash,
  FileX,
  Filter,
  Globe,
  ListFilter,
  MoreHorizontal,
  Webhook,
  Trash,
  Webhook
} from "lucide-react";
import { useEffect, useId, useMemo, useRef, useState, useContext } from "react";
import { motion, AnimatePresence } from "framer-motion";
import { useParams } from "next/navigation";
import { useDocuments } from "@/hooks/use-documents";
import React from "react";
import { toast } from "sonner";
import React, { useContext, useEffect, useId, useMemo, useRef, useState } from "react";
import ReactMarkdown from "react-markdown";
import rehypeRaw from "rehype-raw";
import rehypeSanitize from "rehype-sanitize";
import remarkGfm from "remark-gfm";
import { DocumentViewer } from "@/components/document-viewer";
import { JsonMetadataViewer } from "@/components/json-metadata-viewer";
import { IconBrandGithub, IconBrandNotion, IconBrandSlack, IconBrandYoutube } from "@tabler/icons-react";
import { toast } from "sonner";

// Define animation variants for reuse
const fadeInScale = {

@@ -114,7 +109,7 @@ const fadeInScale = {
type Document = {
  id: number;
  title: string;
  document_type: "EXTENSION" | "CRAWLED_URL" | "SLACK_CONNECTOR" | "NOTION_CONNECTOR" | "FILE" | "YOUTUBE_VIDEO";
  document_type: "EXTENSION" | "CRAWLED_URL" | "SLACK_CONNECTOR" | "NOTION_CONNECTOR" | "FILE" | "YOUTUBE_VIDEO" | "LINEAR_CONNECTOR";
  document_metadata: any;
  content: string;
  created_at: string;

@@ -143,6 +138,7 @@ const documentTypeIcons = {
  FILE: File,
  YOUTUBE_VIDEO: IconBrandYoutube,
  GITHUB_CONNECTOR: IconBrandGithub,
  LINEAR_CONNECTOR: IconLayoutKanban,
} as const;

const columns: ColumnDef<Document>[] = [
|
|||
);
|
||||
}
|
||||
|
||||
export { DocumentsTable }
|
||||
export { DocumentsTable };
|
||||
|
||||
|
|
|
@ -36,7 +36,7 @@ export function ModernHeroWithGradients() {
|
|||
</h1>
|
||||
</div>
|
||||
<p className="mx-auto max-w-3xl py-6 text-center text-base text-gray-600 dark:text-neutral-300 md:text-lg lg:text-xl">
|
||||
A Customizable AI Research Agent just like NotebookLM or Perplexity, but connected to external sources such as search engines (Tavily), Slack, Notion, YouTube, GitHub and more.
|
||||
A Customizable AI Research Agent just like NotebookLM or Perplexity, but connected to external sources such as search engines (Tavily), Slack, Linear, Notion, YouTube, GitHub and more.
|
||||
</p>
|
||||
<div className="flex flex-col items-center gap-6 py-6 sm:flex-row">
|
||||
<Link
|
||||
|
|
|
@ -11,7 +11,7 @@ import {
|
|||
Link,
|
||||
Webhook,
|
||||
} from 'lucide-react';
|
||||
import { IconBrandNotion, IconBrandSlack, IconBrandYoutube, IconBrandGithub } from "@tabler/icons-react";
|
||||
import { IconBrandNotion, IconBrandSlack, IconBrandYoutube, IconBrandGithub, IconLayoutKanban } from "@tabler/icons-react";
|
||||
import { Button } from '@/components/ui/button';
|
||||
import { Connector, ResearchMode } from './types';
|
||||
|
||||
|
@ -20,6 +20,8 @@ export const getConnectorIcon = (connectorType: string) => {
|
|||
const iconProps = { className: "h-4 w-4" };
|
||||
|
||||
switch(connectorType) {
|
||||
case 'LINEAR_CONNECTOR':
|
||||
return <IconLayoutKanban {...iconProps} />;
|
||||
case 'GITHUB_CONNECTOR':
|
||||
return <IconBrandGithub {...iconProps} />;
|
||||
case 'YOUTUBE_VIDEO':
|
||||
|
|
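With the new case in place, any component that receives a connector type string can render a matching icon. A hypothetical usage — the component name and markup are illustrative, not part of this commit:

```tsx
// Illustrative chip that labels a source with its connector icon.
function SourceChip({ connectorType, label }: { connectorType: string; label: string }) {
  return (
    <span className="flex items-center gap-1">
      {getConnectorIcon(connectorType)}
      {label}
    </span>
  );
}

// e.g. <SourceChip connectorType="LINEAR_CONNECTOR" label="Linear" />
```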