Fixed formatting and linting post Jira connector PR

This commit is contained in:
Utkarsh-Patel-13 2025-07-25 10:52:34 -07:00
commit 2827522ebc
30 changed files with 5428 additions and 3279 deletions

View file

@ -10,7 +10,7 @@
# SurfSense # SurfSense
While tools like NotebookLM and Perplexity are impressive and highly effective for conducting research on any topic/query, SurfSense elevates this capability by integrating with your personal knowledge base. It is a highly customizable AI research agent, connected to external sources such as search engines (Tavily, LinkUp), Slack, Linear, Notion, YouTube, GitHub, Discord and more to come. While tools like NotebookLM and Perplexity are impressive and highly effective for conducting research on any topic/query, SurfSense elevates this capability by integrating with your personal knowledge base. It is a highly customizable AI research agent, connected to external sources such as search engines (Tavily, LinkUp), Slack, Linear, Jira, Notion, YouTube, GitHub, Discord and more to come.
<div align="center"> <div align="center">
<a href="https://trendshift.io/repositories/13606" target="_blank"><img src="https://trendshift.io/api/badge/repositories/13606" alt="MODSetter%2FSurfSense | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a> <a href="https://trendshift.io/repositories/13606" target="_blank"><img src="https://trendshift.io/api/badge/repositories/13606" alt="MODSetter%2FSurfSense | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
@ -63,6 +63,7 @@ Open source and easy to deploy locally.
- Search Engines (Tavily, LinkUp) - Search Engines (Tavily, LinkUp)
- Slack - Slack
- Linear - Linear
- Jira
- Notion - Notion
- Youtube Videos - Youtube Videos
- GitHub - GitHub

View file

@ -0,0 +1 @@
{"2d0ec64d93969318101ee479b664221b32241665":{"files":{"surfsense_web/app/dashboard/[search_space_id]/documents/(manage)/page.tsx":["EHKKvlOK0vfy0GgHwlG/J2Bx5rw=",true]},"modified":1753426633288}}

View file

@ -20,18 +20,33 @@ depends_on: str | Sequence[str] | None = None
def upgrade() -> None: def upgrade() -> None:
"""Upgrade schema - add LiteLLMProvider enum, LLMConfig table and user LLM preferences.""" """Upgrade schema - add LiteLLMProvider enum, LLMConfig table and user LLM preferences."""
# Check if enum type exists and create if it doesn't # Create enum only if not exists
op.execute(""" op.execute(
"""
DO $$ DO $$
BEGIN BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'litellmprovider') THEN IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'litellmprovider') THEN
CREATE TYPE litellmprovider AS ENUM ('OPENAI', 'ANTHROPIC', 'GROQ', 'COHERE', 'HUGGINGFACE', 'AZURE_OPENAI', 'GOOGLE', 'AWS_BEDROCK', 'OLLAMA', 'MISTRAL', 'TOGETHER_AI', 'REPLICATE', 'PALM', 'VERTEX_AI', 'ANYSCALE', 'PERPLEXITY', 'DEEPINFRA', 'AI21', 'NLPCLOUD', 'ALEPH_ALPHA', 'PETALS', 'CUSTOM'); CREATE TYPE litellmprovider AS ENUM (
'OPENAI', 'ANTHROPIC', 'GROQ', 'COHERE', 'HUGGINGFACE',
'AZURE_OPENAI', 'GOOGLE', 'AWS_BEDROCK', 'OLLAMA', 'MISTRAL',
'TOGETHER_AI', 'REPLICATE', 'PALM', 'VERTEX_AI', 'ANYSCALE',
'PERPLEXITY', 'DEEPINFRA', 'AI21', 'NLPCLOUD', 'ALEPH_ALPHA',
'PETALS', 'CUSTOM'
);
END IF; END IF;
END$$; END$$;
""") """
)
# Create llm_configs table using raw SQL to avoid enum creation conflicts # Create llm_configs table only if it doesn't already exist
op.execute(""" op.execute(
"""
DO $$
BEGIN
IF NOT EXISTS (
SELECT FROM information_schema.tables
WHERE table_name = 'llm_configs'
) THEN
CREATE TABLE llm_configs ( CREATE TABLE llm_configs (
id SERIAL PRIMARY KEY, id SERIAL PRIMARY KEY,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(),
@ -43,41 +58,75 @@ def upgrade() -> None:
api_base VARCHAR(500), api_base VARCHAR(500),
litellm_params JSONB, litellm_params JSONB,
user_id UUID NOT NULL REFERENCES "user"(id) ON DELETE CASCADE user_id UUID NOT NULL REFERENCES "user"(id) ON DELETE CASCADE
);
END IF;
END$$;
"""
) )
""")
# Create indexes # Create indexes if they don't exist
op.create_index(op.f("ix_llm_configs_id"), "llm_configs", ["id"], unique=False) op.execute(
op.create_index( """
op.f("ix_llm_configs_created_at"), "llm_configs", ["created_at"], unique=False DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_indexes
WHERE tablename = 'llm_configs' AND indexname = 'ix_llm_configs_id'
) THEN
CREATE INDEX ix_llm_configs_id ON llm_configs(id);
END IF;
IF NOT EXISTS (
SELECT 1 FROM pg_indexes
WHERE tablename = 'llm_configs' AND indexname = 'ix_llm_configs_created_at'
) THEN
CREATE INDEX ix_llm_configs_created_at ON llm_configs(created_at);
END IF;
IF NOT EXISTS (
SELECT 1 FROM pg_indexes
WHERE tablename = 'llm_configs' AND indexname = 'ix_llm_configs_name'
) THEN
CREATE INDEX ix_llm_configs_name ON llm_configs(name);
END IF;
END$$;
"""
) )
op.create_index(op.f("ix_llm_configs_name"), "llm_configs", ["name"], unique=False)
# Add LLM preference columns to user table # Safely add columns to user table
op.add_column("user", sa.Column("long_context_llm_id", sa.Integer(), nullable=True)) bind = op.get_bind()
op.add_column("user", sa.Column("fast_llm_id", sa.Integer(), nullable=True)) inspector = sa.inspect(bind)
op.add_column("user", sa.Column("strategic_llm_id", sa.Integer(), nullable=True)) existing_columns = [col["name"] for col in inspector.get_columns("user")]
# Create foreign key constraints for LLM preferences with op.batch_alter_table("user") as batch_op:
op.create_foreign_key( if "long_context_llm_id" not in existing_columns:
batch_op.add_column(
sa.Column("long_context_llm_id", sa.Integer(), nullable=True)
)
batch_op.create_foreign_key(
op.f("fk_user_long_context_llm_id_llm_configs"), op.f("fk_user_long_context_llm_id_llm_configs"),
"user",
"llm_configs", "llm_configs",
["long_context_llm_id"], ["long_context_llm_id"],
["id"], ["id"],
ondelete="SET NULL", ondelete="SET NULL",
) )
op.create_foreign_key(
if "fast_llm_id" not in existing_columns:
batch_op.add_column(sa.Column("fast_llm_id", sa.Integer(), nullable=True))
batch_op.create_foreign_key(
op.f("fk_user_fast_llm_id_llm_configs"), op.f("fk_user_fast_llm_id_llm_configs"),
"user",
"llm_configs", "llm_configs",
["fast_llm_id"], ["fast_llm_id"],
["id"], ["id"],
ondelete="SET NULL", ondelete="SET NULL",
) )
op.create_foreign_key(
if "strategic_llm_id" not in existing_columns:
batch_op.add_column(
sa.Column("strategic_llm_id", sa.Integer(), nullable=True)
)
batch_op.create_foreign_key(
op.f("fk_user_strategic_llm_id_llm_configs"), op.f("fk_user_strategic_llm_id_llm_configs"),
"user",
"llm_configs", "llm_configs",
["strategic_llm_id"], ["strategic_llm_id"],
["id"], ["id"],

View file

@ -6,6 +6,8 @@ Revises: 11
from collections.abc import Sequence from collections.abc import Sequence
from sqlalchemy import inspect
from alembic import op from alembic import op
# revision identifiers, used by Alembic. # revision identifiers, used by Alembic.
@ -18,47 +20,73 @@ depends_on: str | Sequence[str] | None = None
def upgrade() -> None: def upgrade() -> None:
"""Upgrade schema - add LogLevel and LogStatus enums and logs table.""" """Upgrade schema - add LogLevel and LogStatus enums and logs table."""
# Create LogLevel enum # Create LogLevel enum if it doesn't exist
op.execute(""" op.execute(
CREATE TYPE loglevel AS ENUM ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL') """
""") DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'loglevel') THEN
CREATE TYPE loglevel AS ENUM ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL');
END IF;
END$$;
"""
)
# Create LogStatus enum # Create LogStatus enum if it doesn't exist
op.execute(""" op.execute(
CREATE TYPE logstatus AS ENUM ('IN_PROGRESS', 'SUCCESS', 'FAILED') """
""") DO $$
BEGIN
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'logstatus') THEN
CREATE TYPE logstatus AS ENUM ('IN_PROGRESS', 'SUCCESS', 'FAILED');
END IF;
END$$;
"""
)
# Create logs table # Create logs table if it doesn't exist
op.execute(""" op.execute(
CREATE TABLE logs ( """
CREATE TABLE IF NOT EXISTS logs (
id SERIAL PRIMARY KEY, id SERIAL PRIMARY KEY,
created_at TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT NOW(), created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
level loglevel NOT NULL, level loglevel NOT NULL,
status logstatus NOT NULL, status logstatus NOT NULL,
message TEXT NOT NULL, message TEXT NOT NULL,
source VARCHAR(200), source VARCHAR(200),
log_metadata JSONB DEFAULT '{}', log_metadata JSONB DEFAULT '{}',
search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE search_space_id INTEGER NOT NULL REFERENCES searchspaces(id) ON DELETE CASCADE
);
"""
) )
""")
# Create indexes # Get existing indexes
op.create_index(op.f("ix_logs_id"), "logs", ["id"], unique=False) conn = op.get_bind()
op.create_index(op.f("ix_logs_created_at"), "logs", ["created_at"], unique=False) inspector = inspect(conn)
op.create_index(op.f("ix_logs_level"), "logs", ["level"], unique=False) existing_indexes = [idx["name"] for idx in inspector.get_indexes("logs")]
op.create_index(op.f("ix_logs_status"), "logs", ["status"], unique=False)
op.create_index(op.f("ix_logs_source"), "logs", ["source"], unique=False) # Create indexes only if they don't already exist
if "ix_logs_id" not in existing_indexes:
op.create_index("ix_logs_id", "logs", ["id"])
if "ix_logs_created_at" not in existing_indexes:
op.create_index("ix_logs_created_at", "logs", ["created_at"])
if "ix_logs_level" not in existing_indexes:
op.create_index("ix_logs_level", "logs", ["level"])
if "ix_logs_status" not in existing_indexes:
op.create_index("ix_logs_status", "logs", ["status"])
if "ix_logs_source" not in existing_indexes:
op.create_index("ix_logs_source", "logs", ["source"])
def downgrade() -> None: def downgrade() -> None:
"""Downgrade schema - remove logs table and enums.""" """Downgrade schema - remove logs table and enums."""
# Drop indexes # Drop indexes
op.drop_index(op.f("ix_logs_source"), table_name="logs") op.drop_index("ix_logs_source", table_name="logs")
op.drop_index(op.f("ix_logs_status"), table_name="logs") op.drop_index("ix_logs_status", table_name="logs")
op.drop_index(op.f("ix_logs_level"), table_name="logs") op.drop_index("ix_logs_level", table_name="logs")
op.drop_index(op.f("ix_logs_created_at"), table_name="logs") op.drop_index("ix_logs_created_at", table_name="logs")
op.drop_index(op.f("ix_logs_id"), table_name="logs") op.drop_index("ix_logs_id", table_name="logs")
# Drop logs table # Drop logs table
op.drop_table("logs") op.drop_table("logs")

View file

@ -0,0 +1,61 @@
"""Add JIRA_CONNECTOR to enums
Revision ID: 13
Revises: 12
"""
from collections.abc import Sequence
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "13"
down_revision: str | None = "12"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None
def upgrade() -> None:
    """Safely add 'JIRA_CONNECTOR' to enum types if missing.

    Both the connector-type and the document-type PostgreSQL enums gain
    the JIRA_CONNECTOR label. Each ALTER is wrapped in a DO block that
    first checks pg_enum, so re-running the migration is a no-op rather
    than an error.
    """
    # The guarded ALTER is identical for both enum types; loop instead of
    # duplicating the SQL statement verbatim.
    for enum_name in ("searchsourceconnectortype", "documenttype"):
        op.execute(
            f"""
            DO $$
            BEGIN
                IF NOT EXISTS (
                    SELECT 1 FROM pg_type t
                    JOIN pg_enum e ON t.oid = e.enumtypid
                    WHERE t.typname = '{enum_name}' AND e.enumlabel = 'JIRA_CONNECTOR'
                ) THEN
                    ALTER TYPE {enum_name} ADD VALUE 'JIRA_CONNECTOR';
                END IF;
            END
            $$;
            """
        )
def downgrade() -> None:
    """No-op downgrade.

    PostgreSQL does not support removing a value from an enum type
    (short of rebuilding the type and every dependent column), so the
    JIRA_CONNECTOR label is intentionally left in place on downgrade.
    """
    pass

View file

@ -25,7 +25,23 @@ def upgrade() -> None:
# Manually add the command to add the enum value # Manually add the command to add the enum value
# Note: It's generally better to let autogenerate handle this, but we're bypassing it # Note: It's generally better to let autogenerate handle this, but we're bypassing it
op.execute("ALTER TYPE searchsourceconnectortype ADD VALUE 'GITHUB_CONNECTOR'") op.execute(
"""
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1
FROM pg_enum
WHERE enumlabel = 'GITHUB_CONNECTOR'
AND enumtypid = (
SELECT oid FROM pg_type WHERE typname = 'searchsourceconnectortype'
)
) THEN
ALTER TYPE searchsourceconnectortype ADD VALUE 'GITHUB_CONNECTOR';
END IF;
END$$;
"""
)
# Pass for the rest, as autogenerate didn't run to add other schema details # Pass for the rest, as autogenerate didn't run to add other schema details
pass pass

View file

@ -17,14 +17,25 @@ depends_on: str | Sequence[str] | None = None
def upgrade() -> None: def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ### op.execute(
"""
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_enum
WHERE enumlabel = 'LINEAR_CONNECTOR'
AND enumtypid = (
SELECT oid FROM pg_type WHERE typname = 'searchsourceconnectortype'
)
) THEN
ALTER TYPE searchsourceconnectortype ADD VALUE 'LINEAR_CONNECTOR';
END IF;
END$$;
"""
)
# Manually add the command to add the enum value
op.execute("ALTER TYPE searchsourceconnectortype ADD VALUE 'LINEAR_CONNECTOR'")
# Pass for the rest, as autogenerate didn't run to add other schema details #
pass
# ### end Alembic commands ###
def downgrade() -> None: def downgrade() -> None:

View file

@ -22,7 +22,22 @@ NEW_VALUE = "LINEAR_CONNECTOR"
def upgrade() -> None: def upgrade() -> None:
"""Upgrade schema.""" """Upgrade schema."""
op.execute(f"ALTER TYPE {ENUM_NAME} ADD VALUE '{NEW_VALUE}'") op.execute(
f"""
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_enum
WHERE enumlabel = '{NEW_VALUE}'
AND enumtypid = (
SELECT oid FROM pg_type WHERE typname = '{ENUM_NAME}'
)
) THEN
ALTER TYPE {ENUM_NAME} ADD VALUE '{NEW_VALUE}';
END IF;
END$$;
"""
)
# Warning: This will delete all rows with the new value # Warning: This will delete all rows with the new value

View file

@ -8,6 +8,7 @@ Revises: 5
from collections.abc import Sequence from collections.abc import Sequence
import sqlalchemy as sa import sqlalchemy as sa
from sqlalchemy import inspect
from sqlalchemy.dialects.postgresql import JSON from sqlalchemy.dialects.postgresql import JSON
from alembic import op from alembic import op
@ -20,20 +21,27 @@ depends_on: str | Sequence[str] | None = None
def upgrade() -> None: def upgrade() -> None:
# Drop the old column and create a new one with the new name and type bind = op.get_bind()
# We need to do this because PostgreSQL doesn't support direct column renames with type changes inspector = inspect(bind)
columns = [col["name"] for col in inspector.get_columns("podcasts")]
if "podcast_transcript" not in columns:
op.add_column( op.add_column(
"podcasts", "podcasts",
sa.Column("podcast_transcript", JSON, nullable=False, server_default="{}"), sa.Column("podcast_transcript", JSON, nullable=False, server_default="{}"),
) )
# Copy data from old column to new column # Copy data from old column to new column
# Convert text to JSON by storing it as a JSON string value
op.execute( op.execute(
"UPDATE podcasts SET podcast_transcript = jsonb_build_object('text', podcast_content) WHERE podcast_content != ''" """
UPDATE podcasts
SET podcast_transcript = jsonb_build_object('text', podcast_content)
WHERE podcast_content != ''
"""
) )
# Drop the old column # Drop the old column only if it exists
if "podcast_content" in columns:
op.drop_column("podcasts", "podcast_content") op.drop_column("podcasts", "podcast_content")

View file

@ -8,6 +8,7 @@ Revises: 6
from collections.abc import Sequence from collections.abc import Sequence
import sqlalchemy as sa import sqlalchemy as sa
from sqlalchemy import inspect
from alembic import op from alembic import op
@ -19,7 +20,13 @@ depends_on: str | Sequence[str] | None = None
def upgrade() -> None: def upgrade() -> None:
# Drop the is_generated column # Get the current database connection
bind = op.get_bind()
inspector = inspect(bind)
# Check if the column exists before attempting to drop it
columns = [col["name"] for col in inspector.get_columns("podcasts")]
if "is_generated" in columns:
op.drop_column("podcasts", "is_generated") op.drop_column("podcasts", "is_generated")

View file

@ -7,6 +7,7 @@ Revises: 7
from collections.abc import Sequence from collections.abc import Sequence
import sqlalchemy as sa import sqlalchemy as sa
from sqlalchemy import inspect
from alembic import op from alembic import op
@ -18,44 +19,53 @@ depends_on: str | Sequence[str] | None = None
def upgrade() -> None: def upgrade() -> None:
# Add content_hash column as nullable first to handle existing data bind = op.get_bind()
op.add_column("documents", sa.Column("content_hash", sa.String(), nullable=True)) inspector = inspect(bind)
columns = [col["name"] for col in inspector.get_columns("documents")]
# Update existing documents to generate content hashes # Only add the column if it doesn't already exist
# Using SHA-256 hash of the content column with proper UTF-8 encoding if "content_hash" not in columns:
op.execute(""" op.add_column(
"documents", sa.Column("content_hash", sa.String(), nullable=True)
)
# Populate the content_hash column
op.execute(
"""
UPDATE documents UPDATE documents
SET content_hash = encode(sha256(convert_to(content, 'UTF8')), 'hex') SET content_hash = encode(sha256(convert_to(content, 'UTF8')), 'hex')
WHERE content_hash IS NULL WHERE content_hash IS NULL
""") """
)
# Handle duplicate content hashes by keeping only the oldest document for each hash op.execute(
# Delete newer documents with duplicate content hashes """
op.execute("""
DELETE FROM documents DELETE FROM documents
WHERE id NOT IN ( WHERE id NOT IN (
SELECT MIN(id) SELECT MIN(id)
FROM documents FROM documents
GROUP BY content_hash GROUP BY content_hash
) )
""") """
)
# Now alter the column to match the model: nullable=False, index=True, unique=True
op.alter_column( op.alter_column(
"documents", "content_hash", existing_type=sa.String(), nullable=False "documents", "content_hash", existing_type=sa.String(), nullable=False
) )
op.create_index( op.create_index(
op.f("ix_documents_content_hash"), "documents", ["content_hash"], unique=False op.f("ix_documents_content_hash"),
"documents",
["content_hash"],
unique=False,
) )
op.create_unique_constraint( op.create_unique_constraint(
op.f("uq_documents_content_hash"), "documents", ["content_hash"] op.f("uq_documents_content_hash"), "documents", ["content_hash"]
) )
else:
print("Column 'content_hash' already exists. Skipping column creation.")
def downgrade() -> None: def downgrade() -> None:
# Remove constraints and index first
op.drop_constraint(op.f("uq_documents_content_hash"), "documents", type_="unique") op.drop_constraint(op.f("uq_documents_content_hash"), "documents", type_="unique")
op.drop_index(op.f("ix_documents_content_hash"), table_name="documents") op.drop_index(op.f("ix_documents_content_hash"), table_name="documents")
# Remove content_hash column from documents table
op.drop_column("documents", "content_hash") op.drop_column("documents", "content_hash")

View file

@ -22,11 +22,38 @@ DOCUMENT_NEW_VALUE = "DISCORD_CONNECTOR"
def upgrade() -> None: def upgrade() -> None:
"""Upgrade schema - add DISCORD_CONNECTOR to connector and document enum.""" """Upgrade schema - add DISCORD_CONNECTOR to connector and document enum safely."""
# Add DISCORD_CONNECTOR to searchsourceconnectortype # Add DISCORD_CONNECTOR to searchsourceconnectortype only if not exists
op.execute(f"ALTER TYPE {CONNECTOR_ENUM} ADD VALUE '{CONNECTOR_NEW_VALUE}'") op.execute(
# Add DISCORD_CONNECTOR to documenttype f"""
op.execute(f"ALTER TYPE {DOCUMENT_ENUM} ADD VALUE '{DOCUMENT_NEW_VALUE}'") DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_enum
WHERE enumlabel = '{CONNECTOR_NEW_VALUE}'
AND enumtypid = (SELECT oid FROM pg_type WHERE typname = '{CONNECTOR_ENUM}')
) THEN
ALTER TYPE {CONNECTOR_ENUM} ADD VALUE '{CONNECTOR_NEW_VALUE}';
END IF;
END$$;
"""
)
# Add DISCORD_CONNECTOR to documenttype only if not exists
op.execute(
f"""
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_enum
WHERE enumlabel = '{DOCUMENT_NEW_VALUE}'
AND enumtypid = (SELECT oid FROM pg_type WHERE typname = '{DOCUMENT_ENUM}')
) THEN
ALTER TYPE {DOCUMENT_ENUM} ADD VALUE '{DOCUMENT_NEW_VALUE}';
END IF;
END$$;
"""
)
def downgrade() -> None: def downgrade() -> None:

View file

@ -1,10 +1,3 @@
"""Add GITHUB_CONNECTOR to DocumentType enum
Revision ID: e55302644c51
Revises: 1
"""
from collections.abc import Sequence from collections.abc import Sequence
from alembic import op from alembic import op
@ -16,23 +9,34 @@ branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None depends_on: str | Sequence[str] | None = None
# Define the ENUM type name and the new value # Define the ENUM type name and the new value
ENUM_NAME = "documenttype" # Make sure this matches the name in your DB (usually lowercase class name) ENUM_NAME = "documenttype"
NEW_VALUE = "GITHUB_CONNECTOR" NEW_VALUE = "GITHUB_CONNECTOR"
def upgrade() -> None: def upgrade() -> None:
"""Upgrade schema.""" """Upgrade schema."""
op.execute(f"ALTER TYPE {ENUM_NAME} ADD VALUE '{NEW_VALUE}'") op.execute(
f"""
DO $$
BEGIN
IF NOT EXISTS (
SELECT 1 FROM pg_enum
WHERE enumlabel = '{NEW_VALUE}'
AND enumtypid = (
SELECT oid FROM pg_type WHERE typname = '{ENUM_NAME}'
)
) THEN
ALTER TYPE {ENUM_NAME} ADD VALUE '{NEW_VALUE}';
END IF;
END$$;
"""
)
# Warning: This will delete all rows with the new value
def downgrade() -> None: def downgrade() -> None:
"""Downgrade schema - remove GITHUB_CONNECTOR from enum.""" """Downgrade schema - remove GITHUB_CONNECTOR from enum."""
# The old type name
old_enum_name = f"{ENUM_NAME}_old" old_enum_name = f"{ENUM_NAME}_old"
# Enum values *before* GITHUB_CONNECTOR was added
old_values = ( old_values = (
"EXTENSION", "EXTENSION",
"CRAWLED_URL", "CRAWLED_URL",
@ -43,25 +47,21 @@ def downgrade() -> None:
) )
old_values_sql = ", ".join([f"'{v}'" for v in old_values]) old_values_sql = ", ".join([f"'{v}'" for v in old_values])
# Table and column names (adjust if different)
table_name = "documents" table_name = "documents"
column_name = "document_type" column_name = "document_type"
# 1. Rename the current enum type # 1. Create the new enum type with the old values
op.execute(f"ALTER TYPE {ENUM_NAME} RENAME TO {old_enum_name}") op.execute(f"CREATE TYPE {old_enum_name} AS ENUM({old_values_sql})")
# 2. Create the new enum type with the old values # 2. Delete rows using the new value
op.execute(f"CREATE TYPE {ENUM_NAME} AS ENUM({old_values_sql})")
# 3. Update the table:
op.execute(f"DELETE FROM {table_name} WHERE {column_name}::text = '{NEW_VALUE}'") op.execute(f"DELETE FROM {table_name} WHERE {column_name}::text = '{NEW_VALUE}'")
# 4. Alter the column to use the new enum type (casting old values) # 3. Alter the column to use the old enum type
op.execute( op.execute(
f"ALTER TABLE {table_name} ALTER COLUMN {column_name} " f"ALTER TABLE {table_name} ALTER COLUMN {column_name} "
f"TYPE {ENUM_NAME} USING {column_name}::text::{ENUM_NAME}" f"TYPE {old_enum_name} USING {column_name}::text::{old_enum_name}"
) )
# 5. Drop the old enum type # 4. Drop the current enum type and rename the old one
op.execute(f"DROP TYPE {old_enum_name}") op.execute(f"DROP TYPE {ENUM_NAME}")
# ### end Alembic commands ### op.execute(f"ALTER TYPE {old_enum_name} RENAME TO {ENUM_NAME}")

View file

@ -84,9 +84,9 @@ async def fetch_documents_by_ids(
"document": { "document": {
"id": doc.id, "id": doc.id,
"title": doc.title, "title": doc.title,
"document_type": doc.document_type.value "document_type": (
if doc.document_type doc.document_type.value if doc.document_type else "UNKNOWN"
else "UNKNOWN", ),
"metadata": doc.document_metadata or {}, "metadata": doc.document_metadata or {},
}, },
"source": doc.document_type.value if doc.document_type else "UNKNOWN", "source": doc.document_type.value if doc.document_type else "UNKNOWN",
@ -186,9 +186,11 @@ async def fetch_documents_by_ids(
title = f"GitHub: {doc.title}" title = f"GitHub: {doc.title}"
description = metadata.get( description = metadata.get(
"description", "description",
(
doc.content[:100] + "..." doc.content[:100] + "..."
if len(doc.content) > 100 if len(doc.content) > 100
else doc.content, else doc.content
),
) )
url = metadata.get("url", "") url = metadata.get("url", "")
@ -204,9 +206,11 @@ async def fetch_documents_by_ids(
description = metadata.get( description = metadata.get(
"description", "description",
(
doc.content[:100] + "..." doc.content[:100] + "..."
if len(doc.content) > 100 if len(doc.content) > 100
else doc.content, else doc.content
),
) )
url = ( url = (
f"https://www.youtube.com/watch?v={video_id}" f"https://www.youtube.com/watch?v={video_id}"
@ -238,6 +242,35 @@ async def fetch_documents_by_ids(
else: else:
url = "" url = ""
elif doc_type == "JIRA_CONNECTOR":
# Extract Jira-specific metadata
issue_key = metadata.get("issue_key", "Unknown Issue")
issue_title = metadata.get("issue_title", "Untitled Issue")
status = metadata.get("status", "")
priority = metadata.get("priority", "")
issue_type = metadata.get("issue_type", "")
title = f"Jira: {issue_key} - {issue_title}"
if status:
title += f" ({status})"
description = (
doc.content[:100] + "..."
if len(doc.content) > 100
else doc.content
)
if priority:
description += f" | Priority: {priority}"
if issue_type:
description += f" | Type: {issue_type}"
# Construct Jira URL if we have the base URL
base_url = metadata.get("base_url", "")
if base_url and issue_key:
url = f"{base_url}/browse/{issue_key}"
else:
url = ""
elif doc_type == "EXTENSION": elif doc_type == "EXTENSION":
# Extract Extension-specific metadata # Extract Extension-specific metadata
webpage_title = metadata.get("VisitedWebPageTitle", doc.title) webpage_title = metadata.get("VisitedWebPageTitle", doc.title)
@ -268,9 +301,11 @@ async def fetch_documents_by_ids(
"og:description", "og:description",
metadata.get( metadata.get(
"ogDescription", "ogDescription",
(
doc.content[:100] + "..." doc.content[:100] + "..."
if len(doc.content) > 100 if len(doc.content) > 100
else doc.content, else doc.content
),
), ),
) )
url = metadata.get("url", "") url = metadata.get("url", "")
@ -301,6 +336,7 @@ async def fetch_documents_by_ids(
"GITHUB_CONNECTOR": "GitHub (Selected)", "GITHUB_CONNECTOR": "GitHub (Selected)",
"YOUTUBE_VIDEO": "YouTube Videos (Selected)", "YOUTUBE_VIDEO": "YouTube Videos (Selected)",
"DISCORD_CONNECTOR": "Discord (Selected)", "DISCORD_CONNECTOR": "Discord (Selected)",
"JIRA_CONNECTOR": "Jira Issues (Selected)",
"EXTENSION": "Browser Extension (Selected)", "EXTENSION": "Browser Extension (Selected)",
"CRAWLED_URL": "Web Pages (Selected)", "CRAWLED_URL": "Web Pages (Selected)",
"FILE": "Files (Selected)", "FILE": "Files (Selected)",
@ -802,7 +838,9 @@ async def fetch_relevant_documents(
source_object, source_object,
linkup_chunks, linkup_chunks,
) = await connector_service.search_linkup( ) = await connector_service.search_linkup(
user_query=reformulated_query, user_id=user_id, mode=linkup_mode user_query=reformulated_query,
user_id=user_id,
mode=linkup_mode,
) )
# Add to sources and raw documents # Add to sources and raw documents
@ -845,6 +883,30 @@ async def fetch_relevant_documents(
} }
) )
elif connector == "JIRA_CONNECTOR":
source_object, jira_chunks = await connector_service.search_jira(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode,
)
# Add to sources and raw documents
if source_object:
all_sources.append(source_object)
all_raw_documents.extend(jira_chunks)
# Stream found document count
if streaming_service and writer:
writer(
{
"yield_value": streaming_service.format_terminal_info_delta(
f"🎫 Found {len(jira_chunks)} Jira issues related to your query"
)
}
)
except Exception as e: except Exception as e:
error_message = f"Error searching connector {connector}: {e!s}" error_message = f"Error searching connector {connector}: {e!s}"
print(error_message) print(error_message)
@ -1214,7 +1276,7 @@ async def process_sections(
# Combine the results into a final report with section titles # Combine the results into a final report with section titles
final_report = [] final_report = []
for _, (section, content) in enumerate( for _i, (section, content) in enumerate(
zip(answer_outline.answer_outline, processed_results, strict=False) zip(answer_outline.answer_outline, processed_results, strict=False)
): ):
# Skip adding the section header since the content already contains the title # Skip adding the section header since the content already contains the title

View file

@ -15,7 +15,8 @@ You are SurfSense, an advanced AI research assistant that provides detailed, wel
- YOUTUBE_VIDEO: "YouTube video transcripts and metadata" (personally saved videos) - YOUTUBE_VIDEO: "YouTube video transcripts and metadata" (personally saved videos)
- GITHUB_CONNECTOR: "GitHub repository content and issues" (personal repositories and interactions) - GITHUB_CONNECTOR: "GitHub repository content and issues" (personal repositories and interactions)
- LINEAR_CONNECTOR: "Linear project issues and discussions" (personal project management) - LINEAR_CONNECTOR: "Linear project issues and discussions" (personal project management)
- DISCORD_CONNECTOR: "Discord server messages and channels" (personal community interactions) - JIRA_CONNECTOR: "Jira project issues, tickets, and comments" (personal project tracking)
- DISCORD_CONNECTOR: "Discord server conversations and shared content" (personal community communications)
- TAVILY_API: "Tavily search API results" (personalized search results) - TAVILY_API: "Tavily search API results" (personalized search results)
- LINKUP_API: "Linkup search API results" (personalized search results) - LINKUP_API: "Linkup search API results" (personalized search results)
</knowledge_sources> </knowledge_sources>

View file

@ -43,6 +43,8 @@ def get_connector_emoji(connector_name: str) -> str:
"NOTION_CONNECTOR": "📘", "NOTION_CONNECTOR": "📘",
"GITHUB_CONNECTOR": "🐙", "GITHUB_CONNECTOR": "🐙",
"LINEAR_CONNECTOR": "📊", "LINEAR_CONNECTOR": "📊",
"JIRA_CONNECTOR": "🎫",
"DISCORD_CONNECTOR": "🗨️",
"TAVILY_API": "🔍", "TAVILY_API": "🔍",
"LINKUP_API": "🔗", "LINKUP_API": "🔗",
} }
@ -60,6 +62,8 @@ def get_connector_friendly_name(connector_name: str) -> str:
"NOTION_CONNECTOR": "Notion", "NOTION_CONNECTOR": "Notion",
"GITHUB_CONNECTOR": "GitHub", "GITHUB_CONNECTOR": "GitHub",
"LINEAR_CONNECTOR": "Linear", "LINEAR_CONNECTOR": "Linear",
"JIRA_CONNECTOR": "Jira",
"DISCORD_CONNECTOR": "Discord",
"TAVILY_API": "Tavily Search", "TAVILY_API": "Tavily Search",
"LINKUP_API": "Linkup Search", "LINKUP_API": "Linkup Search",
} }

View file

@ -0,0 +1,487 @@
"""
Jira Connector Module
A module for retrieving data from Jira.
Allows fetching issue lists and their comments, projects and more.
"""
import base64
from datetime import datetime
from typing import Any
import requests
class JiraConnector:
"""Class for retrieving data from Jira."""
def __init__(
self,
base_url: str | None = None,
email: str | None = None,
api_token: str | None = None,
):
"""
Initialize the JiraConnector class.
Args:
base_url: Jira instance base URL (e.g., 'https://yourcompany.atlassian.net') (optional)
email: Jira account email address (optional)
api_token: Jira API token (optional)
"""
self.base_url = base_url.rstrip("/") if base_url else None
self.email = email
self.api_token = api_token
self.api_version = "3" # Jira Cloud API version
def set_credentials(self, base_url: str, email: str, api_token: str) -> None:
    """Store a complete set of Jira credentials on this connector.

    Args:
        base_url: Jira instance base URL (a trailing slash is stripped)
        email: Jira account email address
        api_token: Jira API token
    """
    trimmed = base_url.rstrip("/")
    self.base_url = trimmed
    self.email = email
    self.api_token = api_token
def set_email(self, email: str) -> None:
    """Update only the Jira account email used for authentication.

    Args:
        email: Jira account email address
    """
    self.email = email
def set_api_token(self, api_token: str) -> None:
    """Update only the Jira API token used for authentication.

    Args:
        api_token: Jira API token
    """
    self.api_token = api_token
def get_headers(self) -> dict[str, str]:
"""
Get headers for Jira API requests using Basic Authentication.
Returns:
Dictionary of headers
Raises:
ValueError: If email, api_token, or base_url have not been set
"""
if not all([self.base_url, self.email, self.api_token]):
raise ValueError(
"Jira credentials not initialized. Call set_credentials() first."
)
# Create Basic Auth header using email:api_token
auth_str = f"{self.email}:{self.api_token}"
auth_bytes = auth_str.encode("utf-8")
auth_header = "Basic " + base64.b64encode(auth_bytes).decode("ascii")
return {
"Content-Type": "application/json",
"Authorization": auth_header,
"Accept": "application/json",
}
def make_api_request(
self, endpoint: str, params: dict[str, Any] | None = None
) -> dict[str, Any]:
"""
Make a request to the Jira API.
Args:
endpoint: API endpoint (without base URL)
params: Query parameters for the request (optional)
Returns:
Response data from the API
Raises:
ValueError: If email, api_token, or base_url have not been set
Exception: If the API request fails
"""
if not all([self.base_url, self.email, self.api_token]):
raise ValueError(
"Jira credentials not initialized. Call set_credentials() first."
)
url = f"{self.base_url}/rest/api/{self.api_version}/{endpoint}"
headers = self.get_headers()
response = requests.get(url, headers=headers, params=params, timeout=500)
if response.status_code == 200:
return response.json()
else:
raise Exception(
f"API request failed with status code {response.status_code}: {response.text}"
)
def get_all_projects(self) -> dict[str, Any]:
"""
Fetch all projects from Jira.
Returns:
List of project objects
Raises:
ValueError: If credentials have not been set
Exception: If the API request fails
"""
return self.make_api_request("project/search")
def get_all_issues(self, project_key: str | None = None) -> list[dict[str, Any]]:
"""
Fetch all issues from Jira.
Args:
project_key: Optional project key to filter issues (e.g., 'PROJ')
Returns:
List of issue objects
Raises:
ValueError: If credentials have not been set
Exception: If the API request fails
"""
jql = "ORDER BY created DESC"
if project_key:
jql = f'project = "{project_key}" ' + jql
fields = [
"summary",
"description",
"status",
"assignee",
"reporter",
"created",
"updated",
"priority",
"issuetype",
"project",
]
params = {
"jql": jql,
"fields": ",".join(fields),
"maxResults": 100,
"startAt": 0,
}
all_issues = []
start_at = 0
while True:
params["startAt"] = start_at
result = self.make_api_request("search", params)
if not isinstance(result, dict) or "issues" not in result:
raise Exception("Invalid response from Jira API")
issues = result["issues"]
all_issues.extend(issues)
print(f"Fetched {len(issues)} issues (startAt={start_at})")
total = result.get("total", 0)
if start_at + len(issues) >= total:
break
start_at += len(issues)
return all_issues
def get_issues_by_date_range(
self,
start_date: str,
end_date: str,
include_comments: bool = True,
project_key: str | None = None,
) -> tuple[list[dict[str, Any]], str | None]:
"""
Fetch issues within a date range.
Args:
start_date: Start date in YYYY-MM-DD format
end_date: End date in YYYY-MM-DD format (inclusive)
include_comments: Whether to include comments in the response
project_key: Optional project key to filter issues
Returns:
Tuple containing (issues list, error message or None)
"""
try:
# Build JQL query for date range
# Query issues that were either created OR updated within the date range
date_filter = (
f"(createdDate >= '{start_date}' AND createdDate <= '{end_date}')"
)
# TODO : This JQL needs some improvement to work as expected
_jql = f"{date_filter}"
if project_key:
_jql = (
f'project = "{project_key}" AND {date_filter} ORDER BY created DESC'
)
# Define fields to retrieve
fields = [
"summary",
"description",
"status",
"assignee",
"reporter",
"created",
"updated",
"priority",
"issuetype",
"project",
]
if include_comments:
fields.append("comment")
params = {
# "jql": "", TODO : Add a JQL query to filter from a date range
"fields": ",".join(fields),
"maxResults": 100,
"startAt": 0,
}
all_issues = []
start_at = 0
while True:
params["startAt"] = start_at
result = self.make_api_request("search", params)
if not isinstance(result, dict) or "issues" not in result:
return [], "Invalid response from Jira API"
issues = result["issues"]
all_issues.extend(issues)
# Check if there are more issues to fetch
total = result.get("total", 0)
if start_at + len(issues) >= total:
break
start_at += len(issues)
if not all_issues:
return [], "No issues found in the specified date range."
return all_issues, None
except Exception as e:
return [], f"Error fetching issues: {e!s}"
def format_issue(self, issue: dict[str, Any]) -> dict[str, Any]:
"""
Format an issue for easier consumption.
Args:
issue: The issue object from Jira API
Returns:
Formatted issue dictionary
"""
fields = issue.get("fields", {})
# Extract basic issue details
formatted = {
"id": issue.get("id", ""),
"key": issue.get("key", ""),
"title": fields.get("summary", ""),
"description": fields.get("description", ""),
"status": (
fields.get("status", {}).get("name", "Unknown")
if fields.get("status")
else "Unknown"
),
"status_category": (
fields.get("status", {})
.get("statusCategory", {})
.get("name", "Unknown")
if fields.get("status")
else "Unknown"
),
"priority": (
fields.get("priority", {}).get("name", "Unknown")
if fields.get("priority")
else "Unknown"
),
"issue_type": (
fields.get("issuetype", {}).get("name", "Unknown")
if fields.get("issuetype")
else "Unknown"
),
"project": (
fields.get("project", {}).get("key", "Unknown")
if fields.get("project")
else "Unknown"
),
"created_at": fields.get("created", ""),
"updated_at": fields.get("updated", ""),
"reporter": (
{
"account_id": (
fields.get("reporter", {}).get("accountId", "")
if fields.get("reporter")
else ""
),
"display_name": (
fields.get("reporter", {}).get("displayName", "Unknown")
if fields.get("reporter")
else "Unknown"
),
"email": (
fields.get("reporter", {}).get("emailAddress", "")
if fields.get("reporter")
else ""
),
}
if fields.get("reporter")
else {"account_id": "", "display_name": "Unknown", "email": ""}
),
"assignee": (
{
"account_id": fields.get("assignee", {}).get("accountId", ""),
"display_name": fields.get("assignee", {}).get(
"displayName", "Unknown"
),
"email": fields.get("assignee", {}).get("emailAddress", ""),
}
if fields.get("assignee")
else None
),
"comments": [],
}
# Extract comments if available
if "comment" in fields and "comments" in fields["comment"]:
for comment in fields["comment"]["comments"]:
formatted_comment = {
"id": comment.get("id", ""),
"body": comment.get("body", ""),
"created_at": comment.get("created", ""),
"updated_at": comment.get("updated", ""),
"author": (
{
"account_id": (
comment.get("author", {}).get("accountId", "")
if comment.get("author")
else ""
),
"display_name": (
comment.get("author", {}).get("displayName", "Unknown")
if comment.get("author")
else "Unknown"
),
"email": (
comment.get("author", {}).get("emailAddress", "")
if comment.get("author")
else ""
),
}
if comment.get("author")
else {"account_id": "", "display_name": "Unknown", "email": ""}
),
}
formatted["comments"].append(formatted_comment)
return formatted
def format_issue_to_markdown(self, issue: dict[str, Any]) -> str:
"""
Convert an issue to markdown format.
Args:
issue: The issue object (either raw or formatted)
Returns:
Markdown string representation of the issue
"""
# Format the issue if it's not already formatted
if "key" not in issue:
issue = self.format_issue(issue)
# Build the markdown content
markdown = (
f"# {issue.get('key', 'No Key')}: {issue.get('title', 'No Title')}\n\n"
)
if issue.get("status"):
markdown += f"**Status:** {issue['status']}\n"
if issue.get("priority"):
markdown += f"**Priority:** {issue['priority']}\n"
if issue.get("issue_type"):
markdown += f"**Type:** {issue['issue_type']}\n"
if issue.get("project"):
markdown += f"**Project:** {issue['project']}\n\n"
if issue.get("assignee") and issue["assignee"].get("display_name"):
markdown += f"**Assignee:** {issue['assignee']['display_name']}\n"
if issue.get("reporter") and issue["reporter"].get("display_name"):
markdown += f"**Reporter:** {issue['reporter']['display_name']}\n"
if issue.get("created_at"):
created_date = self.format_date(issue["created_at"])
markdown += f"**Created:** {created_date}\n"
if issue.get("updated_at"):
updated_date = self.format_date(issue["updated_at"])
markdown += f"**Updated:** {updated_date}\n\n"
if issue.get("description"):
markdown += f"## Description\n\n{issue['description']}\n\n"
if issue.get("comments"):
markdown += f"## Comments ({len(issue['comments'])})\n\n"
for comment in issue["comments"]:
author_name = "Unknown"
if comment.get("author") and comment["author"].get("display_name"):
author_name = comment["author"]["display_name"]
comment_date = "Unknown date"
if comment.get("created_at"):
comment_date = self.format_date(comment["created_at"])
markdown += f"### {author_name} ({comment_date})\n\n{comment.get('body', '')}\n\n---\n\n"
return markdown
@staticmethod
def format_date(iso_date: str) -> str:
"""
Format an ISO date string to a more readable format.
Args:
iso_date: ISO format date string
Returns:
Formatted date string
"""
if not iso_date or not isinstance(iso_date, str):
return "Unknown date"
try:
# Jira dates are typically in format: 2023-01-01T12:00:00.000+0000
dt = datetime.fromisoformat(iso_date.replace("Z", "+00:00"))
return dt.strftime("%Y-%m-%d %H:%M:%S")
except ValueError:
return iso_date

View file

@ -3,6 +3,7 @@ from datetime import UTC, datetime
from enum import Enum from enum import Enum
from fastapi import Depends from fastapi import Depends
from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyUserDatabase
from pgvector.sqlalchemy import Vector from pgvector.sqlalchemy import Vector
from sqlalchemy import ( from sqlalchemy import (
ARRAY, ARRAY,
@ -26,13 +27,7 @@ from app.retriver.chunks_hybrid_search import ChucksHybridSearchRetriever
from app.retriver.documents_hybrid_search import DocumentHybridSearchRetriever from app.retriver.documents_hybrid_search import DocumentHybridSearchRetriever
if config.AUTH_TYPE == "GOOGLE": if config.AUTH_TYPE == "GOOGLE":
from fastapi_users.db import ( from fastapi_users.db import SQLAlchemyBaseOAuthAccountTableUUID
SQLAlchemyBaseOAuthAccountTableUUID,
SQLAlchemyBaseUserTableUUID,
SQLAlchemyUserDatabase,
)
else:
from fastapi_users.db import SQLAlchemyBaseUserTableUUID, SQLAlchemyUserDatabase
DATABASE_URL = config.DATABASE_URL DATABASE_URL = config.DATABASE_URL
@ -47,6 +42,7 @@ class DocumentType(str, Enum):
GITHUB_CONNECTOR = "GITHUB_CONNECTOR" GITHUB_CONNECTOR = "GITHUB_CONNECTOR"
LINEAR_CONNECTOR = "LINEAR_CONNECTOR" LINEAR_CONNECTOR = "LINEAR_CONNECTOR"
DISCORD_CONNECTOR = "DISCORD_CONNECTOR" DISCORD_CONNECTOR = "DISCORD_CONNECTOR"
JIRA_CONNECTOR = "JIRA_CONNECTOR"
class SearchSourceConnectorType(str, Enum): class SearchSourceConnectorType(str, Enum):
@ -58,6 +54,7 @@ class SearchSourceConnectorType(str, Enum):
GITHUB_CONNECTOR = "GITHUB_CONNECTOR" GITHUB_CONNECTOR = "GITHUB_CONNECTOR"
LINEAR_CONNECTOR = "LINEAR_CONNECTOR" LINEAR_CONNECTOR = "LINEAR_CONNECTOR"
DISCORD_CONNECTOR = "DISCORD_CONNECTOR" DISCORD_CONNECTOR = "DISCORD_CONNECTOR"
JIRA_CONNECTOR = "JIRA_CONNECTOR"
class ChatType(str, Enum): class ChatType(str, Enum):
@ -320,6 +317,7 @@ if config.AUTH_TYPE == "GOOGLE":
strategic_llm = relationship( strategic_llm = relationship(
"LLMConfig", foreign_keys=[strategic_llm_id], post_update=True "LLMConfig", foreign_keys=[strategic_llm_id], post_update=True
) )
else: else:
class User(SQLAlchemyBaseUserTableUUID, Base): class User(SQLAlchemyBaseUserTableUUID, Base):
@ -402,6 +400,7 @@ if config.AUTH_TYPE == "GOOGLE":
async def get_user_db(session: AsyncSession = Depends(get_async_session)): async def get_user_db(session: AsyncSession = Depends(get_async_session)):
yield SQLAlchemyUserDatabase(session, User, OAuthAccount) yield SQLAlchemyUserDatabase(session, User, OAuthAccount)
else: else:
async def get_user_db(session: AsyncSession = Depends(get_async_session)): async def get_user_db(session: AsyncSession = Depends(get_async_session)):

View file

@ -38,6 +38,7 @@ from app.schemas import (
from app.tasks.connectors_indexing_tasks import ( from app.tasks.connectors_indexing_tasks import (
index_discord_messages, index_discord_messages,
index_github_repos, index_github_repos,
index_jira_issues,
index_linear_issues, index_linear_issues,
index_notion_pages, index_notion_pages,
index_slack_messages, index_slack_messages,
@ -336,6 +337,7 @@ async def index_connector_content(
- NOTION_CONNECTOR: Indexes pages from all accessible Notion pages - NOTION_CONNECTOR: Indexes pages from all accessible Notion pages
- GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories - GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories
- LINEAR_CONNECTOR: Indexes issues and comments from Linear - LINEAR_CONNECTOR: Indexes issues and comments from Linear
- JIRA_CONNECTOR: Indexes issues and comments from Jira
- DISCORD_CONNECTOR: Indexes messages from all accessible Discord channels - DISCORD_CONNECTOR: Indexes messages from all accessible Discord channels
Args: Args:
@ -353,7 +355,9 @@ async def index_connector_content(
) )
# Check if the search space belongs to the user # Check if the search space belongs to the user
await check_ownership(session, SearchSpace, search_space_id, user) _search_space = await check_ownership(
session, SearchSpace, search_space_id, user
)
# Handle different connector types # Handle different connector types
response_message = "" response_message = ""
@ -438,6 +442,21 @@ async def index_connector_content(
) )
response_message = "Linear indexing started in the background." response_message = "Linear indexing started in the background."
elif connector.connector_type == SearchSourceConnectorType.JIRA_CONNECTOR:
# Run indexing in background
logger.info(
f"Triggering Jira indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
)
background_tasks.add_task(
run_jira_indexing_with_new_session,
connector_id,
search_space_id,
str(user.id),
indexing_from,
indexing_to,
)
response_message = "Jira indexing started in the background."
elif connector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR: elif connector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR:
# Run indexing in background # Run indexing in background
logger.info( logger.info(
@ -807,3 +826,61 @@ async def run_discord_indexing(
) )
except Exception as e: except Exception as e:
logger.error(f"Error in background Discord indexing task: {e!s}") logger.error(f"Error in background Discord indexing task: {e!s}")
# Add new helper functions for Jira indexing
async def run_jira_indexing_with_new_session(
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Wrapper to run Jira indexing with its own database session.

    Registered via background_tasks.add_task(), so it runs after the HTTP
    response is sent; it therefore opens a fresh session from
    async_session_maker() instead of reusing the request-scoped one.

    Args:
        connector_id: ID of the Jira connector to index.
        search_space_id: ID of the search space receiving the documents.
        user_id: ID of the owning user, as a string.
        start_date: Start of the indexing window (YYYY-MM-DD).
        end_date: End of the indexing window (YYYY-MM-DD).
    """
    logger.info(
        f"Background task started: Indexing Jira connector {connector_id} into space {search_space_id} from {start_date} to {end_date}"
    )
    # The context manager closes the session even if indexing raises.
    async with async_session_maker() as session:
        await run_jira_indexing(
            session, connector_id, search_space_id, user_id, start_date, end_date
        )
    logger.info(f"Background task finished: Indexing Jira connector {connector_id}")
async def run_jira_indexing(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Runs the Jira indexing task and updates the timestamp.

    Delegates to index_jira_issues() with update_last_indexed=False and only
    stamps the connector's last_indexed_at here, after a successful run, so a
    failed run does not advance the indexing window.

    Args:
        session: Open database session (committed here on success).
        connector_id: ID of the Jira connector to index.
        search_space_id: ID of the search space receiving the documents.
        user_id: ID of the owning user, as a string.
        start_date: Start of the indexing window (YYYY-MM-DD).
        end_date: End of the indexing window (YYYY-MM-DD).
    """
    try:
        # update_last_indexed=False: the timestamp is written below, and only
        # when index_jira_issues reports no error.
        indexed_count, error_message = await index_jira_issues(
            session,
            connector_id,
            search_space_id,
            user_id,
            start_date,
            end_date,
            update_last_indexed=False,
        )
        if error_message:
            logger.error(
                f"Jira indexing failed for connector {connector_id}: {error_message}"
            )
            # Optionally update status in DB to indicate failure
        else:
            logger.info(
                f"Jira indexing successful for connector {connector_id}. Indexed {indexed_count} documents."
            )
            # Update the last indexed timestamp only on success
            await update_connector_last_indexed(session, connector_id)
            await session.commit()  # Commit timestamp update
    except Exception as e:
        # Catch-all so a background task never propagates into the event loop.
        logger.error(
            f"Critical error in run_jira_indexing for connector {connector_id}: {e}",
            exc_info=True,
        )
        # Optionally update status in DB to indicate failure

View file

@ -123,6 +123,25 @@ class SearchSourceConnectorBase(BaseModel):
# Ensure the bot token is not empty # Ensure the bot token is not empty
if not config.get("DISCORD_BOT_TOKEN"): if not config.get("DISCORD_BOT_TOKEN"):
raise ValueError("DISCORD_BOT_TOKEN cannot be empty") raise ValueError("DISCORD_BOT_TOKEN cannot be empty")
elif connector_type == SearchSourceConnectorType.JIRA_CONNECTOR:
# For JIRA_CONNECTOR, require JIRA_EMAIL, JIRA_API_TOKEN and JIRA_BASE_URL
allowed_keys = ["JIRA_EMAIL", "JIRA_API_TOKEN", "JIRA_BASE_URL"]
if set(config.keys()) != set(allowed_keys):
raise ValueError(
f"For JIRA_CONNECTOR connector type, config must only contain these keys: {allowed_keys}"
)
# Ensure the email is not empty
if not config.get("JIRA_EMAIL"):
raise ValueError("JIRA_EMAIL cannot be empty")
# Ensure the API token is not empty
if not config.get("JIRA_API_TOKEN"):
raise ValueError("JIRA_API_TOKEN cannot be empty")
# Ensure the base URL is not empty
if not config.get("JIRA_BASE_URL"):
raise ValueError("JIRA_BASE_URL cannot be empty")
return config return config

View file

@ -1,4 +1,5 @@
import asyncio import asyncio
from typing import Any
from linkup import LinkupClient from linkup import LinkupClient
from sqlalchemy import func from sqlalchemy import func
@ -204,7 +205,9 @@ class ConnectorService:
return result_object, files_chunks return result_object, files_chunks
def _transform_document_results(self, document_results: list[dict]) -> list[dict]: def _transform_document_results(
self, document_results: list[dict[str, Any]]
) -> list[dict[str, Any]]:
""" """
Transform results from document_retriever.hybrid_search() to match the format Transform results from document_retriever.hybrid_search() to match the format
expected by the processing code. expected by the processing code.
@ -608,6 +611,7 @@ class ConnectorService:
visit_duration = metadata.get( visit_duration = metadata.get(
"VisitedWebPageVisitDurationInMilliseconds", "" "VisitedWebPageVisitDurationInMilliseconds", ""
) )
_browsing_session_id = metadata.get("BrowsingSessionId", "")
# Create a more descriptive title for extension data # Create a more descriptive title for extension data
title = webpage_title title = webpage_title
@ -948,6 +952,127 @@ class ConnectorService:
return result_object, linear_chunks return result_object, linear_chunks
async def search_jira(
    self,
    user_query: str,
    user_id: str,
    search_space_id: int,
    top_k: int = 20,
    search_mode: SearchMode = SearchMode.CHUNKS,
) -> tuple:
    """
    Search for Jira issues and comments and return both the source
    information and langchain documents.

    Args:
        user_query: The user's query
        user_id: The user's ID
        search_space_id: The search space ID to search in
        top_k: Maximum number of results to return
        search_mode: Search mode (CHUNKS or DOCUMENTS)

    Returns:
        tuple: (sources_info, langchain_documents)
    """
    if search_mode == SearchMode.CHUNKS:
        jira_chunks = await self.chunk_retriever.hybrid_search(
            query_text=user_query,
            top_k=top_k,
            user_id=user_id,
            search_space_id=search_space_id,
            document_type="JIRA_CONNECTOR",
        )
    elif search_mode == SearchMode.DOCUMENTS:
        jira_chunks = await self.document_retriever.hybrid_search(
            query_text=user_query,
            top_k=top_k,
            user_id=user_id,
            search_space_id=search_space_id,
            document_type="JIRA_CONNECTOR",
        )
        # Transform document retriever results to match expected format
        jira_chunks = self._transform_document_results(jira_chunks)
    else:
        # FIX: an unrecognized mode previously left jira_chunks unbound and
        # raised UnboundLocalError below; treat it as "no results" instead.
        jira_chunks = []

    # Early return if no results. FIX: this used to report id 30 while the
    # populated result below used id 10; both now use the same connector id.
    if not jira_chunks:
        return {
            "id": 10,
            "name": "Jira Issues",
            "type": "JIRA_CONNECTOR",
            "sources": [],
        }, []

    # Process each chunk and create sources directly without deduplication.
    sources_list = []
    async with self.counter_lock:
        for chunk in jira_chunks:
            # Extract document metadata stored at indexing time.
            document = chunk.get("document", {})
            metadata = document.get("metadata", {})

            issue_key = metadata.get("issue_key", "")
            issue_title = metadata.get("issue_title", "Untitled Issue")
            status = metadata.get("status", "")
            priority = metadata.get("priority", "")
            issue_type = metadata.get("issue_type", "")
            comment_count = metadata.get("comment_count", 0)

            # Descriptive title: "Jira: KEY - summary (status)".
            title = f"Jira: {issue_key} - {issue_title}"
            if status:
                title += f" ({status})"

            # Description: first 100 chars of content plus key attributes.
            description = chunk.get("content", "")[:100]
            if len(description) == 100:
                description += "..."

            info_parts = []
            if priority:
                info_parts.append(f"Priority: {priority}")
            if issue_type:
                info_parts.append(f"Type: {issue_type}")
            if comment_count:
                info_parts.append(f"Comments: {comment_count}")

            if info_parts:
                if description:
                    description += f" | {' | '.join(info_parts)}"
                else:
                    description = " | ".join(info_parts)

            # Link to the issue when the indexed metadata carries a base URL.
            url = ""
            if issue_key and metadata.get("base_url"):
                url = f"{metadata.get('base_url')}/browse/{issue_key}"

            source = {
                "id": document.get("id", self.source_id_counter),
                "title": title,
                "description": description,
                "url": url,
                "issue_key": issue_key,
                "status": status,
                "priority": priority,
                "issue_type": issue_type,
                "comment_count": comment_count,
            }
            self.source_id_counter += 1
            sources_list.append(source)

    result_object = {
        "id": 10,  # Unique ID for the Jira connector
        "name": "Jira Issues",
        "type": "JIRA_CONNECTOR",
        "sources": sources_list,
    }

    return result_object, jira_chunks
async def search_linkup( async def search_linkup(
self, user_query: str, user_id: str, mode: str = "standard" self, user_query: str, user_id: str, mode: str = "standard"
) -> tuple: ) -> tuple:
@ -1013,12 +1138,12 @@ class ConnectorService:
# Create a source entry # Create a source entry
source = { source = {
"id": self.source_id_counter, "id": self.source_id_counter,
"title": result.name "title": (
if hasattr(result, "name") result.name if hasattr(result, "name") else "Linkup Result"
else "Linkup Result", ),
"description": result.content[:100] "description": (
if hasattr(result, "content") result.content[:100] if hasattr(result, "content") else ""
else "", ),
"url": result.url if hasattr(result, "url") else "", "url": result.url if hasattr(result, "url") else "",
} }
sources_list.append(source) sources_list.append(source)
@ -1030,9 +1155,11 @@ class ConnectorService:
"score": 1.0, # Default score since not provided by Linkup "score": 1.0, # Default score since not provided by Linkup
"document": { "document": {
"id": self.source_id_counter, "id": self.source_id_counter,
"title": result.name "title": (
result.name
if hasattr(result, "name") if hasattr(result, "name")
else "Linkup Result", else "Linkup Result"
),
"document_type": "LINKUP_API", "document_type": "LINKUP_API",
"metadata": { "metadata": {
"url": result.url if hasattr(result, "url") else "", "url": result.url if hasattr(result, "url") else "",

View file

@ -10,6 +10,7 @@ from sqlalchemy.future import select
from app.config import config from app.config import config
from app.connectors.discord_connector import DiscordConnector from app.connectors.discord_connector import DiscordConnector
from app.connectors.github_connector import GitHubConnector from app.connectors.github_connector import GitHubConnector
from app.connectors.jira_connector import JiraConnector
from app.connectors.linear_connector import LinearConnector from app.connectors.linear_connector import LinearConnector
from app.connectors.notion_history import NotionHistoryConnector from app.connectors.notion_history import NotionHistoryConnector
from app.connectors.slack_history import SlackHistory from app.connectors.slack_history import SlackHistory
@ -1374,9 +1375,9 @@ async def index_linear_issues(
# Process each issue # Process each issue
for issue in issues: for issue in issues:
try: try:
issue_id = issue.get("id") issue_id = issue.get("key")
issue_identifier = issue.get("identifier", "") issue_identifier = issue.get("id", "")
issue_title = issue.get("title", "") issue_title = issue.get("key", "")
if not issue_id or not issue_title: if not issue_id or not issue_title:
logger.warning( logger.warning(
@ -1978,3 +1979,353 @@ async def index_discord_messages(
) )
logger.error(f"Failed to index Discord messages: {e!s}", exc_info=True) logger.error(f"Failed to index Discord messages: {e!s}", exc_info=True)
return 0, f"Failed to index Discord messages: {e!s}" return 0, f"Failed to index Discord messages: {e!s}"
async def index_jira_issues(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str | None = None,
    end_date: str | None = None,
    update_last_indexed: bool = True,
) -> tuple[int, str | None]:
    """
    Index Jira issues and comments.

    Fetches issues (with comments) from the connector's Jira instance for
    the given date window, converts each issue to markdown, embeds a short
    summary, chunks the full content, and stores the result as Document
    rows. Issues whose content hash already exists are skipped.

    Args:
        session: Database session
        connector_id: ID of the Jira connector
        search_space_id: ID of the search space to store documents in
        user_id: User ID (recorded in task logs)
        start_date: Start date for indexing (YYYY-MM-DD format); defaults to
            the connector's last_indexed_at (or 365 days ago) when omitted
        end_date: End date for indexing (YYYY-MM-DD format); defaults to
            today when omitted
        update_last_indexed: Whether to update the last_indexed_at timestamp
            (default: True)

    Returns:
        Tuple containing (number of documents indexed, error message or None)
    """
    task_logger = TaskLoggingService(session, search_space_id)

    # Record the task start so progress and failures are visible to the user.
    log_entry = await task_logger.log_task_start(
        task_name="jira_issues_indexing",
        source="connector_indexing_task",
        message=f"Starting Jira issues indexing for connector {connector_id}",
        metadata={
            "connector_id": connector_id,
            "user_id": str(user_id),
            "start_date": start_date,
            "end_date": end_date,
        },
    )

    try:
        # Look up the connector and verify it really is a Jira connector.
        result = await session.execute(
            select(SearchSourceConnector).filter(
                SearchSourceConnector.id == connector_id,
                SearchSourceConnector.connector_type
                == SearchSourceConnectorType.JIRA_CONNECTOR,
            )
        )
        connector = result.scalars().first()

        if not connector:
            await task_logger.log_task_failure(
                log_entry,
                f"Connector with ID {connector_id} not found",
                "Connector not found",
                {"error_type": "ConnectorNotFound"},
            )
            return 0, f"Connector with ID {connector_id} not found"

        # Pull the Jira credentials out of the connector config.
        jira_email = connector.config.get("JIRA_EMAIL")
        jira_api_token = connector.config.get("JIRA_API_TOKEN")
        jira_base_url = connector.config.get("JIRA_BASE_URL")

        if not jira_email or not jira_api_token or not jira_base_url:
            await task_logger.log_task_failure(
                log_entry,
                f"Jira credentials not found in connector config for connector {connector_id}",
                "Missing Jira credentials",
                {"error_type": "MissingCredentials"},
            )
            return 0, "Jira credentials not found in connector config"

        # Initialize Jira client
        await task_logger.log_task_progress(
            log_entry,
            f"Initializing Jira client for connector {connector_id}",
            {"stage": "client_initialization"},
        )

        jira_client = JiraConnector(
            base_url=jira_base_url, email=jira_email, api_token=jira_api_token
        )

        # Resolve the date window: explicit dates win; otherwise derive it
        # from last_indexed_at, falling back to 365 days ago.
        if start_date is None or end_date is None:
            calculated_end_date = datetime.now()

            if connector.last_indexed_at:
                # Compare timezone-naive datetimes to avoid naive/aware errors.
                last_indexed_naive = (
                    connector.last_indexed_at.replace(tzinfo=None)
                    if connector.last_indexed_at.tzinfo
                    else connector.last_indexed_at
                )

                # Guard against clock skew: a future last_indexed_at would
                # otherwise produce an inverted window.
                if last_indexed_naive > calculated_end_date:
                    logger.warning(
                        f"Last indexed date ({last_indexed_naive.strftime('%Y-%m-%d')}) is in the future. Using 365 days ago instead."
                    )
                    calculated_start_date = calculated_end_date - timedelta(days=365)
                else:
                    calculated_start_date = last_indexed_naive
                    logger.info(
                        f"Using last_indexed_at ({calculated_start_date.strftime('%Y-%m-%d')}) as start date"
                    )
            else:
                calculated_start_date = calculated_end_date - timedelta(
                    days=365
                )  # Use 365 days as default
                logger.info(
                    f"No last_indexed_at found, using {calculated_start_date.strftime('%Y-%m-%d')} (365 days ago) as start date"
                )

            # Use calculated dates for whichever bound was not provided.
            start_date_str = (
                start_date if start_date else calculated_start_date.strftime("%Y-%m-%d")
            )
            end_date_str = (
                end_date if end_date else calculated_end_date.strftime("%Y-%m-%d")
            )
        else:
            # Use provided dates
            start_date_str = start_date
            end_date_str = end_date

        await task_logger.log_task_progress(
            log_entry,
            f"Fetching Jira issues from {start_date_str} to {end_date_str}",
            {
                "stage": "fetching_issues",
                "start_date": start_date_str,
                "end_date": end_date_str,
            },
        )

        # Get issues within date range
        try:
            issues, error = jira_client.get_issues_by_date_range(
                start_date=start_date_str, end_date=end_date_str, include_comments=True
            )

            if error:
                logger.error(f"Failed to get Jira issues: {error}")

                # "No issues found" is a benign outcome, not a failure: still
                # advance the timestamp so the window keeps moving forward.
                if "No issues found" in error:
                    logger.info(
                        "No issues found is not a critical error, continuing with update"
                    )
                    if update_last_indexed:
                        connector.last_indexed_at = datetime.now()
                        await session.commit()
                        logger.info(
                            f"Updated last_indexed_at to {connector.last_indexed_at} despite no issues found"
                        )

                    await task_logger.log_task_success(
                        log_entry,
                        f"No Jira issues found in date range {start_date_str} to {end_date_str}",
                        {"issues_found": 0},
                    )
                    return 0, None
                else:
                    await task_logger.log_task_failure(
                        log_entry,
                        f"Failed to get Jira issues: {error}",
                        "API Error",
                        {"error_type": "APIError"},
                    )
                    return 0, f"Failed to get Jira issues: {error}"

            logger.info(f"Retrieved {len(issues)} issues from Jira API")
        except Exception as e:
            logger.error(f"Error fetching Jira issues: {e!s}", exc_info=True)
            return 0, f"Error fetching Jira issues: {e!s}"

        # Process and index each issue; individual failures skip that issue
        # only, so one bad payload cannot abort the whole run.
        documents_indexed = 0
        skipped_issues = []
        documents_skipped = 0

        for issue in issues:
            try:
                # FIX: issue_title previously read the raw "id" field, so
                # documents were titled with the numeric issue id. Use the
                # key as identifier and the summary as title, matching
                # JiraConnector.format_issue().
                issue_id = issue.get("id")
                issue_identifier = issue.get("key", "")
                issue_title = issue.get("fields", {}).get("summary", "")

                if not issue_id or not issue_title:
                    logger.warning(
                        f"Skipping issue with missing ID or title: {issue_id or 'Unknown'}"
                    )
                    skipped_issues.append(
                        f"{issue_identifier or 'Unknown'} (missing data)"
                    )
                    documents_skipped += 1
                    continue

                # Format the issue for better readability
                formatted_issue = jira_client.format_issue(issue)

                # Convert to markdown
                issue_content = jira_client.format_issue_to_markdown(formatted_issue)

                if not issue_content:
                    logger.warning(
                        f"Skipping issue with no content: {issue_identifier} - {issue_title}"
                    )
                    skipped_issues.append(f"{issue_identifier} (no content)")
                    documents_skipped += 1
                    continue

                # Short summary used for the document-level embedding.
                summary_content = f"Jira Issue {issue_identifier}: {issue_title}\n\nStatus: {formatted_issue.get('status', 'Unknown')}\n\n"
                if formatted_issue.get("description"):
                    summary_content += (
                        f"Description: {formatted_issue.get('description')}\n\n"
                    )

                # Add comment count
                comment_count = len(formatted_issue.get("comments", []))
                summary_content += f"Comments: {comment_count}"

                # Hash the full content for duplicate detection.
                content_hash = generate_content_hash(issue_content, search_space_id)

                # Skip issues that are already indexed with identical content.
                existing_doc_by_hash_result = await session.execute(
                    select(Document).where(Document.content_hash == content_hash)
                )
                existing_document_by_hash = (
                    existing_doc_by_hash_result.scalars().first()
                )

                if existing_document_by_hash:
                    logger.info(
                        f"Document with content hash {content_hash} already exists for issue {issue_identifier}. Skipping processing."
                    )
                    documents_skipped += 1
                    continue

                # Generate embedding for the summary
                summary_embedding = config.embedding_model_instance.embed(
                    summary_content
                )

                # Chunk the full issue content (including comments) and embed
                # each chunk for retrieval.
                chunks = [
                    Chunk(
                        content=chunk.text,
                        embedding=config.embedding_model_instance.embed(chunk.text),
                    )
                    for chunk in config.chunker_instance.chunk(issue_content)
                ]

                # Create and store new document
                logger.info(
                    f"Creating new document for issue {issue_identifier} - {issue_title}"
                )
                document = Document(
                    search_space_id=search_space_id,
                    title=f"Jira - {issue_identifier}: {issue_title}",
                    document_type=DocumentType.JIRA_CONNECTOR,
                    document_metadata={
                        "issue_id": issue_id,
                        "issue_identifier": issue_identifier,
                        "issue_title": issue_title,
                        "state": formatted_issue.get("status", "Unknown"),
                        "comment_count": comment_count,
                        "indexed_at": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    },
                    content=summary_content,
                    content_hash=content_hash,
                    embedding=summary_embedding,
                    chunks=chunks,
                )

                session.add(document)
                documents_indexed += 1
                logger.info(
                    f"Successfully indexed new issue {issue_identifier} - {issue_title}"
                )

            except Exception as e:
                # FIX: Jira issues carry "key", not a Linear-style
                # "identifier"; the old lookup always logged "Unknown".
                logger.error(
                    f"Error processing issue {issue.get('key', 'Unknown')}: {e!s}",
                    exc_info=True,
                )
                skipped_issues.append(
                    f"{issue.get('key', 'Unknown')} (processing error)"
                )
                documents_skipped += 1
                continue  # Skip this issue and continue with others

        # Update the last_indexed_at timestamp for the connector only if requested
        total_processed = documents_indexed
        if update_last_indexed:
            connector.last_indexed_at = datetime.now()
            logger.info(f"Updated last_indexed_at to {connector.last_indexed_at}")

        # Commit all changes
        await session.commit()
        logger.info("Successfully committed all JIRA document changes to database")

        # Log success
        await task_logger.log_task_success(
            log_entry,
            f"Successfully completed JIRA indexing for connector {connector_id}",
            {
                "issues_processed": total_processed,
                "documents_indexed": documents_indexed,
                "documents_skipped": documents_skipped,
                "skipped_issues_count": len(skipped_issues),
            },
        )

        logger.info(
            f"JIRA indexing completed: {documents_indexed} new issues, {documents_skipped} skipped"
        )
        return (
            total_processed,
            None,
        )  # Return None as the error message to indicate success

    except SQLAlchemyError as db_error:
        await session.rollback()
        await task_logger.log_task_failure(
            log_entry,
            f"Database error during JIRA indexing for connector {connector_id}",
            str(db_error),
            {"error_type": "SQLAlchemyError"},
        )
        logger.error(f"Database error: {db_error!s}", exc_info=True)
        return 0, f"Database error: {db_error!s}"
    except Exception as e:
        await session.rollback()
        await task_logger.log_task_failure(
            log_entry,
            f"Failed to index JIRA issues for connector {connector_id}",
            str(e),
            {"error_type": type(e).__name__},
        )
        logger.error(f"Failed to index JIRA issues: {e!s}", exc_info=True)
        return 0, f"Failed to index JIRA issues: {e!s}"

View file

@ -181,6 +181,33 @@ export default function EditConnectorPage() {
/> />
)} )}
{/* == Jira == */}
{connector.connector_type === "JIRA_CONNECTOR" && (
<div className="space-y-4">
<EditSimpleTokenForm
control={editForm.control}
fieldName="JIRA_BASE_URL"
fieldLabel="Jira Base URL"
fieldDescription="Update your Jira instance URL if needed."
placeholder="https://yourcompany.atlassian.net"
/>
<EditSimpleTokenForm
control={editForm.control}
fieldName="JIRA_EMAIL"
fieldLabel="Jira Email"
fieldDescription="Update your Atlassian account email if needed."
placeholder="your.email@company.com"
/>
<EditSimpleTokenForm
control={editForm.control}
fieldName="JIRA_API_TOKEN"
fieldLabel="Jira API Token"
fieldDescription="Update your Jira API Token if needed."
placeholder="Your Jira API Token"
/>
</div>
)}
{/* == Linkup == */} {/* == Linkup == */}
{connector.connector_type === "LINKUP_API" && ( {connector.connector_type === "LINKUP_API" && (
<EditSimpleTokenForm <EditSimpleTokenForm
@ -202,7 +229,6 @@ export default function EditConnectorPage() {
placeholder="Bot token..." placeholder="Bot token..."
/> />
)} )}
</CardContent> </CardContent>
<CardFooter className="border-t pt-6"> <CardFooter className="border-t pt-6">
<Button <Button

View file

@ -9,7 +9,10 @@ import * as z from "zod";
import { toast } from "sonner"; import { toast } from "sonner";
import { ArrowLeft, Check, Info, Loader2 } from "lucide-react"; import { ArrowLeft, Check, Info, Loader2 } from "lucide-react";
import { useSearchSourceConnectors, SearchSourceConnector } from "@/hooks/useSearchSourceConnectors"; import {
useSearchSourceConnectors,
SearchSourceConnector,
} from "@/hooks/useSearchSourceConnectors";
import { import {
Form, Form,
FormControl, FormControl,
@ -28,11 +31,7 @@ import {
CardHeader, CardHeader,
CardTitle, CardTitle,
} from "@/components/ui/card"; } from "@/components/ui/card";
import { import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
Alert,
AlertDescription,
AlertTitle,
} from "@/components/ui/alert";
// Define the form schema with Zod // Define the form schema with Zod
const apiConnectorFormSchema = z.object({ const apiConnectorFormSchema = z.object({
@ -47,13 +46,15 @@ const apiConnectorFormSchema = z.object({
// Helper function to get connector type display name // Helper function to get connector type display name
const getConnectorTypeDisplay = (type: string): string => { const getConnectorTypeDisplay = (type: string): string => {
const typeMap: Record<string, string> = { const typeMap: Record<string, string> = {
"SERPER_API": "Serper API", SERPER_API: "Serper API",
"TAVILY_API": "Tavily API", TAVILY_API: "Tavily API",
"SLACK_CONNECTOR": "Slack Connector", SLACK_CONNECTOR: "Slack Connector",
"NOTION_CONNECTOR": "Notion Connector", NOTION_CONNECTOR: "Notion Connector",
"GITHUB_CONNECTOR": "GitHub Connector", GITHUB_CONNECTOR: "GitHub Connector",
"DISCORD_CONNECTOR": "Discord Connector", LINEAR_CONNECTOR: "Linear Connector",
"LINKUP_API": "Linkup", JIRA_CONNECTOR: "Jira Connector",
DISCORD_CONNECTOR: "Discord Connector",
LINKUP_API: "Linkup",
// Add other connector types here as needed // Add other connector types here as needed
}; };
return typeMap[type] || type; return typeMap[type] || type;
@ -69,7 +70,9 @@ export default function EditConnectorPage() {
const connectorId = parseInt(params.connector_id as string, 10); const connectorId = parseInt(params.connector_id as string, 10);
const { connectors, updateConnector } = useSearchSourceConnectors(); const { connectors, updateConnector } = useSearchSourceConnectors();
const [connector, setConnector] = useState<SearchSourceConnector | null>(null); const [connector, setConnector] = useState<SearchSourceConnector | null>(
null,
);
const [isLoading, setIsLoading] = useState(true); const [isLoading, setIsLoading] = useState(true);
const [isSubmitting, setIsSubmitting] = useState(false); const [isSubmitting, setIsSubmitting] = useState(false);
// console.log("connector", connector); // console.log("connector", connector);
@ -85,20 +88,20 @@ export default function EditConnectorPage() {
// Get API key field name based on connector type // Get API key field name based on connector type
const getApiKeyFieldName = (connectorType: string): string => { const getApiKeyFieldName = (connectorType: string): string => {
const fieldMap: Record<string, string> = { const fieldMap: Record<string, string> = {
"SERPER_API": "SERPER_API_KEY", SERPER_API: "SERPER_API_KEY",
"TAVILY_API": "TAVILY_API_KEY", TAVILY_API: "TAVILY_API_KEY",
"SLACK_CONNECTOR": "SLACK_BOT_TOKEN", SLACK_CONNECTOR: "SLACK_BOT_TOKEN",
"NOTION_CONNECTOR": "NOTION_INTEGRATION_TOKEN", NOTION_CONNECTOR: "NOTION_INTEGRATION_TOKEN",
"GITHUB_CONNECTOR": "GITHUB_PAT", GITHUB_CONNECTOR: "GITHUB_PAT",
"DISCORD_CONNECTOR": "DISCORD_BOT_TOKEN", DISCORD_CONNECTOR: "DISCORD_BOT_TOKEN",
"LINKUP_API": "LINKUP_API_KEY" LINKUP_API: "LINKUP_API_KEY",
}; };
return fieldMap[connectorType] || ""; return fieldMap[connectorType] || "";
}; };
// Find connector in the list // Find connector in the list
useEffect(() => { useEffect(() => {
const currentConnector = connectors.find(c => c.id === connectorId); const currentConnector = connectors.find((c) => c.id === connectorId);
if (currentConnector) { if (currentConnector) {
setConnector(currentConnector); setConnector(currentConnector);
@ -150,7 +153,9 @@ export default function EditConnectorPage() {
router.push(`/dashboard/${searchSpaceId}/connectors`); router.push(`/dashboard/${searchSpaceId}/connectors`);
} catch (error) { } catch (error) {
console.error("Error updating connector:", error); console.error("Error updating connector:", error);
toast.error(error instanceof Error ? error.message : "Failed to update connector"); toast.error(
error instanceof Error ? error.message : "Failed to update connector",
);
} finally { } finally {
setIsSubmitting(false); setIsSubmitting(false);
} }
@ -186,24 +191,30 @@ export default function EditConnectorPage() {
<Card className="border-2 border-border"> <Card className="border-2 border-border">
<CardHeader> <CardHeader>
<CardTitle className="text-2xl font-bold"> <CardTitle className="text-2xl font-bold">
Edit {connector ? getConnectorTypeDisplay(connector.connector_type) : ""} Connector Edit{" "}
{connector
? getConnectorTypeDisplay(connector.connector_type)
: ""}{" "}
Connector
</CardTitle> </CardTitle>
<CardDescription> <CardDescription>Update your connector settings.</CardDescription>
Update your connector settings.
</CardDescription>
</CardHeader> </CardHeader>
<CardContent> <CardContent>
<Alert className="mb-6 bg-muted"> <Alert className="mb-6 bg-muted">
<Info className="h-4 w-4" /> <Info className="h-4 w-4" />
<AlertTitle>API Key Security</AlertTitle> <AlertTitle>API Key Security</AlertTitle>
<AlertDescription> <AlertDescription>
Your API key is stored securely. For security reasons, we don't display your existing API key. Your API key is stored securely. For security reasons, we don't
If you don't update the API key field, your existing key will be preserved. display your existing API key. If you don't update the API key
field, your existing key will be preserved.
</AlertDescription> </AlertDescription>
</Alert> </Alert>
<Form {...form}> <Form {...form}>
<form onSubmit={form.handleSubmit(onSubmit)} className="space-y-6"> <form
onSubmit={form.handleSubmit(onSubmit)}
className="space-y-6"
>
<FormField <FormField
control={form.control} control={form.control}
name="name" name="name"
@ -245,7 +256,8 @@ export default function EditConnectorPage() {
? "Enter new Slack Bot Token (optional)" ? "Enter new Slack Bot Token (optional)"
: connector?.connector_type === "NOTION_CONNECTOR" : connector?.connector_type === "NOTION_CONNECTOR"
? "Enter new Notion Token (optional)" ? "Enter new Notion Token (optional)"
: connector?.connector_type === "GITHUB_CONNECTOR" : connector?.connector_type ===
"GITHUB_CONNECTOR"
? "Enter new GitHub PAT (optional)" ? "Enter new GitHub PAT (optional)"
: connector?.connector_type === "LINKUP_API" : connector?.connector_type === "LINKUP_API"
? "Enter new Linkup API Key (optional)" ? "Enter new Linkup API Key (optional)"

View file

@ -0,0 +1,472 @@
"use client";
import { useState } from "react";
import { useRouter, useParams } from "next/navigation";
import { motion } from "framer-motion";
import { zodResolver } from "@hookform/resolvers/zod";
import { useForm } from "react-hook-form";
import * as z from "zod";
import { toast } from "sonner";
import { ArrowLeft, Check, Info, Loader2 } from "lucide-react";
import { useSearchSourceConnectors } from "@/hooks/useSearchSourceConnectors";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
// Predicate used to sanity-check that a syntactically valid URL actually
// points at a Jira instance (Atlassian Cloud or a self-hosted "jira" host).
const looksLikeJiraInstance = (url: string): boolean =>
  url.includes("atlassian.net") || url.includes("jira");
// Zod schema for the Jira connector form: a display name plus the three
// credentials (instance URL, account email, API token) used for Basic auth.
const jiraConnectorFormSchema = z.object({
  // Friendly label shown in the connector list.
  name: z
    .string()
    .min(3, { message: "Connector name must be at least 3 characters." }),
  // Instance URL must parse as a URL *and* look like a Jira host.
  base_url: z
    .string()
    .url({
      message:
        "Please enter a valid Jira URL (e.g., https://yourcompany.atlassian.net)",
    })
    .refine(looksLikeJiraInstance, {
      message: "Please enter a valid Jira instance URL",
    }),
  // Atlassian account email paired with the API token.
  email: z.string().email({
    message: "Please enter a valid email address.",
  }),
  // API tokens are long; a 10-character floor catches obviously truncated input.
  api_token: z.string().min(10, {
    message: "Jira API Token is required and must be valid.",
  }),
});
// Form values are derived from the schema so the two can never drift apart.
type JiraConnectorFormValues = z.infer<typeof jiraConnectorFormSchema>;
/**
 * Page for creating a new Jira connector in the current search space.
 *
 * Renders two tabs: a "Connect" tab with a form collecting the connector
 * name, Jira instance URL, Atlassian account email and API token, and a
 * "Documentation" tab explaining token creation and what gets indexed.
 * On successful creation the user is routed back to the connectors list.
 */
export default function JiraConnectorPage() {
  const router = useRouter();
  const params = useParams();
  // Search space id comes from the dynamic route segment.
  const searchSpaceId = params.search_space_id as string;
  // Disables the submit button while the create call is in flight.
  const [isSubmitting, setIsSubmitting] = useState(false);
  const { createConnector } = useSearchSourceConnectors();
  // Initialize the form with zod-backed validation and sensible defaults.
  const form = useForm<JiraConnectorFormValues>({
    resolver: zodResolver(jiraConnectorFormSchema),
    defaultValues: {
      name: "Jira Connector",
      base_url: "",
      email: "",
      api_token: "",
    },
  });
  // Handle form submission: persist the connector, then navigate away.
  // NOTE(review): the config keys (JIRA_BASE_URL / JIRA_EMAIL /
  // JIRA_API_TOKEN) are what this page sends — confirm they match the
  // backend connector schema.
  const onSubmit = async (values: JiraConnectorFormValues) => {
    setIsSubmitting(true);
    try {
      await createConnector({
        name: values.name,
        connector_type: "JIRA_CONNECTOR",
        config: {
          JIRA_BASE_URL: values.base_url,
          JIRA_EMAIL: values.email,
          JIRA_API_TOKEN: values.api_token,
        },
        is_indexable: true,
        last_indexed_at: null,
      });
      toast.success("Jira connector created successfully!");
      // Navigate back to connectors page
      router.push(`/dashboard/${searchSpaceId}/connectors`);
    } catch (error) {
      // Surface a readable message; fall back to a generic one for
      // non-Error throwables.
      console.error("Error creating connector:", error);
      toast.error(
        error instanceof Error ? error.message : "Failed to create connector",
      );
    } finally {
      setIsSubmitting(false);
    }
  };
  return (
    <div className="container mx-auto py-8 max-w-3xl">
      {/* Back navigation to the connector catalog */}
      <Button
        variant="ghost"
        className="mb-6"
        onClick={() =>
          router.push(`/dashboard/${searchSpaceId}/connectors/add`)
        }
      >
        <ArrowLeft className="mr-2 h-4 w-4" />
        Back to Connectors
      </Button>
      <motion.div
        initial={{ opacity: 0, y: 20 }}
        animate={{ opacity: 1, y: 0 }}
        transition={{ duration: 0.5 }}
      >
        <Tabs defaultValue="connect" className="w-full">
          <TabsList className="grid w-full grid-cols-2 mb-6">
            <TabsTrigger value="connect">Connect</TabsTrigger>
            <TabsTrigger value="documentation">Documentation</TabsTrigger>
          </TabsList>
          {/* Connect tab: credential form */}
          <TabsContent value="connect">
            <Card className="border-2 border-border">
              <CardHeader>
                <CardTitle className="text-2xl font-bold">
                  Connect Jira Instance
                </CardTitle>
                <CardDescription>
                  Integrate with Jira to search and retrieve information from
                  your issues, tickets, and comments. This connector can index
                  your Jira content for search.
                </CardDescription>
              </CardHeader>
              <CardContent>
                {/* NOTE(review): UI copy says "Personal Access Token" but the
                    linked Atlassian page issues API tokens (as the form fields
                    below call them) — consider unifying the terminology. */}
                <Alert className="mb-6 bg-muted">
                  <Info className="h-4 w-4" />
                  <AlertTitle>Jira Personal Access Token Required</AlertTitle>
                  <AlertDescription>
                    You'll need a Jira Personal Access Token to use this
                    connector. You can create one from{" "}
                    <a
                      href="https://id.atlassian.com/manage-profile/security/api-tokens"
                      target="_blank"
                      rel="noopener noreferrer"
                      className="font-medium underline underline-offset-4"
                    >
                      Atlassian Account Settings
                    </a>
                  </AlertDescription>
                </Alert>
                <Form {...form}>
                  <form
                    onSubmit={form.handleSubmit(onSubmit)}
                    className="space-y-6"
                  >
                    {/* Connector display name */}
                    <FormField
                      control={form.control}
                      name="name"
                      render={({ field }) => (
                        <FormItem>
                          <FormLabel>Connector Name</FormLabel>
                          <FormControl>
                            <Input placeholder="My Jira Connector" {...field} />
                          </FormControl>
                          <FormDescription>
                            A friendly name to identify this connector.
                          </FormDescription>
                          <FormMessage />
                        </FormItem>
                      )}
                    />
                    {/* Jira instance base URL */}
                    <FormField
                      control={form.control}
                      name="base_url"
                      render={({ field }) => (
                        <FormItem>
                          <FormLabel>Jira Instance URL</FormLabel>
                          <FormControl>
                            <Input
                              placeholder="https://yourcompany.atlassian.net"
                              {...field}
                            />
                          </FormControl>
                          <FormDescription>
                            Your Jira instance URL. For Atlassian Cloud, this is
                            typically https://yourcompany.atlassian.net
                          </FormDescription>
                          <FormMessage />
                        </FormItem>
                      )}
                    />
                    {/* Atlassian account email */}
                    <FormField
                      control={form.control}
                      name="email"
                      render={({ field }) => (
                        <FormItem>
                          <FormLabel>Email Address</FormLabel>
                          <FormControl>
                            <Input
                              type="email"
                              placeholder="your.email@company.com"
                              {...field}
                            />
                          </FormControl>
                          <FormDescription>
                            Your Atlassian account email address.
                          </FormDescription>
                          <FormMessage />
                        </FormItem>
                      )}
                    />
                    {/* API token (rendered as a password input) */}
                    <FormField
                      control={form.control}
                      name="api_token"
                      render={({ field }) => (
                        <FormItem>
                          <FormLabel>API Token</FormLabel>
                          <FormControl>
                            <Input
                              type="password"
                              placeholder="Your Jira API Token"
                              {...field}
                            />
                          </FormControl>
                          <FormDescription>
                            Your Jira API Token will be encrypted and stored securely.
                          </FormDescription>
                          <FormMessage />
                        </FormItem>
                      )}
                    />
                    <div className="flex justify-end">
                      {/* Submit button shows a spinner while the request runs */}
                      <Button
                        type="submit"
                        disabled={isSubmitting}
                        className="w-full sm:w-auto"
                      >
                        {isSubmitting ? (
                          <>
                            <Loader2 className="mr-2 h-4 w-4 animate-spin" />
                            Connecting...
                          </>
                        ) : (
                          <>
                            <Check className="mr-2 h-4 w-4" />
                            Connect Jira
                          </>
                        )}
                      </Button>
                    </div>
                  </form>
                </Form>
              </CardContent>
              {/* Feature summary shown below the form */}
              <CardFooter className="flex flex-col items-start border-t bg-muted/50 px-6 py-4">
                <h4 className="text-sm font-medium">
                  What you get with Jira integration:
                </h4>
                <ul className="mt-2 list-disc pl-5 text-sm text-muted-foreground">
                  <li>Search through all your Jira issues and tickets</li>
                  <li>
                    Access issue descriptions, comments, and full discussion
                    threads
                  </li>
                  <li>
                    Connect your team's project management directly to your
                    search space
                  </li>
                  <li>
                    Keep your search results up-to-date with latest Jira content
                  </li>
                  <li>
                    Index your Jira issues for enhanced search capabilities
                  </li>
                  <li>
                    Search by issue keys, status, priority, and assignee
                    information
                  </li>
                </ul>
              </CardFooter>
            </Card>
          </TabsContent>
          {/* Documentation tab: setup guide and indexing details */}
          <TabsContent value="documentation">
            <Card className="border-2 border-border">
              <CardHeader>
                <CardTitle className="text-2xl font-bold">
                  Jira Connector Documentation
                </CardTitle>
                <CardDescription>
                  Learn how to set up and use the Jira connector to index your
                  project management data.
                </CardDescription>
              </CardHeader>
              <CardContent className="space-y-6">
                <div>
                  <h3 className="text-xl font-semibold mb-2">How it works</h3>
                  <p className="text-muted-foreground">
                    The Jira connector uses the Jira REST API with Basic Authentication
                    to fetch all issues and comments that your account has
                    access to within your Jira instance.
                  </p>
                  <ul className="mt-2 list-disc pl-5 text-muted-foreground">
                    <li>
                      For follow up indexing runs, the connector retrieves
                      issues and comments that have been updated since the last
                      indexing attempt.
                    </li>
                    <li>
                      Indexing is configured to run periodically, so updates
                      should appear in your search results within minutes.
                    </li>
                  </ul>
                </div>
                <Accordion type="single" collapsible className="w-full">
                  {/* Token creation and permission notes */}
                  <AccordionItem value="authorization">
                    <AccordionTrigger className="text-lg font-medium">
                      Authorization
                    </AccordionTrigger>
                    <AccordionContent className="space-y-4">
                      <Alert className="bg-muted">
                        <Info className="h-4 w-4" />
                        <AlertTitle>Read-Only Access is Sufficient</AlertTitle>
                        <AlertDescription>
                          You only need read access for this connector to work.
                          The API Token will only be used to read your Jira data.
                        </AlertDescription>
                      </Alert>
                      <div className="space-y-6">
                        <div>
                          <h4 className="font-medium mb-2">
                            Step 1: Create an API Token
                          </h4>
                          <ol className="list-decimal pl-5 space-y-3">
                            <li>Log in to your Atlassian account</li>
                            <li>
                              Navigate to{" "}
                              <a
                                href="https://id.atlassian.com/manage-profile/security/api-tokens"
                                target="_blank"
                                rel="noopener noreferrer"
                                className="font-medium underline underline-offset-4"
                              >
                                https://id.atlassian.com/manage-profile/security/api-tokens
                              </a>
                            </li>
                            <li>
                              Click <strong>Create API token</strong>
                            </li>
                            <li>
                              Enter a label for your token (like "SurfSense
                              Connector")
                            </li>
                            <li>
                              Click <strong>Create</strong>
                            </li>
                            <li>
                              Copy the generated token as it will only be shown
                              once
                            </li>
                          </ol>
                        </div>
                        <div>
                          <h4 className="font-medium mb-2">
                            Step 2: Grant necessary access
                          </h4>
                          <p className="text-muted-foreground mb-3">
                            The API Token will have access to all projects and
                            issues that your user account can see. Make sure your
                            account has appropriate permissions for the projects
                            you want to index.
                          </p>
                          <Alert className="bg-muted">
                            <Info className="h-4 w-4" />
                            <AlertTitle>Data Privacy</AlertTitle>
                            <AlertDescription>
                              Only issues, comments, and basic metadata will be
                              indexed. Jira attachments and linked files are not
                              indexed by this connector.
                            </AlertDescription>
                          </Alert>
                        </div>
                      </div>
                    </AccordionContent>
                  </AccordionItem>
                  {/* Step-by-step indexing walkthrough */}
                  <AccordionItem value="indexing">
                    <AccordionTrigger className="text-lg font-medium">
                      Indexing
                    </AccordionTrigger>
                    <AccordionContent className="space-y-4">
                      <ol className="list-decimal pl-5 space-y-3">
                        <li>
                          Navigate to the Connector Dashboard and select the{" "}
                          <strong>Jira</strong> Connector.
                        </li>
                        <li>
                          Enter your <strong>Jira Instance URL</strong> (e.g.,
                          https://yourcompany.atlassian.net)
                        </li>
                        <li>
                          Place your <strong>Personal Access Token</strong> in
                          the form field.
                        </li>
                        <li>
                          Click <strong>Connect</strong> to establish the
                          connection.
                        </li>
                        <li>
                          Once connected, your Jira issues will be indexed
                          automatically.
                        </li>
                      </ol>
                      <Alert className="bg-muted">
                        <Info className="h-4 w-4" />
                        <AlertTitle>What Gets Indexed</AlertTitle>
                        <AlertDescription>
                          <p className="mb-2">
                            The Jira connector indexes the following data:
                          </p>
                          <ul className="list-disc pl-5">
                            <li>Issue keys and summaries (e.g., PROJ-123)</li>
                            <li>Issue descriptions</li>
                            <li>Issue comments and discussion threads</li>
                            <li>
                              Issue status, priority, and type information
                            </li>
                            <li>Assignee and reporter information</li>
                            <li>Project information</li>
                          </ul>
                        </AlertDescription>
                      </Alert>
                    </AccordionContent>
                  </AccordionItem>
                </Accordion>
              </CardContent>
            </Card>
          </TabsContent>
        </Tabs>
      </motion.div>
    </div>
  );
}

View file

@ -1,8 +1,17 @@
"use client"; "use client";
import { Badge } from "@/components/ui/badge"; import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { Card, CardContent, CardFooter, CardHeader } from "@/components/ui/card"; import {
import { Collapsible, CollapsibleContent, CollapsibleTrigger } from "@/components/ui/collapsible"; Card,
CardContent,
CardFooter,
CardHeader,
} from "@/components/ui/card";
import {
Collapsible,
CollapsibleContent,
CollapsibleTrigger,
} from "@/components/ui/collapsible";
import { import {
IconBrandDiscord, IconBrandDiscord,
IconBrandGithub, IconBrandGithub,
@ -67,23 +76,26 @@ const connectorCategories: ConnectorCategory[] = [
{ {
id: "slack-connector", id: "slack-connector",
title: "Slack", title: "Slack",
description: "Connect to your Slack workspace to access messages and channels.", description:
"Connect to your Slack workspace to access messages and channels.",
icon: <IconBrandSlack className="h-6 w-6" />, icon: <IconBrandSlack className="h-6 w-6" />,
status: "available", status: "available",
}, },
{ {
id: "ms-teams", id: "ms-teams",
title: "Microsoft Teams", title: "Microsoft Teams",
description: "Connect to Microsoft Teams to access your team's conversations.", description:
"Connect to Microsoft Teams to access your team's conversations.",
icon: <IconBrandWindows className="h-6 w-6" />, icon: <IconBrandWindows className="h-6 w-6" />,
status: "coming-soon", status: "coming-soon",
}, },
{ {
id: "discord-connector", id: "discord-connector",
title: "Discord", title: "Discord",
description: "Connect to Discord servers to access messages and channels.", description:
"Connect to Discord servers to access messages and channels.",
icon: <IconBrandDiscord className="h-6 w-6" />, icon: <IconBrandDiscord className="h-6 w-6" />,
status: "available" status: "available",
}, },
], ],
}, },
@ -94,16 +106,18 @@ const connectorCategories: ConnectorCategory[] = [
{ {
id: "linear-connector", id: "linear-connector",
title: "Linear", title: "Linear",
description: "Connect to Linear to search issues, comments and project data.", description:
"Connect to Linear to search issues, comments and project data.",
icon: <IconLayoutKanban className="h-6 w-6" />, icon: <IconLayoutKanban className="h-6 w-6" />,
status: "available", status: "available",
}, },
{ {
id: "jira-connector", id: "jira-connector",
title: "Jira", title: "Jira",
description: "Connect to Jira to search issues, tickets and project data.", description:
"Connect to Jira to search issues, tickets and project data.",
icon: <IconTicket className="h-6 w-6" />, icon: <IconTicket className="h-6 w-6" />,
status: "coming-soon", status: "available",
}, },
], ],
}, },
@ -114,14 +128,16 @@ const connectorCategories: ConnectorCategory[] = [
{ {
id: "notion-connector", id: "notion-connector",
title: "Notion", title: "Notion",
description: "Connect to your Notion workspace to access pages and databases.", description:
"Connect to your Notion workspace to access pages and databases.",
icon: <IconBrandNotion className="h-6 w-6" />, icon: <IconBrandNotion className="h-6 w-6" />,
status: "available", status: "available",
}, },
{ {
id: "github-connector", id: "github-connector",
title: "GitHub", title: "GitHub",
description: "Connect a GitHub PAT to index code and docs from accessible repositories.", description:
"Connect a GitHub PAT to index code and docs from accessible repositories.",
icon: <IconBrandGithub className="h-6 w-6" />, icon: <IconBrandGithub className="h-6 w-6" />,
status: "available", status: "available",
}, },
@ -141,7 +157,8 @@ const connectorCategories: ConnectorCategory[] = [
{ {
id: "zoom", id: "zoom",
title: "Zoom", title: "Zoom",
description: "Connect to Zoom to access meeting recordings and transcripts.", description:
"Connect to Zoom to access meeting recordings and transcripts.",
icon: <IconBrandZoom className="h-6 w-6" />, icon: <IconBrandZoom className="h-6 w-6" />,
status: "coming-soon", status: "coming-soon",
}, },
@ -152,7 +169,7 @@ const connectorCategories: ConnectorCategory[] = [
// Animation variants // Animation variants
const fadeIn = { const fadeIn = {
hidden: { opacity: 0 }, hidden: { opacity: 0 },
visible: { opacity: 1, transition: { duration: 0.4 } } visible: { opacity: 1, transition: { duration: 0.4 } },
}; };
const staggerContainer = { const staggerContainer = {
@ -160,9 +177,9 @@ const staggerContainer = {
visible: { visible: {
opacity: 1, opacity: 1,
transition: { transition: {
staggerChildren: 0.1 staggerChildren: 0.1,
} },
} },
}; };
const cardVariants = { const cardVariants = {
@ -173,30 +190,36 @@ const cardVariants = {
transition: { transition: {
type: "spring", type: "spring",
stiffness: 260, stiffness: 260,
damping: 20 damping: 20,
} },
}, },
hover: { hover: {
scale: 1.02, scale: 1.02,
boxShadow: "0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05)", boxShadow:
"0 10px 15px -3px rgba(0, 0, 0, 0.1), 0 4px 6px -2px rgba(0, 0, 0, 0.05)",
transition: { transition: {
type: "spring", type: "spring",
stiffness: 400, stiffness: 400,
damping: 10 damping: 10,
} },
} },
}; };
export default function ConnectorsPage() { export default function ConnectorsPage() {
const params = useParams(); const params = useParams();
const searchSpaceId = params.search_space_id as string; const searchSpaceId = params.search_space_id as string;
const [expandedCategories, setExpandedCategories] = useState<string[]>(["search-engines", "knowledge-bases", "project-management", "team-chats"]); const [expandedCategories, setExpandedCategories] = useState<string[]>([
"search-engines",
"knowledge-bases",
"project-management",
"team-chats",
]);
const toggleCategory = (categoryId: string) => { const toggleCategory = (categoryId: string) => {
setExpandedCategories(prev => setExpandedCategories((prev) =>
prev.includes(categoryId) prev.includes(categoryId)
? prev.filter(id => id !== categoryId) ? prev.filter((id) => id !== categoryId)
: [...prev, categoryId] : [...prev, categoryId],
); );
}; };
@ -207,7 +230,7 @@ export default function ConnectorsPage() {
animate={{ opacity: 1, y: 0 }} animate={{ opacity: 1, y: 0 }}
transition={{ transition={{
duration: 0.6, duration: 0.6,
ease: [0.22, 1, 0.36, 1] ease: [0.22, 1, 0.36, 1],
}} }}
className="mb-12 text-center" className="mb-12 text-center"
> >
@ -215,7 +238,8 @@ export default function ConnectorsPage() {
Connect Your Tools Connect Your Tools
</h1> </h1>
<p className="text-muted-foreground mt-3 text-lg max-w-2xl mx-auto"> <p className="text-muted-foreground mt-3 text-lg max-w-2xl mx-auto">
Integrate with your favorite services to enhance your research capabilities. Integrate with your favorite services to enhance your research
capabilities.
</p> </p>
</motion.div> </motion.div>
@ -239,9 +263,17 @@ export default function ConnectorsPage() {
<div className="flex items-center justify-between space-x-4 p-4"> <div className="flex items-center justify-between space-x-4 p-4">
<h3 className="text-xl font-semibold">{category.title}</h3> <h3 className="text-xl font-semibold">{category.title}</h3>
<CollapsibleTrigger asChild> <CollapsibleTrigger asChild>
<Button variant="ghost" size="sm" className="w-9 p-0 hover:bg-muted"> <Button
variant="ghost"
size="sm"
className="w-9 p-0 hover:bg-muted"
>
<motion.div <motion.div
animate={{ rotate: expandedCategories.includes(category.id) ? 180 : 0 }} animate={{
rotate: expandedCategories.includes(category.id)
? 180
: 0,
}}
transition={{ duration: 0.3, ease: "easeInOut" }} transition={{ duration: 0.3, ease: "easeInOut" }}
> >
<IconChevronDown className="h-5 w-5" /> <IconChevronDown className="h-5 w-5" />
@ -279,14 +311,22 @@ export default function ConnectorsPage() {
</div> </div>
<div> <div>
<div className="flex items-center gap-2"> <div className="flex items-center gap-2">
<h3 className="font-medium">{connector.title}</h3> <h3 className="font-medium">
{connector.title}
</h3>
{connector.status === "coming-soon" && ( {connector.status === "coming-soon" && (
<Badge variant="outline" className="text-xs bg-amber-100 dark:bg-amber-950 text-amber-800 dark:text-amber-300 border-amber-200 dark:border-amber-800"> <Badge
variant="outline"
className="text-xs bg-amber-100 dark:bg-amber-950 text-amber-800 dark:text-amber-300 border-amber-200 dark:border-amber-800"
>
Coming soon Coming soon
</Badge> </Badge>
)} )}
{connector.status === "connected" && ( {connector.status === "connected" && (
<Badge variant="outline" className="text-xs bg-green-100 dark:bg-green-950 text-green-800 dark:text-green-300 border-green-200 dark:border-green-800"> <Badge
variant="outline"
className="text-xs bg-green-100 dark:bg-green-950 text-green-800 dark:text-green-300 border-green-200 dark:border-green-800"
>
Connected Connected
</Badge> </Badge>
)} )}
@ -301,28 +341,45 @@ export default function ConnectorsPage() {
</CardContent> </CardContent>
<CardFooter className="mt-auto pt-2"> <CardFooter className="mt-auto pt-2">
{connector.status === 'available' && ( {connector.status === "available" && (
<Link href={`/dashboard/${searchSpaceId}/connectors/add/${connector.id}`} className="w-full"> <Link
<Button variant="default" className="w-full group"> href={`/dashboard/${searchSpaceId}/connectors/add/${connector.id}`}
className="w-full"
>
<Button
variant="default"
className="w-full group"
>
<span>Connect</span> <span>Connect</span>
<motion.div <motion.div
className="ml-1" className="ml-1"
initial={{ x: 0 }} initial={{ x: 0 }}
whileHover={{ x: 3 }} whileHover={{ x: 3 }}
transition={{ type: "spring", stiffness: 400, damping: 10 }} transition={{
type: "spring",
stiffness: 400,
damping: 10,
}}
> >
<IconChevronRight className="h-4 w-4" /> <IconChevronRight className="h-4 w-4" />
</motion.div> </motion.div>
</Button> </Button>
</Link> </Link>
)} )}
{connector.status === 'coming-soon' && ( {connector.status === "coming-soon" && (
<Button variant="outline" disabled className="w-full opacity-70"> <Button
variant="outline"
disabled
className="w-full opacity-70"
>
Coming Soon Coming Soon
</Button> </Button>
)} )}
{connector.status === 'connected' && ( {connector.status === "connected" && (
<Button variant="outline" className="w-full border-green-500 text-green-600 hover:bg-green-50 dark:hover:bg-green-950"> <Button
variant="outline"
className="w-full border-green-500 text-green-600 hover:bg-green-50 dark:hover:bg-green-950"
>
Manage Manage
</Button> </Button>
)} )}

View file

@ -26,8 +26,16 @@ import {
} from "@/components/ui/dropdown-menu"; } from "@/components/ui/dropdown-menu";
import { Input } from "@/components/ui/input"; import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label"; import { Label } from "@/components/ui/label";
import { Pagination, PaginationContent, PaginationItem } from "@/components/ui/pagination"; import {
import { Popover, PopoverContent, PopoverTrigger } from "@/components/ui/popover"; Pagination,
PaginationContent,
PaginationItem,
} from "@/components/ui/pagination";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import { import {
Select, Select,
SelectContent, SelectContent,
@ -45,7 +53,15 @@ import {
} from "@/components/ui/table"; } from "@/components/ui/table";
import { useDocuments } from "@/hooks/use-documents"; import { useDocuments } from "@/hooks/use-documents";
import { cn } from "@/lib/utils"; import { cn } from "@/lib/utils";
import { IconBrandDiscord, IconBrandGithub, IconBrandNotion, IconBrandSlack, IconBrandYoutube, IconLayoutKanban } from "@tabler/icons-react"; import {
IconBrandDiscord,
IconBrandGithub,
IconBrandNotion,
IconBrandSlack,
IconBrandYoutube,
IconLayoutKanban,
IconTicket,
} from "@tabler/icons-react";
import { import {
ColumnDef, ColumnDef,
ColumnFiltersState, ColumnFiltersState,
@ -81,10 +97,17 @@ import {
ListFilter, ListFilter,
MoreHorizontal, MoreHorizontal,
Trash, Trash,
Webhook Webhook,
} from "lucide-react"; } from "lucide-react";
import { useParams } from "next/navigation"; import { useParams } from "next/navigation";
import React, { useContext, useEffect, useId, useMemo, useRef, useState } from "react"; import React, {
useContext,
useEffect,
useId,
useMemo,
useRef,
useState,
} from "react";
import ReactMarkdown from "react-markdown"; import ReactMarkdown from "react-markdown";
import rehypeRaw from "rehype-raw"; import rehypeRaw from "rehype-raw";
import rehypeSanitize from "rehype-sanitize"; import rehypeSanitize from "rehype-sanitize";
@ -97,19 +120,27 @@ const fadeInScale = {
visible: { visible: {
opacity: 1, opacity: 1,
scale: 1, scale: 1,
transition: { type: "spring", stiffness: 300, damping: 30 } transition: { type: "spring", stiffness: 300, damping: 30 },
}, },
exit: { exit: {
opacity: 0, opacity: 0,
scale: 0.95, scale: 0.95,
transition: { duration: 0.15 } transition: { duration: 0.15 },
} },
}; };
type Document = { type Document = {
id: number; id: number;
title: string; title: string;
document_type: "EXTENSION" | "CRAWLED_URL" | "SLACK_CONNECTOR" | "NOTION_CONNECTOR" | "FILE" | "YOUTUBE_VIDEO" | "LINEAR_CONNECTOR" | "DISCORD_CONNECTOR"; document_type:
| "EXTENSION"
| "CRAWLED_URL"
| "SLACK_CONNECTOR"
| "NOTION_CONNECTOR"
| "FILE"
| "YOUTUBE_VIDEO"
| "LINEAR_CONNECTOR"
| "DISCORD_CONNECTOR";
document_metadata: any; document_metadata: any;
content: string; content: string;
created_at: string; created_at: string;
@ -117,13 +148,21 @@ type Document = {
}; };
// Custom filter function for multi-column searching // Custom filter function for multi-column searching
const multiColumnFilterFn: FilterFn<Document> = (row, columnId, filterValue) => { const multiColumnFilterFn: FilterFn<Document> = (
row,
columnId,
filterValue,
) => {
const searchableRowContent = `${row.original.title}`.toLowerCase(); const searchableRowContent = `${row.original.title}`.toLowerCase();
const searchTerm = (filterValue ?? "").toLowerCase(); const searchTerm = (filterValue ?? "").toLowerCase();
return searchableRowContent.includes(searchTerm); return searchableRowContent.includes(searchTerm);
}; };
const statusFilterFn: FilterFn<Document> = (row, columnId, filterValue: string[]) => { const statusFilterFn: FilterFn<Document> = (
row,
columnId,
filterValue: string[],
) => {
if (!filterValue?.length) return true; if (!filterValue?.length) return true;
const status = row.getValue(columnId) as string; const status = row.getValue(columnId) as string;
return filterValue.includes(status); return filterValue.includes(status);
@ -139,6 +178,7 @@ const documentTypeIcons = {
YOUTUBE_VIDEO: IconBrandYoutube, YOUTUBE_VIDEO: IconBrandYoutube,
GITHUB_CONNECTOR: IconBrandGithub, GITHUB_CONNECTOR: IconBrandGithub,
LINEAR_CONNECTOR: IconLayoutKanban, LINEAR_CONNECTOR: IconLayoutKanban,
JIRA_CONNECTOR: IconTicket,
DISCORD_CONNECTOR: IconBrandDiscord, DISCORD_CONNECTOR: IconBrandDiscord,
} as const; } as const;
@ -148,7 +188,8 @@ const columns: ColumnDef<Document>[] = [
header: ({ table }) => ( header: ({ table }) => (
<Checkbox <Checkbox
checked={ checked={
table.getIsAllPageRowsSelected() || (table.getIsSomePageRowsSelected() && "indeterminate") table.getIsAllPageRowsSelected() ||
(table.getIsSomePageRowsSelected() && "indeterminate")
} }
onCheckedChange={(value) => table.toggleAllPageRowsSelected(!!value)} onCheckedChange={(value) => table.toggleAllPageRowsSelected(!!value)}
aria-label="Select all" aria-label="Select all"
@ -175,7 +216,7 @@ const columns: ColumnDef<Document>[] = [
className="flex items-center gap-2 font-medium" className="flex items-center gap-2 font-medium"
whileHover={{ scale: 1.02 }} whileHover={{ scale: 1.02 }}
transition={{ type: "spring", stiffness: 300 }} transition={{ type: "spring", stiffness: 300 }}
style={{ display: 'flex' }} style={{ display: "flex" }}
> >
<Icon size={16} className="text-muted-foreground shrink-0" /> <Icon size={16} className="text-muted-foreground shrink-0" />
<span>{row.getValue("title")}</span> <span>{row.getValue("title")}</span>
@ -188,7 +229,9 @@ const columns: ColumnDef<Document>[] = [
header: "Type", header: "Type",
accessorKey: "document_type", accessorKey: "document_type",
cell: ({ row }) => { cell: ({ row }) => {
const type = row.getValue("document_type") as keyof typeof documentTypeIcons; const type = row.getValue(
"document_type",
) as keyof typeof documentTypeIcons;
const Icon = documentTypeIcons[type]; const Icon = documentTypeIcons[type];
return ( return (
<div className="flex items-center gap-2"> <div className="flex items-center gap-2">
@ -196,7 +239,10 @@ const columns: ColumnDef<Document>[] = [
<Icon size={20} className="text-primary" /> <Icon size={20} className="text-primary" />
</div> </div>
<span className="font-medium text-xs"> <span className="font-medium text-xs">
{type.split('_').map(word => word.charAt(0) + word.slice(1).toLowerCase()).join(' ')} {type
.split("_")
.map((word) => word.charAt(0) + word.slice(1).toLowerCase())
.join(" ")}
</span> </span>
</div> </div>
); );
@ -211,9 +257,8 @@ const columns: ColumnDef<Document>[] = [
const title = row.getValue("title") as string; const title = row.getValue("title") as string;
// Create a truncated preview (first 150 characters) // Create a truncated preview (first 150 characters)
const previewContent = content.length > 150 const previewContent =
? content.substring(0, 150) + "..." content.length > 150 ? content.substring(0, 150) + "..." : content;
: content;
return ( return (
<div className="flex flex-col gap-2"> <div className="flex flex-col gap-2">
@ -223,17 +268,37 @@ const columns: ColumnDef<Document>[] = [
remarkPlugins={[remarkGfm]} remarkPlugins={[remarkGfm]}
components={{ components={{
// Define custom components for markdown elements // Define custom components for markdown elements
p: ({node, ...props}) => <p className="markdown-paragraph" {...props} />, p: ({ node, ...props }) => (
a: ({node, ...props}) => <a className="text-primary hover:underline" {...props} />, <p className="markdown-paragraph" {...props} />
ul: ({node, ...props}) => <ul className="list-disc pl-5" {...props} />, ),
ol: ({node, ...props}) => <ol className="list-decimal pl-5" {...props} />, a: ({ node, ...props }) => (
code: ({node, className, children, ...props}: any) => { <a className="text-primary hover:underline" {...props} />
const match = /language-(\w+)/.exec(className || ''); ),
ul: ({ node, ...props }) => (
<ul className="list-disc pl-5" {...props} />
),
ol: ({ node, ...props }) => (
<ol className="list-decimal pl-5" {...props} />
),
code: ({ node, className, children, ...props }: any) => {
const match = /language-(\w+)/.exec(className || "");
const isInline = !match; const isInline = !match;
return isInline return isInline ? (
? <code className="bg-muted px-1 py-0.5 rounded text-xs" {...props}>{children}</code> <code
: <code className="block bg-muted p-2 rounded text-xs" {...props}>{children}</code> className="bg-muted px-1 py-0.5 rounded text-xs"
} {...props}
>
{children}
</code>
) : (
<code
className="block bg-muted p-2 rounded text-xs"
{...props}
>
{children}
</code>
);
},
}} }}
> >
{previewContent} {previewContent}
@ -281,7 +346,8 @@ export default function DocumentsTable() {
const id = useId(); const id = useId();
const params = useParams(); const params = useParams();
const searchSpaceId = Number(params.search_space_id); const searchSpaceId = Number(params.search_space_id);
const { documents, loading, error, refreshDocuments, deleteDocument } = useDocuments(searchSpaceId); const { documents, loading, error, refreshDocuments, deleteDocument } =
useDocuments(searchSpaceId);
// console.log("Search Space ID:", searchSpaceId); // console.log("Search Space ID:", searchSpaceId);
// console.log("Documents loaded:", documents?.length); // console.log("Documents loaded:", documents?.length);
@ -323,7 +389,7 @@ export default function DocumentsTable() {
} }
// Create an array of promises for each delete operation // Create an array of promises for each delete operation
const deletePromises = selectedRows.map(row => { const deletePromises = selectedRows.map((row) => {
// console.log("Deleting row with ID:", row.original.id); // console.log("Deleting row with ID:", row.original.id);
return deleteDocument(row.original.id); return deleteDocument(row.original.id);
}); });
@ -334,10 +400,12 @@ export default function DocumentsTable() {
// console.log("Delete results:", results); // console.log("Delete results:", results);
// Check if all deletions were successful // Check if all deletions were successful
const allSuccessful = results.every(result => result === true); const allSuccessful = results.every((result) => result === true);
if (allSuccessful) { if (allSuccessful) {
toast.success(`Successfully deleted ${selectedRows.length} document(s)`); toast.success(
`Successfully deleted ${selectedRows.length} document(s)`,
);
} else { } else {
toast.error("Some documents could not be deleted"); toast.error("Some documents could not be deleted");
} }
@ -391,12 +459,16 @@ export default function DocumentsTable() {
}, [table.getColumn("document_type")?.getFacetedUniqueValues()]); }, [table.getColumn("document_type")?.getFacetedUniqueValues()]);
const selectedStatuses = useMemo(() => { const selectedStatuses = useMemo(() => {
const filterValue = table.getColumn("document_type")?.getFilterValue() as string[]; const filterValue = table
.getColumn("document_type")
?.getFilterValue() as string[];
return filterValue ?? []; return filterValue ?? [];
}, [table.getColumn("document_type")?.getFilterValue()]); }, [table.getColumn("document_type")?.getFilterValue()]);
const handleStatusChange = (checked: boolean, value: string) => { const handleStatusChange = (checked: boolean, value: string) => {
const filterValue = table.getColumn("document_type")?.getFilterValue() as string[]; const filterValue = table
.getColumn("document_type")
?.getFilterValue() as string[];
const newFilterValue = filterValue ? [...filterValue] : []; const newFilterValue = filterValue ? [...filterValue] : [];
if (checked) { if (checked) {
@ -408,14 +480,18 @@ export default function DocumentsTable() {
} }
} }
table.getColumn("document_type")?.setFilterValue(newFilterValue.length ? newFilterValue : undefined); table
.getColumn("document_type")
?.setFilterValue(newFilterValue.length ? newFilterValue : undefined);
}; };
return ( return (
<DocumentsContext.Provider value={{ <DocumentsContext.Provider
value={{
deleteDocument: deleteDocument || (() => Promise.resolve(false)), deleteDocument: deleteDocument || (() => Promise.resolve(false)),
refreshDocuments: refreshDocuments || (() => Promise.resolve()) refreshDocuments: refreshDocuments || (() => Promise.resolve()),
}}> }}
>
<motion.div <motion.div
initial={{ opacity: 0, y: 20 }} initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }} animate={{ opacity: 1, y: 0 }}
@ -431,7 +507,7 @@ export default function DocumentsTable() {
type: "spring", type: "spring",
stiffness: 300, stiffness: 300,
damping: 30, damping: 30,
delay: 0.1 delay: 0.1,
}} }}
> >
<div className="flex items-center gap-3"> <div className="flex items-center gap-3">
@ -449,8 +525,12 @@ export default function DocumentsTable() {
"peer min-w-60 ps-9", "peer min-w-60 ps-9",
Boolean(table.getColumn("title")?.getFilterValue()) && "pe-9", Boolean(table.getColumn("title")?.getFilterValue()) && "pe-9",
)} )}
value={(table.getColumn("title")?.getFilterValue() ?? "") as string} value={
onChange={(e) => table.getColumn("title")?.setFilterValue(e.target.value)} (table.getColumn("title")?.getFilterValue() ?? "") as string
}
onChange={(e) =>
table.getColumn("title")?.setFilterValue(e.target.value)
}
placeholder="Filter by title..." placeholder="Filter by title..."
type="text" type="text"
aria-label="Filter by title" aria-label="Filter by title"
@ -519,7 +599,9 @@ export default function DocumentsTable() {
variants={fadeInScale} variants={fadeInScale}
> >
<div className="space-y-3"> <div className="space-y-3">
<div className="text-xs font-medium text-muted-foreground">Filters</div> <div className="text-xs font-medium text-muted-foreground">
Filters
</div>
<div className="space-y-3"> <div className="space-y-3">
<AnimatePresence> <AnimatePresence>
{uniqueStatusValues.map((value, i) => ( {uniqueStatusValues.map((value, i) => (
@ -534,7 +616,9 @@ export default function DocumentsTable() {
<Checkbox <Checkbox
id={`${id}-${i}`} id={`${id}-${i}`}
checked={selectedStatuses.includes(value)} checked={selectedStatuses.includes(value)}
onCheckedChange={(checked: boolean) => handleStatusChange(checked, value)} onCheckedChange={(checked: boolean) =>
handleStatusChange(checked, value)
}
/> />
<Label <Label
htmlFor={`${id}-${i}`} htmlFor={`${id}-${i}`}
@ -589,7 +673,9 @@ export default function DocumentsTable() {
key={column.id} key={column.id}
className="capitalize" className="capitalize"
checked={column.getIsVisible()} checked={column.getIsVisible()}
onCheckedChange={(value) => column.toggleVisibility(!!value)} onCheckedChange={(value) =>
column.toggleVisibility(!!value)
}
onSelect={(event) => event.preventDefault()} onSelect={(event) => event.preventDefault()}
> >
{column.id} {column.id}
@ -624,20 +710,32 @@ export default function DocumentsTable() {
className="flex size-9 shrink-0 items-center justify-center rounded-full border border-border" className="flex size-9 shrink-0 items-center justify-center rounded-full border border-border"
aria-hidden="true" aria-hidden="true"
> >
<CircleAlert className="opacity-80" size={16} strokeWidth={2} /> <CircleAlert
className="opacity-80"
size={16}
strokeWidth={2}
/>
</div> </div>
<AlertDialogHeader> <AlertDialogHeader>
<AlertDialogTitle>Are you absolutely sure?</AlertDialogTitle> <AlertDialogTitle>
Are you absolutely sure?
</AlertDialogTitle>
<AlertDialogDescription> <AlertDialogDescription>
This action cannot be undone. This will permanently delete{" "} This action cannot be undone. This will permanently
{table.getSelectedRowModel().rows.length} selected{" "} delete {table.getSelectedRowModel().rows.length}{" "}
{table.getSelectedRowModel().rows.length === 1 ? "row" : "rows"}. selected{" "}
{table.getSelectedRowModel().rows.length === 1
? "row"
: "rows"}
.
</AlertDialogDescription> </AlertDialogDescription>
</AlertDialogHeader> </AlertDialogHeader>
</div> </div>
<AlertDialogFooter> <AlertDialogFooter>
<AlertDialogCancel>Cancel</AlertDialogCancel> <AlertDialogCancel>Cancel</AlertDialogCancel>
<AlertDialogAction onClick={handleDeleteRows}>Delete</AlertDialogAction> <AlertDialogAction onClick={handleDeleteRows}>
Delete
</AlertDialogAction>
</AlertDialogFooter> </AlertDialogFooter>
</AlertDialogContent> </AlertDialogContent>
</AlertDialog> </AlertDialog>
@ -659,21 +757,25 @@ export default function DocumentsTable() {
type: "spring", type: "spring",
stiffness: 300, stiffness: 300,
damping: 30, damping: 30,
delay: 0.2 delay: 0.2,
}} }}
> >
{loading ? ( {loading ? (
<div className="flex h-[400px] w-full items-center justify-center"> <div className="flex h-[400px] w-full items-center justify-center">
<div className="flex flex-col items-center gap-2"> <div className="flex flex-col items-center gap-2">
<div className="h-8 w-8 animate-spin rounded-full border-b-2 border-primary"></div> <div className="h-8 w-8 animate-spin rounded-full border-b-2 border-primary"></div>
<p className="text-sm text-muted-foreground">Loading documents...</p> <p className="text-sm text-muted-foreground">
Loading documents...
</p>
</div> </div>
</div> </div>
) : error ? ( ) : error ? (
<div className="flex h-[400px] w-full items-center justify-center"> <div className="flex h-[400px] w-full items-center justify-center">
<div className="flex flex-col items-center gap-2"> <div className="flex flex-col items-center gap-2">
<AlertCircle className="h-8 w-8 text-destructive" /> <AlertCircle className="h-8 w-8 text-destructive" />
<p className="text-sm text-destructive">Error loading documents</p> <p className="text-sm text-destructive">
Error loading documents
</p>
<Button <Button
variant="outline" variant="outline"
size="sm" size="sm"
@ -688,14 +790,19 @@ export default function DocumentsTable() {
<div className="flex h-[400px] w-full items-center justify-center"> <div className="flex h-[400px] w-full items-center justify-center">
<div className="flex flex-col items-center gap-2"> <div className="flex flex-col items-center gap-2">
<FileX className="h-8 w-8 text-muted-foreground" /> <FileX className="h-8 w-8 text-muted-foreground" />
<p className="text-sm text-muted-foreground">No documents found</p> <p className="text-sm text-muted-foreground">
No documents found
</p>
</div> </div>
</div> </div>
) : ( ) : (
<Table className="table-fixed w-full"> <Table className="table-fixed w-full">
<TableHeader> <TableHeader>
{table.getHeaderGroups().map((headerGroup) => ( {table.getHeaderGroups().map((headerGroup) => (
<TableRow key={headerGroup.id} className="hover:bg-transparent"> <TableRow
key={headerGroup.id}
className="hover:bg-transparent"
>
{headerGroup.headers.map((header) => { {headerGroup.headers.map((header) => {
return ( return (
<TableHead <TableHead
@ -720,9 +827,14 @@ export default function DocumentsTable() {
header.column.getToggleSortingHandler()?.(e); header.column.getToggleSortingHandler()?.(e);
} }
}} }}
tabIndex={header.column.getCanSort() ? 0 : undefined} tabIndex={
header.column.getCanSort() ? 0 : undefined
}
> >
{flexRender(header.column.columnDef.header, header.getContext())} {flexRender(
header.column.columnDef.header,
header.getContext(),
)}
{{ {{
asc: ( asc: (
<ChevronUp <ChevronUp
@ -743,7 +855,10 @@ export default function DocumentsTable() {
}[header.column.getIsSorted() as string] ?? null} }[header.column.getIsSorted() as string] ?? null}
</div> </div>
) : ( ) : (
flexRender(header.column.columnDef.header, header.getContext()) flexRender(
header.column.columnDef.header,
header.getContext(),
)
)} )}
</TableHead> </TableHead>
); );
@ -765,25 +880,34 @@ export default function DocumentsTable() {
type: "spring", type: "spring",
stiffness: 300, stiffness: 300,
damping: 30, damping: 30,
delay: index * 0.03 delay: index * 0.03,
} },
}} }}
exit={{ opacity: 0, y: -10 }} exit={{ opacity: 0, y: -10 }}
className={cn( className={cn(
"border-b transition-colors hover:bg-muted/50", "border-b transition-colors hover:bg-muted/50",
row.getIsSelected() ? "bg-muted/50" : "" row.getIsSelected() ? "bg-muted/50" : "",
)} )}
> >
{row.getVisibleCells().map((cell) => ( {row.getVisibleCells().map((cell) => (
<TableCell key={cell.id} className="px-4 py-3 last:py-3"> <TableCell
{flexRender(cell.column.columnDef.cell, cell.getContext())} key={cell.id}
className="px-4 py-3 last:py-3"
>
{flexRender(
cell.column.columnDef.cell,
cell.getContext(),
)}
</TableCell> </TableCell>
))} ))}
</motion.tr> </motion.tr>
)) ))
) : ( ) : (
<TableRow> <TableRow>
<TableCell colSpan={columns.length} className="h-24 text-center p-6"> <TableCell
colSpan={columns.length}
className="h-24 text-center p-6"
>
No documents found. No documents found.
</TableCell> </TableCell>
</TableRow> </TableRow>
@ -823,7 +947,7 @@ export default function DocumentsTable() {
animate={{ animate={{
opacity: 1, opacity: 1,
y: 0, y: 0,
transition: { delay: index * 0.05 } transition: { delay: index * 0.05 },
}} }}
> >
<SelectItem value={pageSize.toString()}> <SelectItem value={pageSize.toString()}>
@ -841,19 +965,29 @@ export default function DocumentsTable() {
animate={{ opacity: 1 }} animate={{ opacity: 1 }}
transition={{ delay: 0.2 }} transition={{ delay: 0.2 }}
> >
<p className="whitespace-nowrap text-sm text-muted-foreground" aria-live="polite"> <p
className="whitespace-nowrap text-sm text-muted-foreground"
aria-live="polite"
>
<span className="text-foreground"> <span className="text-foreground">
{table.getState().pagination.pageIndex * table.getState().pagination.pageSize + 1}- {table.getState().pagination.pageIndex *
table.getState().pagination.pageSize +
1}
-
{Math.min( {Math.min(
Math.max( Math.max(
table.getState().pagination.pageIndex * table.getState().pagination.pageSize + table.getState().pagination.pageIndex *
table.getState().pagination.pageSize +
table.getState().pagination.pageSize, table.getState().pagination.pageSize,
0, 0,
), ),
table.getRowCount(), table.getRowCount(),
)} )}
</span>{" "} </span>{" "}
of <span className="text-foreground">{table.getRowCount().toString()}</span> of{" "}
<span className="text-foreground">
{table.getRowCount().toString()}
</span>
</p> </p>
</motion.div> </motion.div>
@ -876,7 +1010,11 @@ export default function DocumentsTable() {
disabled={!table.getCanPreviousPage()} disabled={!table.getCanPreviousPage()}
aria-label="Go to first page" aria-label="Go to first page"
> >
<ChevronFirst size={16} strokeWidth={2} aria-hidden="true" /> <ChevronFirst
size={16}
strokeWidth={2}
aria-hidden="true"
/>
</Button> </Button>
</motion.div> </motion.div>
</PaginationItem> </PaginationItem>
@ -895,7 +1033,11 @@ export default function DocumentsTable() {
disabled={!table.getCanPreviousPage()} disabled={!table.getCanPreviousPage()}
aria-label="Go to previous page" aria-label="Go to previous page"
> >
<ChevronLeft size={16} strokeWidth={2} aria-hidden="true" /> <ChevronLeft
size={16}
strokeWidth={2}
aria-hidden="true"
/>
</Button> </Button>
</motion.div> </motion.div>
</PaginationItem> </PaginationItem>
@ -914,7 +1056,11 @@ export default function DocumentsTable() {
disabled={!table.getCanNextPage()} disabled={!table.getCanNextPage()}
aria-label="Go to next page" aria-label="Go to next page"
> >
<ChevronRight size={16} strokeWidth={2} aria-hidden="true" /> <ChevronRight
size={16}
strokeWidth={2}
aria-hidden="true"
/>
</Button> </Button>
</motion.div> </motion.div>
</PaginationItem> </PaginationItem>
@ -933,7 +1079,11 @@ export default function DocumentsTable() {
disabled={!table.getCanNextPage()} disabled={!table.getCanNextPage()}
aria-label="Go to last page" aria-label="Go to last page"
> >
<ChevronLast size={16} strokeWidth={2} aria-hidden="true" /> <ChevronLast
size={16}
strokeWidth={2}
aria-hidden="true"
/>
</Button> </Button>
</motion.div> </motion.div>
</PaginationItem> </PaginationItem>
@ -1003,7 +1153,8 @@ function RowActions({ row }: { row: Row<Document> }) {
<AlertDialogHeader> <AlertDialogHeader>
<AlertDialogTitle>Are you sure?</AlertDialogTitle> <AlertDialogTitle>Are you sure?</AlertDialogTitle>
<AlertDialogDescription> <AlertDialogDescription>
This action cannot be undone. This will permanently delete the document. This action cannot be undone. This will permanently delete the
document.
</AlertDialogDescription> </AlertDialogDescription>
</AlertDialogHeader> </AlertDialogHeader>
<AlertDialogFooter> <AlertDialogFooter>
@ -1027,4 +1178,3 @@ function RowActions({ row }: { row: Row<Document> }) {
} }
export { DocumentsTable }; export { DocumentsTable };

View file

@ -103,6 +103,7 @@ type DocumentType =
| "YOUTUBE_VIDEO" | "YOUTUBE_VIDEO"
| "GITHUB_CONNECTOR" | "GITHUB_CONNECTOR"
| "LINEAR_CONNECTOR" | "LINEAR_CONNECTOR"
| "JIRA_CONNECTOR"
| "DISCORD_CONNECTOR"; | "DISCORD_CONNECTOR";
/** /**
@ -982,12 +983,14 @@ const ChatPage = () => {
if (!message.annotations) return null; if (!message.annotations) return null;
// Get all TERMINAL_INFO annotations content // Get all TERMINAL_INFO annotations content
const terminalInfoAnnotations = (message.annotations as any[]).map(item => { const terminalInfoAnnotations = (message.annotations as any[])
if(item.type === "TERMINAL_INFO") { .map((item) => {
return item.content.map((a: any) => a.text) if (item.type === "TERMINAL_INFO") {
return item.content.map((a: any) => a.text);
} }
}).flat().filter(Boolean) })
.flat()
.filter(Boolean);
// Render the content of the latest TERMINAL_INFO annotation // Render the content of the latest TERMINAL_INFO annotation
return terminalInfoAnnotations.map((item: any, idx: number) => ( return terminalInfoAnnotations.map((item: any, idx: number) => (
@ -1328,29 +1331,41 @@ const ChatPage = () => {
} }
// Fallback to the message content if no ANSWER annotation is available // Fallback to the message content if no ANSWER annotation is available
return <MarkdownViewer return (
<MarkdownViewer
content={message.content} content={message.content}
getCitationSource={(id) => getCitationSource(id, index)} getCitationSource={(id) =>
getCitationSource(id, index)
}
type="ai" type="ai"
/>; />
);
})()} })()}
</div> </div>
} }
</div> </div>
{/* Further Questions Section */} {/* Further Questions Section */}
{message.annotations && (() => { {message.annotations &&
(() => {
// Get all FURTHER_QUESTIONS annotations // Get all FURTHER_QUESTIONS annotations
const furtherQuestionsAnnotations = (message.annotations as any[]) const furtherQuestionsAnnotations = (
.filter(a => a.type === 'FURTHER_QUESTIONS'); message.annotations as any[]
).filter((a) => a.type === "FURTHER_QUESTIONS");
// Get the latest FURTHER_QUESTIONS annotation // Get the latest FURTHER_QUESTIONS annotation
const latestFurtherQuestions = furtherQuestionsAnnotations.length > 0 const latestFurtherQuestions =
? furtherQuestionsAnnotations[furtherQuestionsAnnotations.length - 1] furtherQuestionsAnnotations.length > 0
? furtherQuestionsAnnotations[
furtherQuestionsAnnotations.length - 1
]
: null; : null;
// Only render if we have questions // Only render if we have questions
if (!latestFurtherQuestions?.content || latestFurtherQuestions.content.length === 0) { if (
!latestFurtherQuestions?.content ||
latestFurtherQuestions.content.length === 0
) {
return null; return null;
} }
@ -1364,13 +1379,24 @@ const ChatPage = () => {
<div className="bg-muted/50 border-b border-border/40 px-4 py-2.5"> <div className="bg-muted/50 border-b border-border/40 px-4 py-2.5">
<div className="flex items-center justify-between"> <div className="flex items-center justify-between">
<h3 className="text-sm font-medium text-muted-foreground flex items-center gap-2"> <h3 className="text-sm font-medium text-muted-foreground flex items-center gap-2">
<svg className="h-4 w-4" fill="none" stroke="currentColor" viewBox="0 0 24 24"> <svg
<path strokeLinecap="round" strokeLinejoin="round" strokeWidth={2} d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z" /> className="h-4 w-4"
fill="none"
stroke="currentColor"
viewBox="0 0 24 24"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M8.228 9c.549-1.165 2.03-2 3.772-2 2.21 0 4 1.343 4 3 0 1.4-1.278 2.575-3.006 2.907-.542.104-.994.54-.994 1.093m0 3h.01M21 12a9 9 0 11-18 0 9 9 0 0118 0z"
/>
</svg> </svg>
Follow-up Questions Follow-up Questions
</h3> </h3>
<span className="text-xs text-muted-foreground bg-background/60 px-2 py-1 rounded-full border border-border/40"> <span className="text-xs text-muted-foreground bg-background/60 px-2 py-1 rounded-full border border-border/40">
{furtherQuestions.length} suggestion{furtherQuestions.length !== 1 ? 's' : ''} {furtherQuestions.length} suggestion
{furtherQuestions.length !== 1 ? "s" : ""}
</span> </span>
</div> </div>
</div> </div>
@ -1387,7 +1413,8 @@ const ChatPage = () => {
{/* Scrollable container */} {/* Scrollable container */}
<div className="overflow-x-auto scrollbar-hover"> <div className="overflow-x-auto scrollbar-hover">
<div className="flex gap-2 py-1 px-6"> <div className="flex gap-2 py-1 px-6">
{furtherQuestions.map((question: any, qIndex: number) => ( {furtherQuestions.map(
(question: any, qIndex: number) => (
<Button <Button
key={question.id || qIndex} key={question.id || qIndex}
variant="outline" variant="outline"
@ -1396,18 +1423,26 @@ const ChatPage = () => {
onClick={() => { onClick={() => {
// Set the input value and submit // Set the input value and submit
handleInputChange({ handleInputChange({
target: { value: question.question } target: {
value: question.question,
},
} as React.ChangeEvent<HTMLInputElement>); } as React.ChangeEvent<HTMLInputElement>);
// Small delay to ensure input is updated, then submit // Small delay to ensure input is updated, then submit
setTimeout(() => { setTimeout(() => {
const form = document.querySelector('form') as HTMLFormElement; const form =
if (form && status === 'ready') { document.querySelector(
"form",
) as HTMLFormElement;
if (
form &&
status === "ready"
) {
form.requestSubmit(); form.requestSubmit();
} }
}, 50); }, 50);
}} }}
disabled={status !== 'ready'} disabled={status !== "ready"}
> >
<span className="text-foreground group-hover:text-primary transition-colors"> <span className="text-foreground group-hover:text-primary transition-colors">
{question.question} {question.question}
@ -1427,7 +1462,8 @@ const ChatPage = () => {
/> />
</svg> </svg>
</Button> </Button>
))} ),
)}
</div> </div>
</div> </div>
</div> </div>

View file

@ -1,4 +1,4 @@
import React from 'react'; import React from "react";
import { import {
ChevronDown, ChevronDown,
Plus, Plus,
@ -12,77 +12,98 @@ import {
Webhook, Webhook,
MessageCircle, MessageCircle,
FileText, FileText,
} from 'lucide-react'; } from "lucide-react";
import { IconBrandNotion, IconBrandSlack, IconBrandYoutube, IconBrandGithub, IconLayoutKanban, IconLinkPlus, IconBrandDiscord } from "@tabler/icons-react"; import {
import { Button } from '@/components/ui/button'; IconBrandNotion,
import { Connector, ResearchMode } from './types'; IconBrandSlack,
IconBrandYoutube,
IconBrandGithub,
IconLayoutKanban,
IconLinkPlus,
IconBrandDiscord,
IconTicket,
} from "@tabler/icons-react";
import { Button } from "@/components/ui/button";
import { Connector, ResearchMode } from "./types";
// Helper function to get connector icon // Helper function to get connector icon
export const getConnectorIcon = (connectorType: string) => { export const getConnectorIcon = (connectorType: string) => {
const iconProps = { className: "h-4 w-4" }; const iconProps = { className: "h-4 w-4" };
switch(connectorType) { switch (connectorType) {
case 'LINKUP_API': case "LINKUP_API":
return <IconLinkPlus {...iconProps} />; return <IconLinkPlus {...iconProps} />;
case 'LINEAR_CONNECTOR': case "LINEAR_CONNECTOR":
return <IconLayoutKanban {...iconProps} />; return <IconLayoutKanban {...iconProps} />;
case 'GITHUB_CONNECTOR': case "GITHUB_CONNECTOR":
return <IconBrandGithub {...iconProps} />; return <IconBrandGithub {...iconProps} />;
case 'YOUTUBE_VIDEO': case "YOUTUBE_VIDEO":
return <IconBrandYoutube {...iconProps} />; return <IconBrandYoutube {...iconProps} />;
case 'CRAWLED_URL': case "CRAWLED_URL":
return <Globe {...iconProps} />; return <Globe {...iconProps} />;
case 'FILE': case "FILE":
return <File {...iconProps} />; return <File {...iconProps} />;
case 'EXTENSION': case "EXTENSION":
return <Webhook {...iconProps} />; return <Webhook {...iconProps} />;
case 'SERPER_API': case "SERPER_API":
case 'TAVILY_API': case "TAVILY_API":
return <Link {...iconProps} />; return <Link {...iconProps} />;
case 'SLACK_CONNECTOR': case "SLACK_CONNECTOR":
return <IconBrandSlack {...iconProps} />; return <IconBrandSlack {...iconProps} />;
case 'NOTION_CONNECTOR': case "NOTION_CONNECTOR":
return <IconBrandNotion {...iconProps} />; return <IconBrandNotion {...iconProps} />;
case 'DISCORD_CONNECTOR': case "DISCORD_CONNECTOR":
return <IconBrandDiscord {...iconProps} />; return <IconBrandDiscord {...iconProps} />;
case 'DEEP': case "JIRA_CONNECTOR":
return <IconTicket {...iconProps} />;
case "DEEP":
return <Sparkles {...iconProps} />; return <Sparkles {...iconProps} />;
case 'DEEPER': case "DEEPER":
return <Microscope {...iconProps} />; return <Microscope {...iconProps} />;
case 'DEEPEST': case "DEEPEST":
return <Telescope {...iconProps} />; return <Telescope {...iconProps} />;
default: default:
return <Search {...iconProps} />; return <Search {...iconProps} />;
} }
}; };
export const researcherOptions: { value: ResearchMode; label: string; icon: React.ReactNode }[] = [ export const researcherOptions: {
value: ResearchMode;
label: string;
icon: React.ReactNode;
}[] = [
{ {
value: 'QNA', value: "QNA",
label: 'Q/A', label: "Q/A",
icon: getConnectorIcon('GENERAL') icon: getConnectorIcon("GENERAL"),
}, },
{ {
value: 'REPORT_GENERAL', value: "REPORT_GENERAL",
label: 'General', label: "General",
icon: getConnectorIcon('GENERAL') icon: getConnectorIcon("GENERAL"),
}, },
{ {
value: 'REPORT_DEEP', value: "REPORT_DEEP",
label: 'Deep', label: "Deep",
icon: getConnectorIcon('DEEP') icon: getConnectorIcon("DEEP"),
}, },
{ {
value: 'REPORT_DEEPER', value: "REPORT_DEEPER",
label: 'Deeper', label: "Deeper",
icon: getConnectorIcon('DEEPER') icon: getConnectorIcon("DEEPER"),
}, },
] ];
/** /**
* Displays a small icon for a connector type * Displays a small icon for a connector type
*/ */
export const ConnectorIcon = ({ type, index = 0 }: { type: string; index?: number }) => ( export const ConnectorIcon = ({
type,
index = 0,
}: {
type: string;
index?: number;
}) => (
<div <div
className="w-4 h-4 rounded-full flex items-center justify-center bg-muted border border-background" className="w-4 h-4 rounded-full flex items-center justify-center bg-muted border border-background"
style={{ zIndex: 10 - index }} style={{ zIndex: 10 - index }}
@ -109,15 +130,21 @@ type ConnectorButtonProps = {
/** /**
* Button that displays selected connectors and opens connector selection dialog * Button that displays selected connectors and opens connector selection dialog
*/ */
export const ConnectorButton = ({ selectedConnectors, onClick, connectorSources }: ConnectorButtonProps) => { export const ConnectorButton = ({
selectedConnectors,
onClick,
connectorSources,
}: ConnectorButtonProps) => {
const totalConnectors = connectorSources.length; const totalConnectors = connectorSources.length;
const selectedCount = selectedConnectors.length; const selectedCount = selectedConnectors.length;
const progressPercentage = (selectedCount / totalConnectors) * 100; const progressPercentage = (selectedCount / totalConnectors) * 100;
// Get the name of a single selected connector // Get the name of a single selected connector
const getSingleConnectorName = () => { const getSingleConnectorName = () => {
const connector = connectorSources.find(c => c.type === selectedConnectors[0]); const connector = connectorSources.find(
return connector?.name || ''; (c) => c.type === selectedConnectors[0],
);
return connector?.name || "";
}; };
// Get display text based on selection count // Get display text based on selection count
@ -158,14 +185,18 @@ export const ConnectorButton = ({ selectedConnectors, onClick, connectorSources
variant="outline" variant="outline"
className="h-8 px-2 text-xs font-medium rounded-md border-border relative overflow-hidden group" className="h-8 px-2 text-xs font-medium rounded-md border-border relative overflow-hidden group"
onClick={onClick} onClick={onClick}
aria-label={selectedCount === 0 ? "Select Connectors" : `${selectedCount} connectors selected`} aria-label={
selectedCount === 0
? "Select Connectors"
: `${selectedCount} connectors selected`
}
> >
{/* Progress indicator */} {/* Progress indicator */}
<div <div
className="absolute bottom-0 left-0 h-1 bg-primary" className="absolute bottom-0 left-0 h-1 bg-primary"
style={{ style={{
width: `${progressPercentage}%`, width: `${progressPercentage}%`,
transition: 'width 0.3s ease' transition: "width 0.3s ease",
}} }}
/> />
@ -183,29 +214,32 @@ type ResearchModeControlProps = {
onChange: (value: ResearchMode) => void; onChange: (value: ResearchMode) => void;
}; };
export const ResearchModeControl = ({ value, onChange }: ResearchModeControlProps) => { export const ResearchModeControl = ({
value,
onChange,
}: ResearchModeControlProps) => {
// Determine if we're in Q/A mode or Report mode // Determine if we're in Q/A mode or Report mode
const isQnaMode = value === 'QNA'; const isQnaMode = value === "QNA";
const isReportMode = value.startsWith('REPORT_'); const isReportMode = value.startsWith("REPORT_");
// Get the current report sub-mode // Get the current report sub-mode
const getCurrentReportMode = () => { const getCurrentReportMode = () => {
if (!isReportMode) return 'GENERAL'; if (!isReportMode) return "GENERAL";
return value.replace('REPORT_', '') as 'GENERAL' | 'DEEP' | 'DEEPER'; return value.replace("REPORT_", "") as "GENERAL" | "DEEP" | "DEEPER";
}; };
const reportSubOptions = [ const reportSubOptions = [
{ value: 'GENERAL', label: 'General', icon: getConnectorIcon('GENERAL') }, { value: "GENERAL", label: "General", icon: getConnectorIcon("GENERAL") },
{ value: 'DEEP', label: 'Deep', icon: getConnectorIcon('DEEP') }, { value: "DEEP", label: "Deep", icon: getConnectorIcon("DEEP") },
{ value: 'DEEPER', label: 'Deeper', icon: getConnectorIcon('DEEPER') }, { value: "DEEPER", label: "Deeper", icon: getConnectorIcon("DEEPER") },
]; ];
const handleModeToggle = (mode: 'QNA' | 'REPORT') => { const handleModeToggle = (mode: "QNA" | "REPORT") => {
if (mode === 'QNA') { if (mode === "QNA") {
onChange('QNA'); onChange("QNA");
} else { } else {
// Default to GENERAL for Report mode // Default to GENERAL for Report mode
onChange('REPORT_GENERAL'); onChange("REPORT_GENERAL");
} }
}; };
@ -220,10 +254,10 @@ export const ResearchModeControl = ({ value, onChange }: ResearchModeControlProp
<button <button
className={`flex h-full items-center gap-1 px-3 text-xs font-medium transition-colors whitespace-nowrap ${ className={`flex h-full items-center gap-1 px-3 text-xs font-medium transition-colors whitespace-nowrap ${
isQnaMode isQnaMode
? 'bg-primary text-primary-foreground' ? "bg-primary text-primary-foreground"
: 'hover:bg-muted text-muted-foreground hover:text-foreground' : "hover:bg-muted text-muted-foreground hover:text-foreground"
}`} }`}
onClick={() => handleModeToggle('QNA')} onClick={() => handleModeToggle("QNA")}
aria-pressed={isQnaMode} aria-pressed={isQnaMode}
> >
<MessageCircle className="h-3 w-3" /> <MessageCircle className="h-3 w-3" />
@ -232,10 +266,10 @@ export const ResearchModeControl = ({ value, onChange }: ResearchModeControlProp
<button <button
className={`flex h-full items-center gap-1 px-3 text-xs font-medium transition-colors whitespace-nowrap ${ className={`flex h-full items-center gap-1 px-3 text-xs font-medium transition-colors whitespace-nowrap ${
isReportMode isReportMode
? 'bg-primary text-primary-foreground' ? "bg-primary text-primary-foreground"
: 'hover:bg-muted text-muted-foreground hover:text-foreground' : "hover:bg-muted text-muted-foreground hover:text-foreground"
}`} }`}
onClick={() => handleModeToggle('REPORT')} onClick={() => handleModeToggle("REPORT")}
aria-pressed={isReportMode} aria-pressed={isReportMode}
> >
<FileText className="h-3 w-3" /> <FileText className="h-3 w-3" />
@ -251,8 +285,8 @@ export const ResearchModeControl = ({ value, onChange }: ResearchModeControlProp
key={option.value} key={option.value}
className={`flex h-full items-center gap-1 px-2 text-xs font-medium transition-colors whitespace-nowrap ${ className={`flex h-full items-center gap-1 px-2 text-xs font-medium transition-colors whitespace-nowrap ${
getCurrentReportMode() === option.value getCurrentReportMode() === option.value
? 'bg-primary text-primary-foreground' ? "bg-primary text-primary-foreground"
: 'hover:bg-muted text-muted-foreground hover:text-foreground' : "hover:bg-muted text-muted-foreground hover:text-foreground"
}`} }`}
onClick={() => handleReportSubModeChange(option.value)} onClick={() => handleReportSubModeChange(option.value)}
aria-pressed={getCurrentReportMode() === option.value} aria-pressed={getCurrentReportMode() === option.value}

View file

@ -1,14 +1,15 @@
// Helper function to get connector type display name // Helper function to get connector type display name
export const getConnectorTypeDisplay = (type: string): string => { export const getConnectorTypeDisplay = (type: string): string => {
const typeMap: Record<string, string> = { const typeMap: Record<string, string> = {
"SERPER_API": "Serper API", SERPER_API: "Serper API",
"TAVILY_API": "Tavily API", TAVILY_API: "Tavily API",
"SLACK_CONNECTOR": "Slack", SLACK_CONNECTOR: "Slack",
"NOTION_CONNECTOR": "Notion", NOTION_CONNECTOR: "Notion",
"GITHUB_CONNECTOR": "GitHub", GITHUB_CONNECTOR: "GitHub",
"LINEAR_CONNECTOR": "Linear", LINEAR_CONNECTOR: "Linear",
"DISCORD_CONNECTOR": "Discord", JIRA_CONNECTOR: "Jira",
"LINKUP_API": "Linkup", DISCORD_CONNECTOR: "Discord",
LINKUP_API: "Linkup",
}; };
return typeMap[type] || type; return typeMap[type] || type;
}; };