Merge branch 'MODSetter:main' into main

Anshul Sharma 2025-06-03 11:37:09 +05:30 committed by GitHub
commit 5315406e36
23 changed files with 2963 additions and 1684 deletions


@ -5,7 +5,7 @@
# SurfSense
While tools like NotebookLM and Perplexity are impressive and highly effective for conducting research on any topic/query, SurfSense elevates this capability by integrating with your personal knowledge base. It is a highly customizable AI research agent, connected to external sources such as search engines (Tavily, LinkUp), Slack, Linear, Notion, YouTube, GitHub and more to come.
While tools like NotebookLM and Perplexity are impressive and highly effective for conducting research on any topic/query, SurfSense elevates this capability by integrating with your personal knowledge base. It is a highly customizable AI research agent, connected to external sources such as search engines (Tavily, LinkUp), Slack, Linear, Notion, YouTube, GitHub, Discord and more to come.
<div align="center">
<a href="https://trendshift.io/repositories/13606" target="_blank"><img src="https://trendshift.io/api/badge/repositories/13606" alt="MODSetter%2FSurfSense | Trendshift" style="width: 250px; height: 55px;" width="250" height="55"/></a>
@ -62,6 +62,7 @@ Open source and easy to deploy locally.
- Notion
- Youtube Videos
- GitHub
- Discord
- and more to come.....
## 📄 **Supported File Extensions**
@ -106,7 +107,12 @@ Open source and easy to deploy locally.
Join the [SurfSense Discord](https://discord.gg/ejRNvftDp9) and help shape the future of SurfSense!
## 🚀 Roadmap
Stay up to date with our development progress and upcoming features!
Check out our public roadmap and contribute your ideas or feedback:
**View the Roadmap:** [SurfSense Roadmap on GitHub Projects](https://github.com/users/MODSetter/projects/2)
## How to get started?
@ -150,7 +156,7 @@ Before installation, make sure to complete the [prerequisite setup steps](https:
**Agent Chat**
![chat](https://github.com/user-attachments/assets/bb352d52-1c6d-4020-926b-722d0b98b491)
![git_chat](https://github.com/user-attachments/assets/bb352d52-1c6d-4020-926b-722d0b98b491)
**Browser Extension**


@ -0,0 +1,112 @@
"""Add DISCORD_CONNECTOR to SearchSourceConnectorType and DocumentType enums
Revision ID: 9
Revises: 8
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "9"
down_revision: Union[str, None] = "8"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
# Define the ENUM type name and the new value
CONNECTOR_ENUM = "searchsourceconnectortype"
CONNECTOR_NEW_VALUE = "DISCORD_CONNECTOR"
DOCUMENT_ENUM = "documenttype"
DOCUMENT_NEW_VALUE = "DISCORD_CONNECTOR"
def upgrade() -> None:
"""Upgrade schema - add DISCORD_CONNECTOR to connector and document enum."""
# Add DISCORD_CONNECTOR to searchsourceconnectortype
op.execute(f"ALTER TYPE {CONNECTOR_ENUM} ADD VALUE '{CONNECTOR_NEW_VALUE}'")
# Add DISCORD_CONNECTOR to documenttype
op.execute(f"ALTER TYPE {DOCUMENT_ENUM} ADD VALUE '{DOCUMENT_NEW_VALUE}'")
def downgrade() -> None:
"""Downgrade schema - remove DISCORD_CONNECTOR from connector and document enum."""
# Old enum name
old_connector_enum_name = f"{CONNECTOR_ENUM}_old"
old_document_enum_name = f"{DOCUMENT_ENUM}_old"
old_connector_values = (
"SERPER_API",
"TAVILY_API",
"LINKUP_API",
"SLACK_CONNECTOR",
"NOTION_CONNECTOR",
"GITHUB_CONNECTOR",
"LINEAR_CONNECTOR",
)
old_document_values = (
"EXTENSION",
"CRAWLED_URL",
"FILE",
"SLACK_CONNECTOR",
"NOTION_CONNECTOR",
"YOUTUBE_VIDEO",
"GITHUB_CONNECTOR",
"LINEAR_CONNECTOR",
)
old_connector_values_sql = ", ".join([f"'{v}'" for v in old_connector_values])
old_document_values_sql = ", ".join([f"'{v}'" for v in old_document_values])
# Table and column names (adjust if different)
connector_table_name = "search_source_connectors"
connector_column_name = "connector_type"
document_table_name = "documents"
document_column_name = "document_type"
# Connector Enum Downgrade Steps
# 1. Rename the current connector enum type
op.execute(f"ALTER TYPE {CONNECTOR_ENUM} RENAME TO {old_connector_enum_name}")
# 2. Create the new connector enum type with the old values
op.execute(f"CREATE TYPE {CONNECTOR_ENUM} AS ENUM({old_connector_values_sql})")
# 3. Update the connector table:
op.execute(
f"ALTER TABLE {connector_table_name} "
f"ALTER COLUMN {connector_column_name} "
f"TYPE {CONNECTOR_ENUM} "
f"USING {connector_column_name}::text::{CONNECTOR_ENUM}"
)
# 4. Drop the old connector enum type
op.execute(f"DROP TYPE {old_connector_enum_name}")
# Document Enum Downgrade Steps
# 1. Rename the current document enum type
op.execute(f"ALTER TYPE {DOCUMENT_ENUM} RENAME TO {old_document_enum_name}")
# 2. Create the new document enum type with the old values
op.execute(f"CREATE TYPE {DOCUMENT_ENUM} AS ENUM({old_document_values_sql})")
# 3. Delete rows with the new value from the documents table
op.execute(
f"DELETE FROM {document_table_name} WHERE {document_column_name}::text = '{DOCUMENT_NEW_VALUE}'"
)
# 4. Alter the document table to use the new enum type (casting old values)
op.execute(
f"ALTER TABLE {document_table_name} "
f"ALTER COLUMN {document_column_name} "
f"TYPE {DOCUMENT_ENUM} "
f"USING {document_column_name}::text::{DOCUMENT_ENUM}"
)
# 5. Drop the old enum types
op.execute(f"DROP TYPE {old_document_enum_name}")
# ### end Alembic commands ###
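A quick way to sanity-check this migration after `alembic upgrade head` is to read the enum labels back from Postgres. The sketch below is illustrative only: the DSN is a placeholder, and it talks to the database directly via asyncpg (already a backend dependency) rather than through the app's session machinery.

```python
# Hedged verification sketch (not part of this commit): confirm both enums now
# contain DISCORD_CONNECTOR. The connection string is a placeholder.
import asyncio
import asyncpg

async def check_enums(dsn: str) -> None:
    conn = await asyncpg.connect(dsn)
    try:
        for enum_name in ("searchsourceconnectortype", "documenttype"):
            rows = await conn.fetch(
                "SELECT e.enumlabel FROM pg_enum e "
                "JOIN pg_type t ON t.oid = e.enumtypid "
                "WHERE t.typname = $1 ORDER BY e.enumsortorder",
                enum_name,
            )
            labels = [r["enumlabel"] for r in rows]
            assert "DISCORD_CONNECTOR" in labels, f"{enum_name} missing DISCORD_CONNECTOR"
            print(enum_name, labels)
    finally:
        await conn.close()

# asyncio.run(check_enums("postgresql://postgres:postgres@localhost:5432/surfsense"))
```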


@ -401,6 +401,23 @@ async def fetch_relevant_documents(
streaming_service.only_update_terminal(f"🔗 Found {len(linkup_chunks)} Linkup results related to your query")
writer({"yeild_value": streaming_service._format_annotations()})
elif connector == "DISCORD_CONNECTOR":
source_object, discord_chunks = await connector_service.search_discord(
user_query=reformulated_query,
user_id=user_id,
search_space_id=search_space_id,
top_k=top_k,
search_mode=search_mode
)
# Add to sources and raw documents
if source_object:
all_sources.append(source_object)
all_raw_documents.extend(discord_chunks)
# Stream found document count
if streaming_service and writer:
streaming_service.only_update_terminal(f"🗨️ Found {len(discord_chunks)} Discord messages related to your query")
writer({"yeild_value": streaming_service._format_annotations()})
except Exception as e:
error_message = f"Error searching connector {connector}: {str(e)}"


@ -15,6 +15,7 @@ You are SurfSense, an advanced AI research assistant that synthesizes informatio
- YOUTUBE_VIDEO: "YouTube video transcripts and metadata" (personally saved videos)
- GITHUB_CONNECTOR: "GitHub repository content and issues" (personal repositories and interactions)
- LINEAR_CONNECTOR: "Linear project issues and discussions" (personal project management)
- DISCORD_CONNECTOR: "Discord server messages and channels" (personal community interactions)
- TAVILY_API: "Tavily search API results" (personalized search results)
- LINKUP_API: "Linkup search API results" (personalized search results)
</knowledge_sources>


@ -0,0 +1,310 @@
"""
Discord Connector
A module for interacting with Discord's HTTP API to retrieve guilds, channels, and message history.
Requires a Discord bot token.
"""
import logging
import discord
from discord.ext import commands
import datetime
import asyncio
logger = logging.getLogger(__name__)
class DiscordConnector(commands.Bot):
"""Class for retrieving guild, channel, and message history from Discord."""
def __init__(self, token: str = None):
"""
Initialize the DiscordConnector with a bot token.
Args:
token (str): The Discord bot token.
"""
intents = discord.Intents.default()
intents.guilds = True # Required to fetch guilds and channels
intents.messages = True # Required to fetch messages
intents.message_content = True # Required to read message content
intents.members = True # Required to fetch member information
super().__init__(command_prefix="!", intents=intents) # command_prefix is required but not strictly used here
self.token = token
self._bot_task = None # Holds the async bot task
self._is_running = False # Flag to track if the bot is running
# Event to confirm bot is ready
@self.event
async def on_ready():
logger.info(f"Logged in as {self.user} (ID: {self.user.id})")
self._is_running = True
@self.event
async def on_connect():
logger.debug("Bot connected to Discord gateway.")
@self.event
async def on_disconnect():
logger.debug("Bot disconnected from Discord gateway.")
self._is_running = False # Reset flag on disconnect
@self.event
async def on_resumed():
logger.debug("Bot resumed connection to Discord gateway.")
async def start_bot(self):
"""Starts the bot to connect to Discord."""
logger.info("Starting Discord bot...")
if not self.token:
raise ValueError("Discord bot token not set. Call set_token(token) first.")
try:
if self._is_running:
logger.warning("Bot is already running. Use close_bot() to stop it before starting again.")
return
await self.start(self.token)
logger.info("Discord bot started successfully.")
except discord.LoginFailure:
logger.error("Failed to log in: Invalid token was provided. Please check your bot token.")
self._is_running = False
raise
except discord.PrivilegedIntentsRequired as e:
logger.error(f"Privileged Intents Required: {e}. Make sure all required intents are enabled in your bot's application page.")
self._is_running = False
raise
except discord.ConnectionClosed as e:
logger.error(f"Discord connection closed unexpectedly: {e}")
self._is_running = False
raise
except Exception as e:
logger.error(f"An unexpected error occurred while starting the bot: {e}")
self._is_running = False
raise
async def close_bot(self):
"""Closes the bot's connection to Discord."""
if self._is_running:
logger.info("Closing Discord bot connection...")
await self.close()
logger.info("Discord bot connection closed.")
self._is_running = False
else:
logger.info("Bot is not running or already disconnected.")
def set_token(self, token: str) -> None:
"""
Set the Discord bot token.
Args:
token (str): The Discord bot token.
"""
logger.info("Setting Discord bot token.")
self.token = token
logger.info("Token set successfully. You can now start the bot with start_bot().")
async def _wait_until_ready(self):
"""Helper to wait until the bot is connected and ready."""
logger.info("Waiting for the bot to be ready...")
# Give the event loop a chance to switch to the bot's startup task.
# This allows self.start() to begin initializing the client.
# Terrible solution, but necessary to avoid blocking the event loop.
await asyncio.sleep(1) # Yield control to the event loop
try:
await asyncio.wait_for(self.wait_until_ready(), timeout=60.0)
logger.info("Bot is ready.")
except asyncio.TimeoutError:
logger.error(f"Bot did not become ready within 60 seconds. Connection may have failed.")
raise
except Exception as e:
logger.error(f"An unexpected error occurred while waiting for the bot to be ready: {e}")
raise
async def get_guilds(self) -> list[dict]:
"""
Fetch all guilds (servers) the bot is in.
Returns:
list[dict]: A list of guilds with their ID, name, and member count.
Each guild is represented as a dictionary.
Raises:
ValueError: If the token is not set.
"""
await self._wait_until_ready()
logger.info("Fetching guilds...")
guilds_data = []
for guild in self.guilds:
member_count = guild.member_count if guild.member_count is not None else "N/A"
guilds_data.append(
{
"id": str(guild.id),
"name": guild.name,
"member_count": member_count,
}
)
logger.info(f"Fetched {len(guilds_data)} guilds.")
return guilds_data
async def get_text_channels(self, guild_id: str) -> list[dict]:
"""
Fetch all text channels in a guild.
Args:
guild_id (str): The ID of the guild to fetch channels from.
Returns:
list[dict]: A list of text channels with their ID, name, and type.
Each channel is represented as a dictionary.
Raises:
discord.NotFound: If the guild is not found.
"""
await self._wait_until_ready()
logger.info(f"Fetching text channels for guild ID: {guild_id}")
guild = self.get_guild(int(guild_id))
if not guild:
logger.warning(f"Guild with ID {guild_id} not found.")
raise discord.NotFound(f"Guild with ID {guild_id} not found.")
channels_data = []
for channel in guild.channels:
if isinstance(channel, discord.TextChannel):
channels_data.append(
{"id": str(channel.id), "name": channel.name, "type": "text"}
)
logger.info(f"Fetched {len(channels_data)} text channels from guild {guild_id}.")
return channels_data
async def get_channel_history(
self,
channel_id: str,
start_date: str = None,
end_date: str = None,
) -> list[dict]:
"""
Fetch message history from a text channel.
Args:
channel_id (str): The ID of the channel to fetch messages from.
start_date (str): Optional start date in ISO format (YYYY-MM-DD).
end_date (str): Optional end date in ISO format (YYYY-MM-DD).
Returns:
list[dict]: A list of messages with their ID, author ID, author name,
content, and creation timestamp.
Each message is represented as a dictionary.
Raises:
discord.NotFound: If the channel is not found.
discord.Forbidden: If the bot does not have permissions to read history in the channel.
"""
await self._wait_until_ready()
logger.info(f"Fetching message history for channel ID: {channel_id}")
channel = self.get_channel(int(channel_id))
if not channel:
logger.warning(f"Channel with ID {channel_id} not found.")
raise discord.NotFound(f"Channel with ID {channel_id} not found.")
if not isinstance(channel, discord.TextChannel):
logger.warning(f"Channel {channel_id} is not a text channel.")
return []
messages_data = []
after = None
before = None
if start_date:
try:
start_datetime = datetime.datetime.fromisoformat(start_date).replace(tzinfo=datetime.timezone.utc)
after = start_datetime
except ValueError:
logger.warning(f"Invalid start_date format: {start_date}. Ignoring.")
if end_date:
try:
end_datetime = datetime.datetime.fromisoformat(end_date).replace(tzinfo=datetime.timezone.utc)
before = end_datetime
except ValueError:
logger.warning(f"Invalid end_date format: {end_date}. Ignoring.")
try:
async for message in channel.history(limit=None, before=before, after=after):
messages_data.append(
{
"id": str(message.id),
"author_id": str(message.author.id),
"author_name": message.author.name,
"content": message.content,
"created_at": message.created_at.isoformat(),
}
)
except discord.Forbidden:
logger.error(f"Bot does not have permissions to read message history in channel {channel_id}.")
raise
except discord.HTTPException as e:
logger.error(f"Failed to fetch messages from channel {channel_id}: {e}")
return []
logger.info(f"Fetched {len(messages_data)} messages from channel {channel_id}.")
return messages_data
async def get_user_info(self, guild_id: str, user_id: str) -> dict | None:
"""
Get information about a user in a guild.
Args:
guild_id (str): The ID of the guild.
user_id (str): The ID of the user.
Returns:
dict | None: A dictionary with user information (ID, name, joined_at, roles)
or None if the user is not found.
Raises:
discord.NotFound: If the guild or user is not found.
discord.Forbidden: If the bot does not have the GUILD_MEMBERS intent or
permissions to view members.
"""
await self._wait_until_ready()
logger.info(f"Fetching user info for user ID: {user_id} in guild ID: {guild_id}")
guild = self.get_guild(int(guild_id))
if not guild:
logger.warning(f"Guild with ID {guild_id} not found.")
raise discord.NotFound(f"Guild with ID {guild_id} not found.")
try:
member = await guild.fetch_member(int(user_id))
if member:
roles = [role.name for role in member.roles if role.name != "@everyone"]
logger.info(f"User {user_id} found in guild {guild_id}.")
return {
"id": str(member.id),
"name": member.name,
"joined_at": member.joined_at.isoformat() if member.joined_at else None,
"roles": roles,
}
logger.warning(f"User {user_id} not found in guild {guild_id}.")
return None
except discord.NotFound:
logger.warning(f"User {user_id} not found in guild {guild_id}.")
return None
except discord.Forbidden:
logger.error(f"Bot does not have permissions to fetch members in guild {guild_id}. Ensure GUILD_MEMBERS intent is enabled.")
raise
except discord.HTTPException as e:
logger.error(f"Failed to fetch user info for {user_id} in guild {guild_id}: {e}")
return None
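As a usage sketch, the class is driven the same way `index_discord_messages` (later in this commit) drives it: `start_bot()` blocks until shutdown, so it is launched as a background task, `_wait_until_ready()` gates the first call, and `close_bot()` tears the connection down. The token and start date below are placeholders.

```python
# Illustrative usage only; mirrors how index_discord_messages drives the connector.
import asyncio
from app.connectors.discord_connector import DiscordConnector

async def dump_recent_messages(bot_token: str) -> None:
    client = DiscordConnector(token=bot_token)
    # start() blocks until the bot shuts down, so run it as a background task
    client._bot_task = asyncio.create_task(client.start_bot())
    try:
        await client._wait_until_ready()
        for guild in await client.get_guilds():
            for channel in await client.get_text_channels(guild["id"]):
                messages = await client.get_channel_history(
                    channel_id=channel["id"],
                    start_date="2025-01-01",  # ISO date; invalid values are ignored by the connector
                )
                print(f'{guild["name"]}#{channel["name"]}: {len(messages)} messages')
    finally:
        await client.close_bot()

# asyncio.run(dump_recent_messages("YOUR_DISCORD_BOT_TOKEN"))
```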


@ -50,6 +50,7 @@ class DocumentType(str, Enum):
YOUTUBE_VIDEO = "YOUTUBE_VIDEO"
GITHUB_CONNECTOR = "GITHUB_CONNECTOR"
LINEAR_CONNECTOR = "LINEAR_CONNECTOR"
DISCORD_CONNECTOR = "DISCORD_CONNECTOR"
class SearchSourceConnectorType(str, Enum):
SERPER_API = "SERPER_API" # NOT IMPLEMENTED YET : DON'T REMEMBER WHY : MOST PROBABLY BECAUSE WE NEED TO CRAWL THE RESULTS RETURNED BY IT
@ -59,6 +60,7 @@ class SearchSourceConnectorType(str, Enum):
NOTION_CONNECTOR = "NOTION_CONNECTOR"
GITHUB_CONNECTOR = "GITHUB_CONNECTOR"
LINEAR_CONNECTOR = "LINEAR_CONNECTOR"
DISCORD_CONNECTOR = "DISCORD_CONNECTOR"
class ChatType(str, Enum):
GENERAL = "GENERAL"


@ -7,7 +7,7 @@ PUT /search-source-connectors/{connector_id} - Update a specific connector
DELETE /search-source-connectors/{connector_id} - Delete a specific connector
POST /search-source-connectors/{connector_id}/index - Index content from a connector to a search space
Note: Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR, NOTION_CONNECTOR, GITHUB_CONNECTOR, LINEAR_CONNECTOR).
Note: Each user can have only one connector of each type (SERPER_API, TAVILY_API, SLACK_CONNECTOR, NOTION_CONNECTOR, GITHUB_CONNECTOR, LINEAR_CONNECTOR, DISCORD_CONNECTOR).
"""
from fastapi import APIRouter, Depends, HTTPException, Query, BackgroundTasks, Body
from sqlalchemy.ext.asyncio import AsyncSession
@ -19,7 +19,7 @@ from app.schemas import SearchSourceConnectorCreate, SearchSourceConnectorUpdate
from app.users import current_active_user
from app.utils.check_ownership import check_ownership
from pydantic import BaseModel, Field, ValidationError
from app.tasks.connectors_indexing_tasks import index_slack_messages, index_notion_pages, index_github_repos, index_linear_issues
from app.tasks.connectors_indexing_tasks import index_slack_messages, index_notion_pages, index_github_repos, index_linear_issues, index_discord_messages
from app.connectors.github_connector import GitHubConnector
from datetime import datetime, timedelta
import logging
@ -282,6 +282,7 @@ async def index_connector_content(
- NOTION_CONNECTOR: Indexes pages from all accessible Notion pages
- GITHUB_CONNECTOR: Indexes code and documentation from GitHub repositories
- LINEAR_CONNECTOR: Indexes issues and comments from Linear
- DISCORD_CONNECTOR: Indexes messages from all accessible Discord channels
Args:
connector_id: ID of the connector to use
@ -378,6 +379,30 @@ async def index_connector_content(
background_tasks.add_task(run_linear_indexing_with_new_session, connector_id, search_space_id)
response_message = "Linear indexing started in the background."
elif connector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR:
# Determine the time range that will be indexed
if not connector.last_indexed_at:
start_date = "365 days ago"
else:
today = datetime.now().date()
if connector.last_indexed_at.date() == today:
# If last indexed today, go back 1 day to ensure we don't miss anything
start_date = (today - timedelta(days=1)).strftime("%Y-%m-%d")
else:
start_date = connector.last_indexed_at.strftime("%Y-%m-%d")
indexing_from = start_date
indexing_to = today_str
# Run indexing in background
logger.info(
f"Triggering Discord indexing for connector {connector_id} into search space {search_space_id}"
)
background_tasks.add_task(
run_discord_indexing_with_new_session, connector_id, search_space_id
)
response_message = "Discord indexing started in the background."
else:
raise HTTPException(
status_code=400,
@ -577,3 +602,45 @@ async def run_linear_indexing(
await session.rollback()
logger.error(f"Critical error in run_linear_indexing for connector {connector_id}: {e}", exc_info=True)
# Optionally update status in DB to indicate failure
# Add new helper functions for discord indexing
async def run_discord_indexing_with_new_session(
connector_id: int,
search_space_id: int
):
"""
Create a new session and run the Discord indexing task.
This prevents session leaks by creating a dedicated session for the background task.
"""
async with async_session_maker() as session:
await run_discord_indexing(session, connector_id, search_space_id)
async def run_discord_indexing(
session: AsyncSession,
connector_id: int,
search_space_id: int
):
"""
Background task to run Discord indexing.
Args:
session: Database session
connector_id: ID of the Discord connector
search_space_id: ID of the search space
"""
try:
# Index Discord messages without updating last_indexed_at (we'll do it separately)
documents_processed, error_or_warning = await index_discord_messages(
session=session,
connector_id=connector_id,
search_space_id=search_space_id,
update_last_indexed=False # Don't update timestamp in the indexing function
)
# Only update last_indexed_at if indexing was successful (either new docs or updated docs)
if documents_processed > 0:
await update_connector_last_indexed(session, connector_id)
logger.info(f"Discord indexing completed successfully: {documents_processed} documents processed")
else:
logger.error(f"Discord indexing failed or no documents processed: {error_or_warning}")
except Exception as e:
logger.error(f"Error in background Discord indexing task: {str(e)}")


@ -81,6 +81,7 @@ class SearchSourceConnectorBase(BaseModel):
repo_full_names = config.get("repo_full_names")
if not isinstance(repo_full_names, list) or not repo_full_names:
raise ValueError("repo_full_names must be a non-empty list of strings")
elif connector_type == SearchSourceConnectorType.LINEAR_CONNECTOR:
# For LINEAR_CONNECTOR, only allow LINEAR_API_KEY
allowed_keys = ["LINEAR_API_KEY"]
@ -91,6 +92,16 @@ class SearchSourceConnectorBase(BaseModel):
if not config.get("LINEAR_API_KEY"):
raise ValueError("LINEAR_API_KEY cannot be empty")
elif connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR:
# For DISCORD_CONNECTOR, only allow DISCORD_BOT_TOKEN
allowed_keys = ["DISCORD_BOT_TOKEN"]
if set(config.keys()) != set(allowed_keys):
raise ValueError(f"For DISCORD_CONNECTOR connector type, config must only contain these keys: {allowed_keys}")
# Ensure the bot token is not empty
if not config.get("DISCORD_BOT_TOKEN"):
raise ValueError("DISCORD_BOT_TOKEN cannot be empty")
return config
class SearchSourceConnectorCreate(SearchSourceConnectorBase):
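To make the accepted shape concrete, here is a hedged sketch of a payload that passes the DISCORD_CONNECTOR branch of this validator, plus configs it rejects. The full field list of `SearchSourceConnectorCreate` is not shown in this hunk; the fields below mirror the payload the new frontend page sends and are assumptions.

```python
# Hedged sketch; field names other than `config` follow the frontend payload shown
# later in this commit and are assumptions, not part of this hunk.
from app.schemas import SearchSourceConnectorCreate

connector = SearchSourceConnectorCreate(
    name="Discord Connector",
    connector_type="DISCORD_CONNECTOR",            # coerced to SearchSourceConnectorType
    config={"DISCORD_BOT_TOKEN": "placeholder-bot-token"},
    is_indexable=True,
    last_indexed_at=None,
)

# Each of these configs is rejected by the validator (surfaced as a pydantic ValidationError):
#   {}                                              -> keys must be exactly ["DISCORD_BOT_TOKEN"]
#   {"DISCORD_BOT_TOKEN": ""}                       -> "DISCORD_BOT_TOKEN cannot be empty"
#   {"DISCORD_BOT_TOKEN": "x", "GITHUB_PAT": "y"}   -> unexpected extra key
```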


@ -11,8 +11,11 @@ from app.connectors.slack_history import SlackHistory
from app.connectors.notion_history import NotionHistoryConnector
from app.connectors.github_connector import GitHubConnector
from app.connectors.linear_connector import LinearConnector
from app.connectors.discord_connector import DiscordConnector
from slack_sdk.errors import SlackApiError
import logging
import asyncio
from concurrent.futures import ThreadPoolExecutor
from app.utils.document_converters import generate_content_hash
@ -912,3 +915,257 @@ async def index_linear_issues(
await session.rollback()
logger.error(f"Failed to index Linear issues: {str(e)}", exc_info=True)
return 0, f"Failed to index Linear issues: {str(e)}"
async def index_discord_messages(
session: AsyncSession,
connector_id: int,
search_space_id: int,
update_last_indexed: bool = True
) -> Tuple[int, Optional[str]]:
"""
Index Discord messages from all accessible channels.
Args:
session: Database session
connector_id: ID of the Discord connector
search_space_id: ID of the search space to store documents in
update_last_indexed: Whether to update the last_indexed_at timestamp (default: True)
Returns:
Tuple containing (number of documents indexed, error message or None)
"""
try:
# Get the connector
result = await session.execute(
select(SearchSourceConnector)
.filter(
SearchSourceConnector.id == connector_id,
SearchSourceConnector.connector_type == SearchSourceConnectorType.DISCORD_CONNECTOR
)
)
connector = result.scalars().first()
if not connector:
return 0, f"Connector with ID {connector_id} not found or is not a Discord connector"
# Get the Discord token from the connector config
discord_token = connector.config.get("DISCORD_BOT_TOKEN")
if not discord_token:
return 0, "Discord token not found in connector config"
logger.info(f"Starting Discord indexing for connector {connector_id}")
# Initialize Discord client
discord_client = DiscordConnector(token=discord_token)
# Calculate date range
end_date = datetime.now(timezone.utc)
# Use last_indexed_at as start date if available, otherwise use 365 days ago
if connector.last_indexed_at:
start_date = connector.last_indexed_at.replace(tzinfo=timezone.utc)
logger.info(f"Using last_indexed_at ({start_date.strftime('%Y-%m-%d')}) as start date")
else:
start_date = end_date - timedelta(days=365)
logger.info(f"No last_indexed_at found, using {start_date.strftime('%Y-%m-%d')} (365 days ago) as start date")
# Format dates for Discord API
start_date_str = start_date.isoformat()
end_date_str = end_date.isoformat()
documents_indexed = 0
documents_skipped = 0
skipped_channels = []
try:
logger.info("Starting Discord bot to fetch guilds")
discord_client._bot_task = asyncio.create_task(discord_client.start_bot())
await discord_client._wait_until_ready()
logger.info("Fetching Discord guilds")
guilds = await discord_client.get_guilds()
logger.info(f"Found {len(guilds)} guilds")
except Exception as e:
logger.error(f"Failed to get Discord guilds: {str(e)}", exc_info=True)
await discord_client.close_bot()
return 0, f"Failed to get Discord guilds: {str(e)}"
if not guilds:
logger.info("No Discord guilds found to index")
await discord_client.close_bot()
return 0, "No Discord guilds found"
# Process each guild and channel
for guild in guilds:
guild_id = guild["id"]
guild_name = guild["name"]
logger.info(f"Processing guild: {guild_name} ({guild_id})")
try:
channels = await discord_client.get_text_channels(guild_id)
if not channels:
logger.info(f"No channels found in guild {guild_name}. Skipping.")
skipped_channels.append(f"{guild_name} (no channels)")
documents_skipped += 1
continue
for channel in channels:
channel_id = channel["id"]
channel_name = channel["name"]
try:
messages = await discord_client.get_channel_history(
channel_id=channel_id,
start_date=start_date_str,
end_date=end_date_str,
)
except Exception as e:
logger.error(f"Failed to get messages for channel {channel_name}: {str(e)}")
skipped_channels.append(f"{guild_name}#{channel_name} (fetch error)")
documents_skipped += 1
continue
if not messages:
logger.info(f"No messages found in channel {channel_name} for the specified date range.")
documents_skipped += 1
continue
# Format messages
formatted_messages = []
for msg in messages:
# Skip system messages if needed (Discord has some types)
if msg.get("type") in ["system"]:
continue
formatted_messages.append(msg)
if not formatted_messages:
logger.info(f"No valid messages found in channel {channel_name} after filtering.")
documents_skipped += 1
continue
# Convert messages to markdown format
channel_content = f"# Discord Channel: {guild_name} / {channel_name}\n\n"
for msg in formatted_messages:
user_name = msg.get("author_name", "Unknown User")
timestamp = msg.get("created_at", "Unknown Time")
text = msg.get("content", "")
channel_content += f"## {user_name} ({timestamp})\n\n{text}\n\n---\n\n"
# Format document metadata
metadata_sections = [
("METADATA", [
f"GUILD_NAME: {guild_name}",
f"GUILD_ID: {guild_id}",
f"CHANNEL_NAME: {channel_name}",
f"CHANNEL_ID: {channel_id}",
f"MESSAGE_COUNT: {len(formatted_messages)}"
]),
("CONTENT", [
"FORMAT: markdown",
"TEXT_START",
channel_content,
"TEXT_END"
])
]
# Build the document string
document_parts = []
document_parts.append("<DOCUMENT>")
for section_title, section_content in metadata_sections:
document_parts.append(f"<{section_title}>")
document_parts.extend(section_content)
document_parts.append(f"</{section_title}>")
document_parts.append("</DOCUMENT>")
combined_document_string = '\n'.join(document_parts)
content_hash = generate_content_hash(combined_document_string)
# Check if document with this content hash already exists
existing_doc_by_hash_result = await session.execute(
select(Document).where(Document.content_hash == content_hash)
)
existing_document_by_hash = existing_doc_by_hash_result.scalars().first()
if existing_document_by_hash:
logger.info(f"Document with content hash {content_hash} already exists for channel {guild_name}#{channel_name}. Skipping processing.")
documents_skipped += 1
continue
# Generate summary using summary_chain
summary_chain = SUMMARY_PROMPT_TEMPLATE | config.long_context_llm_instance
summary_result = await summary_chain.ainvoke({"document": combined_document_string})
summary_content = summary_result.content
summary_embedding = await asyncio.to_thread(
config.embedding_model_instance.embed, summary_content
)
# Process chunks
raw_chunks = await asyncio.to_thread(
config.chunker_instance.chunk,
channel_content
)
chunk_texts = [chunk.text for chunk in raw_chunks if chunk.text.strip()]
chunk_embeddings = await asyncio.to_thread(
lambda texts: [config.embedding_model_instance.embed(t) for t in texts],
chunk_texts
)
chunks = [
Chunk(content=raw_chunk.text, embedding=embedding)
for raw_chunk, embedding in zip(raw_chunks, chunk_embeddings)
]
# Create and store new document
document = Document(
search_space_id=search_space_id,
title=f"Discord - {guild_name}#{channel_name}",
document_type=DocumentType.DISCORD_CONNECTOR,
document_metadata={
"guild_name": guild_name,
"guild_id": guild_id,
"channel_name": channel_name,
"channel_id": channel_id,
"message_count": len(formatted_messages),
"start_date": start_date_str,
"end_date": end_date_str,
"indexed_at": datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
},
content=summary_content,
content_hash=content_hash,
embedding=summary_embedding,
chunks=chunks
)
session.add(document)
documents_indexed += 1
logger.info(f"Successfully indexed new channel {guild_name}#{channel_name} with {len(formatted_messages)} messages")
except Exception as e:
logger.error(f"Error processing guild {guild_name}: {str(e)}", exc_info=True)
skipped_channels.append(f"{guild_name} (processing error)")
documents_skipped += 1
continue
if update_last_indexed and documents_indexed > 0:
connector.last_indexed_at = datetime.now(timezone.utc)
logger.info(f"Updated last_indexed_at to {connector.last_indexed_at}")
await session.commit()
await discord_client.close_bot()
# Prepare result message
result_message = None
if skipped_channels:
result_message = f"Processed {documents_indexed} channels. Skipped {len(skipped_channels)} channels: {', '.join(skipped_channels)}"
else:
result_message = f"Processed {documents_indexed} channels."
logger.info(f"Discord indexing completed: {documents_indexed} new channels, {documents_skipped} skipped")
return documents_indexed, result_message
except SQLAlchemyError as db_error:
await session.rollback()
logger.error(f"Database error during Discord indexing: {str(db_error)}", exc_info=True)
return 0, f"Database error: {str(db_error)}"
except Exception as e:
await session.rollback()
logger.error(f"Failed to index Discord messages: {str(e)}", exc_info=True)
return 0, f"Failed to index Discord messages: {str(e)}"


@ -959,3 +959,99 @@ class ConnectorService:
"type": "LINKUP_API",
"sources": [],
}, []
async def search_discord(self, user_query: str, user_id: str, search_space_id: int, top_k: int = 20, search_mode: SearchMode = SearchMode.CHUNKS) -> tuple:
"""
Search for Discord messages and return both the source information and langchain documents
Args:
user_query: The user's query
user_id: The user's ID
search_space_id: The search space ID to search in
top_k: Maximum number of results to return
Returns:
tuple: (sources_info, langchain_documents)
"""
if search_mode == SearchMode.CHUNKS:
discord_chunks = await self.chunk_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="DISCORD_CONNECTOR"
)
elif search_mode == SearchMode.DOCUMENTS:
discord_chunks = await self.document_retriever.hybrid_search(
query_text=user_query,
top_k=top_k,
user_id=user_id,
search_space_id=search_space_id,
document_type="DISCORD_CONNECTOR"
)
# Transform document retriever results to match expected format
discord_chunks = self._transform_document_results(discord_chunks)
# Early return if no results
if not discord_chunks:
return {
"id": 11,
"name": "Discord",
"type": "DISCORD_CONNECTOR",
"sources": [],
}, []
# Process each chunk and create sources directly without deduplication
sources_list = []
async with self.counter_lock:
for i, chunk in enumerate(discord_chunks):
# Fix for UI
discord_chunks[i]['document']['id'] = self.source_id_counter
# Extract document metadata
document = chunk.get('document', {})
metadata = document.get('metadata', {})
# Create a mapped source entry with Discord-specific metadata
channel_name = metadata.get('channel_name', 'Unknown Channel')
channel_id = metadata.get('channel_id', '')
message_date = metadata.get('start_date', '')
# Create a more descriptive title for Discord messages
title = f"Discord: {channel_name}"
if message_date:
title += f" ({message_date})"
# Create a more descriptive description for Discord messages
description = chunk.get('content', '')[:100]
if len(description) == 100:
description += "..."
url = ""
guild_id = metadata.get('guild_id', '')
if guild_id and channel_id:
url = f"https://discord.com/channels/{guild_id}/{channel_id}"
elif channel_id:
# Fallback for DM channels or when guild_id is not available
url = f"https://discord.com/channels/@me/{channel_id}"
source = {
"id": self.source_id_counter,
"title": title,
"description": description,
"url": url,
}
self.source_id_counter += 1
sources_list.append(source)
# Create result object
result_object = {
"id": 11,
"name": "Discord",
"type": "DISCORD_CONNECTOR",
"sources": sources_list,
}
return result_object, discord_chunks
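The tuple returned by `search_discord` has the shape sketched below (values illustrative): the first element is appended to the sources list for the UI, and the second is collected into the raw documents used downstream by `fetch_relevant_documents`.

```python
# Illustrative return value of search_discord; ids and text are placeholders.
sources_info = {
    "id": 11,
    "name": "Discord",
    "type": "DISCORD_CONNECTOR",
    "sources": [
        {
            "id": 42,  # from the shared source_id_counter, also written onto the chunk's document
            "title": "Discord: general (2025-01-01T00:00:00+00:00)",
            "description": "First 100 characters of the matching chunk...",
            "url": "https://discord.com/channels/123456789012345678/234567890123456789",
        },
    ],
}
# The second element, discord_chunks, is the hybrid-search result list
# (document-mode results are first passed through _transform_document_results).
```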


@ -8,6 +8,7 @@ dependencies = [
"alembic>=1.13.0",
"asyncpg>=0.30.0",
"chonkie[all]>=1.0.6",
"discord-py>=2.5.2",
"fastapi>=0.115.8",
"fastapi-users[oauth,sqlalchemy]>=14.0.1",
"firecrawl-py>=1.12.0",

surfsense_backend/uv.lock (generated, 3395 lines changed; diff suppressed because it is too large)


@ -191,6 +191,18 @@ export default function EditConnectorPage() {
placeholder="Begins with linkup_..."
/>
)}
{/* == Discord == */}
{connector.connector_type === "DISCORD_CONNECTOR" && (
<EditSimpleTokenForm
control={editForm.control}
fieldName="DISCORD_BOT_TOKEN"
fieldLabel="Discord Bot Token"
fieldDescription="Update the Discord Bot Token if needed."
placeholder="Bot token..."
/>
)}
</CardContent>
<CardFooter className="border-t pt-6">
<Button


@ -52,6 +52,7 @@ const getConnectorTypeDisplay = (type: string): string => {
"SLACK_CONNECTOR": "Slack Connector",
"NOTION_CONNECTOR": "Notion Connector",
"GITHUB_CONNECTOR": "GitHub Connector",
"DISCORD_CONNECTOR": "Discord Connector",
"LINKUP_API": "Linkup",
// Add other connector types here as needed
};
@ -89,6 +90,7 @@ export default function EditConnectorPage() {
"SLACK_CONNECTOR": "SLACK_BOT_TOKEN",
"NOTION_CONNECTOR": "NOTION_INTEGRATION_TOKEN",
"GITHUB_CONNECTOR": "GITHUB_PAT",
"DISCORD_CONNECTOR": "DISCORD_BOT_TOKEN",
"LINKUP_API": "LINKUP_API_KEY"
};
return fieldMap[connectorType] || "";


@ -0,0 +1,315 @@
"use client";
import { useState } from "react";
import { useRouter, useParams } from "next/navigation";
import { motion } from "framer-motion";
import { zodResolver } from "@hookform/resolvers/zod";
import { useForm } from "react-hook-form";
import * as z from "zod";
import { toast } from "sonner";
import { ArrowLeft, Check, Info, Loader2 } from "lucide-react";
import { useSearchSourceConnectors } from "@/hooks/useSearchSourceConnectors";
import {
Form,
FormControl,
FormDescription,
FormField,
FormItem,
FormLabel,
FormMessage,
} from "@/components/ui/form";
import { Input } from "@/components/ui/input";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import {
Alert,
AlertDescription,
AlertTitle,
} from "@/components/ui/alert";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
// Define the form schema with Zod
const discordConnectorFormSchema = z.object({
name: z.string().min(3, {
message: "Connector name must be at least 3 characters.",
}),
bot_token: z.string()
.min(50, { message: "Discord Bot Token appears to be too short." })
.regex(/^[A-Za-z0-9._-]+$/, { message: "Discord Bot Token contains invalid characters." }),
});
// Define the type for the form values
type DiscordConnectorFormValues = z.infer<typeof discordConnectorFormSchema>;
export default function DiscordConnectorPage() {
const router = useRouter();
const params = useParams();
const searchSpaceId = params.search_space_id as string;
const [isSubmitting, setIsSubmitting] = useState(false);
const { createConnector } = useSearchSourceConnectors();
// Initialize the form
const form = useForm<DiscordConnectorFormValues>({
resolver: zodResolver(discordConnectorFormSchema),
defaultValues: {
name: "Discord Connector",
bot_token: "",
},
});
// Handle form submission
const onSubmit = async (values: DiscordConnectorFormValues) => {
setIsSubmitting(true);
try {
await createConnector({
name: values.name,
connector_type: "DISCORD_CONNECTOR",
config: {
DISCORD_BOT_TOKEN: values.bot_token,
},
is_indexable: true,
last_indexed_at: null,
});
toast.success("Discord connector created successfully!");
router.push(`/dashboard/${searchSpaceId}/connectors`);
} catch (error) {
console.error("Error creating connector:", error);
toast.error(error instanceof Error ? error.message : "Failed to create connector");
} finally {
setIsSubmitting(false);
}
};
return (
<div className="container mx-auto py-8 max-w-3xl">
<Button
variant="ghost"
className="mb-6"
onClick={() => router.push(`/dashboard/${searchSpaceId}/connectors/add`)}
>
<ArrowLeft className="mr-2 h-4 w-4" />
Back to Connectors
</Button>
<motion.div
initial={{ opacity: 0, y: 20 }}
animate={{ opacity: 1, y: 0 }}
transition={{ duration: 0.5 }}
>
<Tabs defaultValue="connect" className="w-full">
<TabsList className="grid w-full grid-cols-2 mb-6">
<TabsTrigger value="connect">Connect</TabsTrigger>
<TabsTrigger value="documentation">Documentation</TabsTrigger>
</TabsList>
<TabsContent value="connect">
<Card className="border-2 border-border">
<CardHeader>
<CardTitle className="text-2xl font-bold">Connect Discord Server</CardTitle>
<CardDescription>
Integrate with Discord to search and retrieve information from your servers and channels. This connector can index your Discord messages for search.
</CardDescription>
</CardHeader>
<CardContent>
<Alert className="mb-6 bg-muted">
<Info className="h-4 w-4" />
<AlertTitle>Bot Token Required</AlertTitle>
<AlertDescription>
You'll need a Discord Bot Token to use this connector. You can create a Discord bot and get the token from the{" "}
<a
href="https://discord.com/developers/applications"
target="_blank"
rel="noopener noreferrer"
className="font-medium underline underline-offset-4"
>
Discord Developer Portal
</a>.
</AlertDescription>
</Alert>
<Form {...form}>
<form onSubmit={form.handleSubmit(onSubmit)} className="space-y-6">
<FormField
control={form.control}
name="name"
render={({ field }) => (
<FormItem>
<FormLabel>Connector Name</FormLabel>
<FormControl>
<Input placeholder="My Discord Connector" {...field} />
</FormControl>
<FormDescription>
A friendly name to identify this connector.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<FormField
control={form.control}
name="bot_token"
render={({ field }) => (
<FormItem>
<FormLabel>Discord Bot Token</FormLabel>
<FormControl>
<Input
type="password"
placeholder="Bot Token..."
{...field}
/>
</FormControl>
<FormDescription>
Your Discord Bot Token will be encrypted and stored securely. You can find it in the Bot section of your application in the Discord Developer Portal.
</FormDescription>
<FormMessage />
</FormItem>
)}
/>
<div className="flex justify-end">
<Button
type="submit"
disabled={isSubmitting}
className="w-full sm:w-auto"
>
{isSubmitting ? (
<>
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
Connecting...
</>
) : (
<>
<Check className="mr-2 h-4 w-4" />
Connect Discord
</>
)}
</Button>
</div>
</form>
</Form>
</CardContent>
<CardFooter className="flex flex-col items-start border-t bg-muted/50 px-6 py-4">
<h4 className="text-sm font-medium">What you get with Discord integration:</h4>
<ul className="mt-2 list-disc pl-5 text-sm text-muted-foreground">
<li>Search through your Discord servers and channels</li>
<li>Access historical messages and shared files</li>
<li>Connect your team's knowledge directly to your search space</li>
<li>Keep your search results up-to-date with the latest communications</li>
<li>Index your Discord messages for enhanced search capabilities</li>
</ul>
</CardFooter>
</Card>
</TabsContent>
<TabsContent value="documentation">
<Card className="border-2 border-border">
<CardHeader>
<CardTitle className="text-2xl font-bold">Discord Connector Documentation</CardTitle>
<CardDescription>
Learn how to set up and use the Discord connector to index your server data.
</CardDescription>
</CardHeader>
<CardContent className="space-y-6">
<div>
<h3 className="text-xl font-semibold mb-2">How it works</h3>
<p className="text-muted-foreground">
The Discord connector indexes every channel that the bot can access in your servers.
</p>
<ul className="mt-2 list-disc pl-5 text-muted-foreground">
<li>Upcoming: Support for private channels by granting the bot access.</li>
</ul>
</div>
<Accordion type="single" collapsible className="w-full">
<AccordionItem value="authorization">
<AccordionTrigger className="text-lg font-medium">Authorization</AccordionTrigger>
<AccordionContent className="space-y-4">
<Alert className="bg-muted">
<Info className="h-4 w-4" />
<AlertTitle>Bot Setup Required</AlertTitle>
<AlertDescription>
You must create a Discord bot and add it to your server with the correct permissions.
</AlertDescription>
</Alert>
<ol className="list-decimal pl-5 space-y-3">
<li>Go to <a href="https://discord.com/developers/applications" target="_blank" rel="noopener noreferrer" className="font-medium underline underline-offset-4">https://discord.com/developers/applications</a>.</li>
<li>Create a new application and add a bot to it.</li>
<li>Copy the Bot Token from the Bot section.</li>
<li>Invite the bot to your server with the following OAuth2 scopes and permissions:
<ul className="list-disc pl-5 mt-1">
<li>Scopes: <code>bot</code></li>
<li>Bot Permissions: <code>Read Messages/View Channels</code>, <code>Read Message History</code>, <code>Send Messages</code></li>
</ul>
</li>
<li>Paste the Bot Token above to connect.</li>
</ol>
</AccordionContent>
</AccordionItem>
<AccordionItem value="indexing">
<AccordionTrigger className="text-lg font-medium">Indexing</AccordionTrigger>
<AccordionContent className="space-y-4">
<ol className="list-decimal pl-5 space-y-3">
<li>Navigate to the Connector Dashboard and select the <strong>Discord</strong> Connector.</li>
<li>Place the <strong>Bot Token</strong> under <strong>Step 1 Provide Credentials</strong>.</li>
<li>Click <strong>Connect</strong> to establish the connection.</li>
</ol>
<Alert className="bg-muted">
<Info className="h-4 w-4" />
<AlertTitle>Important: Bot Channel Access</AlertTitle>
<AlertDescription>
After connecting, ensure the bot has access to all channels you want to index. You may need to adjust channel permissions in Discord.
</AlertDescription>
</Alert>
<Alert className="bg-muted mt-4">
<Info className="h-4 w-4" />
<AlertTitle>First Indexing</AlertTitle>
<AlertDescription>
The first indexing pulls all accessible channels and may take longer than future updates. Only channels where the bot has access will be indexed.
</AlertDescription>
</Alert>
<div className="mt-4">
<h4 className="font-medium mb-2">Troubleshooting:</h4>
<ul className="list-disc pl-5 space-y-2 text-muted-foreground">
<li>
<strong>Missing messages:</strong> If you don't see messages from a channel, check the bot's permissions for that channel.
</li>
<li>
<strong>Bot not responding:</strong> Make sure the bot is online and the token is correct.
</li>
<li>
<strong>Private channels:</strong> The bot must be explicitly granted access to private channels.
</li>
</ul>
</div>
</AccordionContent>
</AccordionItem>
</Accordion>
</CardContent>
</Card>
</TabsContent>
</Tabs>
</motion.div>
</div>
);
}
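The Authorization steps above end at inviting the bot with the right scope and permissions. Below is a hedged sketch of the invite URL that grants them; the client ID is a placeholder, and the permission bit values are my reading of Discord's permission flags, so verify them against the Developer Portal's OAuth2 URL generator.

```python
# Hedged helper (not part of this commit): build a bot invite URL with the permissions
# listed in the documentation tab. CLIENT_ID is a placeholder; verify the permission
# bits against Discord's OAuth2 URL generator before use.
VIEW_CHANNELS = 1 << 10
SEND_MESSAGES = 1 << 11
READ_MESSAGE_HISTORY = 1 << 16

def build_invite_url(client_id: str) -> str:
    permissions = VIEW_CHANNELS | SEND_MESSAGES | READ_MESSAGE_HISTORY  # 68608
    return (
        "https://discord.com/oauth2/authorize"
        f"?client_id={client_id}&scope=bot&permissions={permissions}"
    )

print(build_invite_url("YOUR_APPLICATION_CLIENT_ID"))
```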


@ -79,11 +79,11 @@ const connectorCategories: ConnectorCategory[] = [
status: "coming-soon",
},
{
id: "discord",
id: "discord-connector",
title: "Discord",
description: "Connect to Discord servers to access messages and channels.",
icon: <IconBrandDiscord className="h-6 w-6" />,
status: "coming-soon",
status: "available"
},
],
},
@ -190,7 +190,7 @@ const cardVariants = {
export default function ConnectorsPage() {
const params = useParams();
const searchSpaceId = params.search_space_id as string;
const [expandedCategories, setExpandedCategories] = useState<string[]>(["search-engines", "knowledge-bases", "project-management"]);
const [expandedCategories, setExpandedCategories] = useState<string[]>(["search-engines", "knowledge-bases", "project-management", "team-chats"]);
const toggleCategory = (categoryId: string) => {
setExpandedCategories(prev =>


@ -45,7 +45,7 @@ import {
} from "@/components/ui/table";
import { useDocuments } from "@/hooks/use-documents";
import { cn } from "@/lib/utils";
import { IconBrandGithub, IconBrandNotion, IconBrandSlack, IconBrandYoutube, IconLayoutKanban } from "@tabler/icons-react";
import { IconBrandDiscord, IconBrandGithub, IconBrandNotion, IconBrandSlack, IconBrandYoutube, IconLayoutKanban } from "@tabler/icons-react";
import {
ColumnDef,
ColumnFiltersState,
@ -109,7 +109,7 @@ const fadeInScale = {
type Document = {
id: number;
title: string;
document_type: "EXTENSION" | "CRAWLED_URL" | "SLACK_CONNECTOR" | "NOTION_CONNECTOR" | "FILE" | "YOUTUBE_VIDEO" | "LINEAR_CONNECTOR";
document_type: "EXTENSION" | "CRAWLED_URL" | "SLACK_CONNECTOR" | "NOTION_CONNECTOR" | "FILE" | "YOUTUBE_VIDEO" | "LINEAR_CONNECTOR" | "DISCORD_CONNECTOR";
document_metadata: any;
content: string;
created_at: string;
@ -139,6 +139,7 @@ const documentTypeIcons = {
YOUTUBE_VIDEO: IconBrandYoutube,
GITHUB_CONNECTOR: IconBrandGithub,
LINEAR_CONNECTOR: IconLayoutKanban,
DISCORD_CONNECTOR: IconBrandDiscord,
} as const;
const columns: ColumnDef<Document>[] = [


@ -47,7 +47,7 @@ export function ModernHeroWithGradients() {
</h1>
</div>
<p className="mx-auto max-w-3xl py-6 text-center text-base text-gray-600 dark:text-neutral-300 md:text-lg lg:text-xl">
A Customizable AI Research Agent just like NotebookLM or Perplexity, but connected to external sources such as search engines (Tavily, LinkUp), Slack, Linear, Notion, YouTube, GitHub and more.
A Customizable AI Research Agent just like NotebookLM or Perplexity, but connected to external sources such as search engines (Tavily, LinkUp), Slack, Linear, Notion, YouTube, GitHub, Discord, and more.
</p>
<div className="flex flex-col items-center gap-6 py-6 sm:flex-row">
<Link


@ -11,7 +11,7 @@ import {
Link,
Webhook,
} from 'lucide-react';
import { IconBrandNotion, IconBrandSlack, IconBrandYoutube, IconBrandGithub, IconLayoutKanban, IconLinkPlus } from "@tabler/icons-react";
import { IconBrandNotion, IconBrandSlack, IconBrandYoutube, IconBrandGithub, IconLayoutKanban, IconLinkPlus, IconBrandDiscord } from "@tabler/icons-react";
import { Button } from '@/components/ui/button';
import { Connector, ResearchMode } from './types';
@ -41,6 +41,8 @@ export const getConnectorIcon = (connectorType: string) => {
return <IconBrandSlack {...iconProps} />;
case 'NOTION_CONNECTOR':
return <IconBrandNotion {...iconProps} />;
case 'DISCORD_CONNECTOR':
return <IconBrandDiscord {...iconProps} />;
case 'DEEP':
return <Sparkles {...iconProps} />;
case 'DEEPER':

View file

@ -31,5 +31,6 @@ export const editConnectorSchema = z.object({
TAVILY_API_KEY: z.string().optional(),
LINEAR_API_KEY: z.string().optional(),
LINKUP_API_KEY: z.string().optional(),
DISCORD_BOT_TOKEN: z.string().optional(),
});
export type EditConnectorFormValues = z.infer<typeof editConnectorSchema>;


@ -218,7 +218,7 @@ export function AppSidebar({
</div>
<div className="grid flex-1 text-left text-sm leading-tight">
<span className="truncate font-medium">SurfSense</span>
<span className="truncate text-xs">beta v0.0.6</span>
<span className="truncate text-xs">beta v0.0.7</span>
</div>
</div>
</SidebarMenuButton>


@ -41,7 +41,8 @@ export function useConnectorEditPage(connectorId: number, searchSpaceId: string)
NOTION_INTEGRATION_TOKEN: "",
SERPER_API_KEY: "",
TAVILY_API_KEY: "",
LINEAR_API_KEY: ""
LINEAR_API_KEY: "",
DISCORD_BOT_TOKEN: "",
},
});
@ -60,7 +61,8 @@ export function useConnectorEditPage(connectorId: number, searchSpaceId: string)
SERPER_API_KEY: config.SERPER_API_KEY || "",
TAVILY_API_KEY: config.TAVILY_API_KEY || "",
LINEAR_API_KEY: config.LINEAR_API_KEY || "",
LINKUP_API_KEY: config.LINKUP_API_KEY || ""
LINKUP_API_KEY: config.LINKUP_API_KEY || "",
DISCORD_BOT_TOKEN: config.DISCORD_BOT_TOKEN || "",
});
if (currentConnector.connector_type === 'GITHUB_CONNECTOR') {
const savedRepos = config.repo_full_names || [];
@ -171,6 +173,12 @@ export function useConnectorEditPage(connectorId: number, searchSpaceId: string)
newConfig = { LINKUP_API_KEY: formData.LINKUP_API_KEY };
}
break;
case 'DISCORD_CONNECTOR':
if (formData.DISCORD_BOT_TOKEN !== originalConfig.DISCORD_BOT_TOKEN) {
if (!formData.DISCORD_BOT_TOKEN) { toast.error("Discord Bot Token cannot be empty."); setIsSaving(false); return; }
newConfig = { DISCORD_BOT_TOKEN: formData.DISCORD_BOT_TOKEN };
}
break;
}
if (newConfig !== null) {
@ -212,6 +220,8 @@ export function useConnectorEditPage(connectorId: number, searchSpaceId: string)
editForm.setValue('LINEAR_API_KEY', newlySavedConfig.LINEAR_API_KEY || "");
} else if(connector.connector_type === 'LINKUP_API') {
editForm.setValue('LINKUP_API_KEY', newlySavedConfig.LINKUP_API_KEY || "");
} else if(connector.connector_type === 'DISCORD_CONNECTOR') {
editForm.setValue('DISCORD_BOT_TOKEN', newlySavedConfig.DISCORD_BOT_TOKEN || "");
}
}
if (connector.connector_type === 'GITHUB_CONNECTOR') {


@ -7,6 +7,7 @@ export const getConnectorTypeDisplay = (type: string): string => {
"NOTION_CONNECTOR": "Notion",
"GITHUB_CONNECTOR": "GitHub",
"LINEAR_CONNECTOR": "Linear",
"DISCORD_CONNECTOR": "Discord",
"LINKUP_API": "Linkup",
};
return typeMap[type] || type;