mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-05 23:42:21 +00:00
Merge pull request #724 from manojag115/feature/obsidian
Add obsidian connector to surfsense
This commit is contained in:
commit
da165e6f53
26 changed files with 1594 additions and 13 deletions
|
|
@ -45,6 +45,8 @@ services:
|
|||
volumes:
|
||||
- ./surfsense_backend/app:/app/app
|
||||
- shared_temp:/tmp
|
||||
# Uncomment and edit the line below to enable Obsidian vault indexing
|
||||
# - /path/to/your/obsidian/vault:/obsidian-vault:ro
|
||||
env_file:
|
||||
- ./surfsense_backend/.env
|
||||
environment:
|
||||
|
|
|
|||
|
|
@ -0,0 +1,33 @@
|
|||
"""Add Obsidian connector enums

Revision ID: 78
Revises: 77
Create Date: 2026-01-21

"""

from collections.abc import Sequence

from alembic import op

# revision identifiers, used by Alembic.
revision: str = "78"
down_revision: str | None = "77"
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None


def upgrade() -> None:
    # Add OBSIDIAN_CONNECTOR to documenttype enum.
    # IF NOT EXISTS makes this idempotent if the migration is re-run.
    op.execute("ALTER TYPE documenttype ADD VALUE IF NOT EXISTS 'OBSIDIAN_CONNECTOR'")

    # Add OBSIDIAN_CONNECTOR to searchsourceconnectortype enum
    op.execute(
        "ALTER TYPE searchsourceconnectortype ADD VALUE IF NOT EXISTS 'OBSIDIAN_CONNECTOR'"
    )


def downgrade() -> None:
    # Note: PostgreSQL doesn't support removing enum values directly.
    # The values will remain in the enum type but won't be used.
    pass
|
||||
|
|
@ -34,6 +34,12 @@ You have access to the following tools:
|
|||
- Returns: Documentation content with chunk IDs for citations (prefixed with 'doc-', e.g., [citation:doc-123])
|
||||
|
||||
1. search_knowledge_base: Search the user's personal knowledge base for relevant information.
|
||||
- IMPORTANT: When searching for information (meetings, schedules, notes, tasks, etc.), ALWAYS search broadly
|
||||
across ALL sources first by omitting connectors_to_search. The user may store information in various places
|
||||
including calendar apps, note-taking apps (Obsidian, Notion), chat apps (Slack, Discord), and more.
|
||||
- Only narrow to specific connectors if the user explicitly asks (e.g., "check my Slack" or "in my calendar").
|
||||
- Personal notes in Obsidian, Notion, or NOTE often contain schedules, meeting times, reminders, and other
|
||||
important information that may not be in calendars.
|
||||
- Args:
|
||||
- query: The search query - be specific and include key terms
|
||||
- top_k: Number of results to retrieve (default: 10)
|
||||
|
|
@ -157,6 +163,13 @@ You have access to the following tools:
|
|||
stating "Based on your memory..." - integrate the context seamlessly.
|
||||
</tools>
|
||||
<tool_call_examples>
|
||||
- User: "What time is the team meeting today?"
|
||||
- Call: `search_knowledge_base(query="team meeting time today")` (searches ALL sources - calendar, notes, Obsidian, etc.)
|
||||
- DO NOT limit to just calendar - the info might be in notes!
|
||||
|
||||
- User: "When is my gym session?"
|
||||
- Call: `search_knowledge_base(query="gym session time schedule")` (searches ALL sources)
|
||||
|
||||
- User: "How do I install SurfSense?"
|
||||
- Call: `search_surfsense_docs(query="installation setup")`
|
||||
|
||||
|
|
@ -175,6 +188,12 @@ You have access to the following tools:
|
|||
- User: "What did I discuss on Slack last week about the React migration?"
|
||||
- Call: `search_knowledge_base(query="React migration", connectors_to_search=["SLACK_CONNECTOR"], start_date="YYYY-MM-DD", end_date="YYYY-MM-DD")`
|
||||
|
||||
- User: "Check my Obsidian notes for meeting notes"
|
||||
- Call: `search_knowledge_base(query="meeting notes", connectors_to_search=["OBSIDIAN_CONNECTOR"])`
|
||||
|
||||
- User: "What's in my Obsidian vault about project ideas?"
|
||||
- Call: `search_knowledge_base(query="project ideas", connectors_to_search=["OBSIDIAN_CONNECTOR"])`
|
||||
|
||||
- User: "Remember that I prefer TypeScript over JavaScript"
|
||||
- Call: `save_memory(content="User prefers TypeScript over JavaScript for development", category="preference")`
|
||||
|
||||
|
|
|
|||
|
|
@ -49,6 +49,7 @@ _ALL_CONNECTORS: list[str] = [
|
|||
"BOOKSTACK_CONNECTOR",
|
||||
"CRAWLED_URL",
|
||||
"CIRCLEBACK",
|
||||
"OBSIDIAN_CONNECTOR",
|
||||
]
|
||||
|
||||
|
||||
|
|
@ -508,6 +509,16 @@ async def search_knowledge_base_async(
|
|||
)
|
||||
all_documents.extend(chunks)
|
||||
|
||||
elif connector == "OBSIDIAN_CONNECTOR":
|
||||
_, chunks = await connector_service.search_obsidian(
|
||||
user_query=query,
|
||||
search_space_id=search_space_id,
|
||||
top_k=top_k,
|
||||
start_date=resolved_start_date,
|
||||
end_date=resolved_end_date,
|
||||
)
|
||||
all_documents.extend(chunks)
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error searching connector {connector}: {e}")
|
||||
continue
|
||||
|
|
@ -596,6 +607,7 @@ def create_search_knowledge_base_tool(
|
|||
- WEBCRAWLER_CONNECTOR: "Webpages indexed by SurfSense" (personally selected websites)
|
||||
- BOOKSTACK_CONNECTOR: "BookStack pages" (personal documentation)
|
||||
- CIRCLEBACK: "Circleback meeting notes, transcripts, and action items" (personal meeting records)
|
||||
- OBSIDIAN_CONNECTOR: "Obsidian vault notes and markdown files" (personal notes and knowledge management)
|
||||
|
||||
NOTE: `WEBCRAWLER_CONNECTOR` is mapped internally to the canonical document type `CRAWLED_URL`.
|
||||
|
||||
|
|
|
|||
|
|
@ -61,6 +61,21 @@ class Config:
|
|||
"FFmpeg is not installed on the system. Please install it to use the Surfsense Podcaster."
|
||||
)
|
||||
|
||||
# Deployment Mode (self-hosted or cloud)
|
||||
# self-hosted: Full access to local file system connectors (Obsidian, etc.)
|
||||
# cloud: Only cloud-based connectors available
|
||||
DEPLOYMENT_MODE = os.getenv("SURFSENSE_DEPLOYMENT_MODE", "self-hosted")
|
||||
|
||||
@classmethod
|
||||
def is_self_hosted(cls) -> bool:
|
||||
"""Check if running in self-hosted mode."""
|
||||
return cls.DEPLOYMENT_MODE == "self-hosted"
|
||||
|
||||
@classmethod
|
||||
def is_cloud(cls) -> bool:
|
||||
"""Check if running in cloud mode."""
|
||||
return cls.DEPLOYMENT_MODE == "cloud"
|
||||
|
||||
# Database
|
||||
DATABASE_URL = os.getenv("DATABASE_URL")
|
||||
|
||||
|
|
|
|||
|
|
@ -53,6 +53,7 @@ class DocumentType(str, Enum):
|
|||
ELASTICSEARCH_CONNECTOR = "ELASTICSEARCH_CONNECTOR"
|
||||
BOOKSTACK_CONNECTOR = "BOOKSTACK_CONNECTOR"
|
||||
CIRCLEBACK = "CIRCLEBACK"
|
||||
OBSIDIAN_CONNECTOR = "OBSIDIAN_CONNECTOR"
|
||||
NOTE = "NOTE"
|
||||
COMPOSIO_CONNECTOR = "COMPOSIO_CONNECTOR" # Generic Composio integration
|
||||
|
||||
|
|
@ -81,6 +82,7 @@ class SearchSourceConnectorType(str, Enum):
|
|||
WEBCRAWLER_CONNECTOR = "WEBCRAWLER_CONNECTOR"
|
||||
BOOKSTACK_CONNECTOR = "BOOKSTACK_CONNECTOR"
|
||||
CIRCLEBACK_CONNECTOR = "CIRCLEBACK_CONNECTOR"
|
||||
OBSIDIAN_CONNECTOR = "OBSIDIAN_CONNECTOR" # Self-hosted only - Local Obsidian vault indexing
|
||||
MCP_CONNECTOR = "MCP_CONNECTOR" # Model Context Protocol - User-defined API tools
|
||||
COMPOSIO_CONNECTOR = (
|
||||
"COMPOSIO_CONNECTOR" # Generic Composio integration (Google, Slack, etc.)
|
||||
|
|
|
|||
|
|
@ -868,6 +868,25 @@ async def index_connector_content(
|
|||
)
|
||||
response_message = "Web page indexing started in the background."
|
||||
|
||||
elif connector.connector_type == SearchSourceConnectorType.OBSIDIAN_CONNECTOR:
|
||||
from app.config import config as app_config
|
||||
from app.tasks.celery_tasks.connector_tasks import index_obsidian_vault_task
|
||||
|
||||
# Obsidian connector only available in self-hosted mode
|
||||
if not app_config.is_self_hosted():
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail="Obsidian connector is only available in self-hosted mode",
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Triggering Obsidian vault indexing for connector {connector_id} into search space {search_space_id} from {indexing_from} to {indexing_to}"
|
||||
)
|
||||
index_obsidian_vault_task.delay(
|
||||
connector_id, search_space_id, str(user.id), indexing_from, indexing_to
|
||||
)
|
||||
response_message = "Obsidian vault indexing started in the background."
|
||||
|
||||
elif connector.connector_type == SearchSourceConnectorType.COMPOSIO_CONNECTOR:
|
||||
from app.tasks.celery_tasks.connector_tasks import (
|
||||
index_composio_connector_task,
|
||||
|
|
@ -2086,6 +2105,60 @@ async def run_bookstack_indexing(
|
|||
)
|
||||
|
||||
|
||||
# Add new helper functions for Obsidian indexing
async def run_obsidian_indexing_with_new_session(
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Wrapper to run Obsidian indexing with its own database session.

    Used when the caller (e.g. a background task) does not already hold an
    AsyncSession; the session's lifetime is scoped to this call.
    """
    logger.info(
        f"Background task started: Indexing Obsidian connector {connector_id} into space {search_space_id} from {start_date} to {end_date}"
    )
    # Session is created and disposed here so the background task never
    # shares a session with the request that scheduled it.
    async with async_session_maker() as session:
        await run_obsidian_indexing(
            session, connector_id, search_space_id, user_id, start_date, end_date
        )
    logger.info(
        f"Background task finished: Indexing Obsidian connector {connector_id}"
    )


async def run_obsidian_indexing(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """
    Background task to run Obsidian vault indexing.

    Args:
        session: Database session
        connector_id: ID of the Obsidian connector
        search_space_id: ID of the search space
        user_id: ID of the user
        start_date: Start date for indexing
        end_date: End date for indexing
    """
    # Imported lazily — presumably to avoid a circular import between the
    # routes module and the indexer package; verify before hoisting.
    from app.tasks.connector_indexers import index_obsidian_vault

    # Delegates to the shared notification wrapper so Obsidian indexing gets
    # the same progress/failure notifications as the other connectors.
    await _run_indexing_with_notifications(
        session=session,
        connector_id=connector_id,
        search_space_id=search_space_id,
        user_id=user_id,
        start_date=start_date,
        end_date=end_date,
        indexing_function=index_obsidian_vault,
        update_timestamp_func=_update_connector_timestamp_by_id,
    )
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# MCP Connector Routes
|
||||
# =============================================================================
|
||||
|
|
|
|||
59
surfsense_backend/app/schemas/obsidian_auth_credentials.py
Normal file
59
surfsense_backend/app/schemas/obsidian_auth_credentials.py
Normal file
|
|
@ -0,0 +1,59 @@
|
|||
"""
Obsidian Connector Credentials Schema.

Obsidian is a local-first note-taking app that stores notes as markdown files.
This connector supports indexing from local file system (self-hosted only).
"""

from pydantic import BaseModel, field_validator


class ObsidianAuthCredentialsBase(BaseModel):
    """
    Credentials/configuration for the Obsidian connector.

    Since Obsidian vaults are local directories, this schema primarily
    holds the vault path and configuration options rather than API tokens.
    """

    # Filesystem path to the vault directory; required (validated below).
    vault_path: str
    # Optional display name for the vault.
    vault_name: str | None = None
    # Folder names to skip during indexing; None triggers the defaults below.
    exclude_folders: list[str] | None = None
    # Whether to index non-markdown attachments as well.
    include_attachments: bool = False

    @field_validator("vault_path")
    @classmethod
    def validate_vault_path(cls, v: str) -> str:
        """Ensure vault path is provided and stripped of whitespace."""
        if not v or not v.strip():
            raise ValueError("Vault path is required")
        return v.strip()

    @field_validator("exclude_folders", mode="before")
    @classmethod
    def parse_exclude_folders(cls, v):
        """Parse exclude_folders from string if needed."""
        # None -> Obsidian housekeeping folders that should never be indexed.
        if v is None:
            return [".trash", ".obsidian", "templates"]
        # Comma-separated string (e.g. from a form field) -> list of names.
        if isinstance(v, str):
            return [f.strip() for f in v.split(",") if f.strip()]
        return v

    def to_dict(self) -> dict:
        """Convert credentials to dictionary for storage."""
        return {
            "vault_path": self.vault_path,
            "vault_name": self.vault_name,
            "exclude_folders": self.exclude_folders,
            "include_attachments": self.include_attachments,
        }

    @classmethod
    def from_dict(cls, data: dict) -> "ObsidianAuthCredentialsBase":
        """Create credentials from dictionary."""
        # NOTE(review): a missing "vault_path" key defaults to "", which
        # validate_vault_path rejects — confirm callers always persist it.
        return cls(
            vault_path=data.get("vault_path", ""),
            vault_name=data.get("vault_name"),
            exclude_folders=data.get("exclude_folders"),
            include_attachments=data.get("include_attachments", False),
        )
|
||||
|
|
@ -2780,3 +2780,94 @@ class ConnectorService:
|
|||
}
|
||||
|
||||
return result_object, circleback_docs
|
||||
|
||||
    async def search_obsidian(
        self,
        user_query: str,
        search_space_id: int,
        top_k: int = 20,
        start_date: datetime | None = None,
        end_date: datetime | None = None,
    ) -> tuple:
        """
        Search for Obsidian vault notes and return both the source information and langchain documents.

        Uses combined chunk-level and document-level hybrid search with RRF fusion.

        Args:
            user_query: The user's query
            search_space_id: The search space ID to search in
            top_k: Maximum number of results to return
            start_date: Optional start date for filtering documents by updated_at
            end_date: Optional end date for filtering documents by updated_at

        Returns:
            tuple: (sources_info, langchain_documents)
        """
        obsidian_docs = await self._combined_rrf_search(
            query_text=user_query,
            search_space_id=search_space_id,
            document_type="OBSIDIAN_CONNECTOR",
            top_k=top_k,
            start_date=start_date,
            end_date=end_date,
        )

        # Early return if no results
        if not obsidian_docs:
            return {
                # NOTE(review): 53 appears to be a magic connector-source id —
                # confirm it matches the ids used by the sibling search_* methods.
                "id": 53,
                "name": "Obsidian Vault",
                "type": "OBSIDIAN_CONNECTOR",
                "sources": [],
            }, []

        # Callback: source title comes from the stored document title.
        def _title_fn(doc_info: dict[str, Any], metadata: dict[str, Any]) -> str:
            return doc_info.get("title", "Untitled Note")

        def _url_fn(doc_info: dict[str, Any], metadata: dict[str, Any]) -> str:
            # Obsidian URL format: obsidian://vault_name/path
            return doc_info.get("url", "")

        # Callback: preview of the chunk plus vault/tag context for display.
        def _description_fn(
            chunk: dict[str, Any], _doc_info: dict[str, Any], metadata: dict[str, Any]
        ) -> str:
            description = self._chunk_preview(chunk.get("content", ""), limit=200)
            info_parts = []
            vault_name = metadata.get("vault_name")
            tags = metadata.get("tags", [])
            if vault_name:
                info_parts.append(f"Vault: {vault_name}")
            # Only the first three tags are surfaced in the description.
            if tags and isinstance(tags, list) and len(tags) > 0:
                info_parts.append(f"Tags: {', '.join(tags[:3])}")
            if info_parts:
                # NOTE(review): strip(" |") also trims literal '|' / spaces from
                # the description's own edges — confirm that is acceptable.
                description = (description + " | " + " | ".join(info_parts)).strip(" |")
            return description

        # Callback: extra Obsidian-specific fields attached to each source.
        def _extra_fields_fn(
            _chunk: dict[str, Any], _doc_info: dict[str, Any], metadata: dict[str, Any]
        ) -> dict[str, Any]:
            return {
                "vault_name": metadata.get("vault_name", ""),
                "file_path": metadata.get("file_path", ""),
                "tags": metadata.get("tags", []),
                "outgoing_links": metadata.get("outgoing_links", []),
            }

        sources_list = self._build_chunk_sources_from_documents(
            obsidian_docs,
            title_fn=_title_fn,
            url_fn=_url_fn,
            description_fn=_description_fn,
            extra_fields_fn=_extra_fields_fn,
        )

        # Create result object
        result_object = {
            "id": 53,
            "name": "Obsidian Vault",
            "type": "OBSIDIAN_CONNECTOR",
            "sources": sources_list,
        }

        return result_object, obsidian_docs
|
||||
|
|
|
|||
|
|
@ -761,6 +761,49 @@ async def _index_bookstack_pages(
|
|||
)
|
||||
|
||||
|
||||
@celery_app.task(name="index_obsidian_vault", bind=True)
def index_obsidian_vault_task(
    self,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Celery task to index Obsidian vault notes.

    Celery workers are synchronous, so a fresh event loop is created per
    task invocation to drive the async indexing coroutine.
    """
    import asyncio

    # A dedicated loop per task avoids reusing a loop that a previous task
    # may have closed.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    try:
        loop.run_until_complete(
            _index_obsidian_vault(
                connector_id, search_space_id, user_id, start_date, end_date
            )
        )
    finally:
        # Always release the loop, even if indexing raised.
        loop.close()


async def _index_obsidian_vault(
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str,
    end_date: str,
):
    """Index Obsidian vault with new session."""
    # Imported lazily — presumably to avoid a circular import with the
    # routes module; verify before hoisting to module level.
    from app.routes.search_source_connectors_routes import (
        run_obsidian_indexing,
    )

    # get_celery_session_maker() returns a session factory; the extra ()
    # instantiates the session bound to this task.
    async with get_celery_session_maker()() as session:
        await run_obsidian_indexing(
            session, connector_id, search_space_id, user_id, start_date, end_date
        )
|
||||
|
||||
|
||||
@celery_app.task(name="index_composio_connector", bind=True)
|
||||
def index_composio_connector_task(
|
||||
self,
|
||||
|
|
|
|||
|
|
@ -46,6 +46,7 @@ from .luma_indexer import index_luma_events
|
|||
|
||||
# Documentation and knowledge management
|
||||
from .notion_indexer import index_notion_pages
|
||||
from .obsidian_indexer import index_obsidian_vault
|
||||
from .slack_indexer import index_slack_messages
|
||||
from .webcrawler_indexer import index_crawled_urls
|
||||
|
||||
|
|
@ -68,6 +69,7 @@ __all__ = [ # noqa: RUF022
|
|||
"index_linear_issues",
|
||||
# Documentation and knowledge management
|
||||
"index_notion_pages",
|
||||
"index_obsidian_vault",
|
||||
"index_crawled_urls",
|
||||
# Communication platforms
|
||||
"index_slack_messages",
|
||||
|
|
|
|||
|
|
@ -0,0 +1,516 @@
|
|||
"""
|
||||
Obsidian connector indexer.
|
||||
|
||||
Indexes markdown notes from a local Obsidian vault.
|
||||
This connector is only available in self-hosted mode.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
from datetime import UTC, datetime
|
||||
from pathlib import Path
|
||||
|
||||
import yaml
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import config
|
||||
from app.db import Document, DocumentType, SearchSourceConnectorType
|
||||
from app.services.llm_service import get_user_long_context_llm
|
||||
from app.services.task_logging_service import TaskLoggingService
|
||||
from app.utils.document_converters import (
|
||||
create_document_chunks,
|
||||
generate_content_hash,
|
||||
generate_document_summary,
|
||||
generate_unique_identifier_hash,
|
||||
)
|
||||
|
||||
from .base import (
|
||||
build_document_metadata_string,
|
||||
check_document_by_unique_identifier,
|
||||
get_connector_by_id,
|
||||
get_current_timestamp,
|
||||
logger,
|
||||
update_connector_last_indexed,
|
||||
)
|
||||
|
||||
|
||||
def parse_frontmatter(content: str) -> tuple[dict | None, str]:
    """
    Parse YAML frontmatter from markdown content.

    Args:
        content: The full markdown content

    Returns:
        Tuple of (frontmatter dict or None, content without frontmatter).
        When no valid frontmatter mapping is found, the original content is
        returned unchanged as the second element.
    """
    if not content.startswith("---"):
        return None, content

    # Find the closing --- (search starts past the opening delimiter)
    end_match = re.search(r"\n---\n", content[3:])
    if not end_match:
        return None, content

    # Match positions are relative to content[3:]; +3 converts to absolute.
    frontmatter_str = content[3 : end_match.start() + 3]
    remaining_content = content[end_match.end() + 3 :]

    try:
        frontmatter = yaml.safe_load(frontmatter_str)
    except yaml.YAMLError:
        return None, content

    # safe_load can legally return a scalar or a list (e.g. "---\njust text\n---").
    # Callers treat the result as a mapping (they call .get on it), so anything
    # other than a dict is treated as "no frontmatter" rather than crashing later.
    if not isinstance(frontmatter, dict):
        return None, content

    return frontmatter, remaining_content.strip()
|
||||
|
||||
|
||||
def extract_wiki_links(content: str) -> list[str]:
    """
    Extract [[wiki-style links]] from content.

    Args:
        content: Markdown content

    Returns:
        Sorted list of unique linked note names. Sorting makes the stored
        metadata deterministic (a bare list(set(...)) varies run to run,
        which would churn content hashes and diffs downstream).
    """
    # Match [[link]] or [[link|alias]]; only the target (before '|') is captured.
    pattern = r"\[\[([^\]|]+)(?:\|[^\]]+)?\]\]"
    matches = re.findall(pattern, content)
    return sorted(set(matches))
|
||||
|
||||
|
||||
def extract_tags(content: str) -> list[str]:
    """
    Extract #tags from content (both inline and frontmatter).

    Args:
        content: Markdown content

    Returns:
        List of unique tags (without the leading ``#``).
    """
    # A tag is '#' preceded by whitespace/start-of-text, then a letter,
    # then letters/digits/underscore/slash/hyphen. The lookbehind keeps
    # '## heading' markers from matching.
    tag_regex = r"(?<!\S)#([a-zA-Z][a-zA-Z0-9_/-]*)"
    unique_tags = {match.group(1) for match in re.finditer(tag_regex, content)}
    return list(unique_tags)
|
||||
|
||||
|
||||
def scan_vault(
|
||||
vault_path: str,
|
||||
exclude_folders: list[str] | None = None,
|
||||
) -> list[dict]:
|
||||
"""
|
||||
Scan an Obsidian vault for markdown files.
|
||||
|
||||
Args:
|
||||
vault_path: Path to the Obsidian vault
|
||||
exclude_folders: List of folder names to exclude
|
||||
|
||||
Returns:
|
||||
List of file info dicts with path, name, modified time
|
||||
"""
|
||||
if exclude_folders is None:
|
||||
exclude_folders = [".trash", ".obsidian", "templates"]
|
||||
|
||||
vault = Path(vault_path)
|
||||
if not vault.exists():
|
||||
raise ValueError(f"Vault path does not exist: {vault_path}")
|
||||
|
||||
files = []
|
||||
for md_file in vault.rglob("*.md"):
|
||||
# Check if file is in an excluded folder
|
||||
relative_path = md_file.relative_to(vault)
|
||||
parts = relative_path.parts
|
||||
|
||||
if any(excluded in parts for excluded in exclude_folders):
|
||||
continue
|
||||
|
||||
try:
|
||||
stat = md_file.stat()
|
||||
files.append(
|
||||
{
|
||||
"path": str(md_file),
|
||||
"relative_path": str(relative_path),
|
||||
"name": md_file.stem,
|
||||
"modified_at": datetime.fromtimestamp(stat.st_mtime, tz=UTC),
|
||||
"created_at": datetime.fromtimestamp(stat.st_ctime, tz=UTC),
|
||||
"size": stat.st_size,
|
||||
}
|
||||
)
|
||||
except OSError as e:
|
||||
logger.warning(f"Could not stat file {md_file}: {e}")
|
||||
|
||||
return files
|
||||
|
||||
|
||||
async def index_obsidian_vault(
    session: AsyncSession,
    connector_id: int,
    search_space_id: int,
    user_id: str,
    start_date: str | None = None,
    end_date: str | None = None,
    update_last_indexed: bool = True,
) -> tuple[int, str | None]:
    """
    Index notes from a local Obsidian vault.

    This indexer is only available in self-hosted mode as it requires
    direct file system access to the user's Obsidian vault.

    Args:
        session: Database session
        connector_id: ID of the Obsidian connector
        search_space_id: ID of the search space to store documents in
        user_id: ID of the user
        start_date: Start date for filtering (YYYY-MM-DD format) - optional
        end_date: End date for filtering (YYYY-MM-DD format) - optional
        update_last_indexed: Whether to update the last_indexed_at timestamp

    Returns:
        Tuple containing (number of documents indexed, error message or None)
    """
    task_logger = TaskLoggingService(session, search_space_id)

    # Check if self-hosted mode
    if not config.is_self_hosted():
        return 0, "Obsidian connector is only available in self-hosted mode"

    # Log task start
    log_entry = await task_logger.log_task_start(
        task_name="obsidian_vault_indexing",
        source="connector_indexing_task",
        message=f"Starting Obsidian vault indexing for connector {connector_id}",
        metadata={
            "connector_id": connector_id,
            "user_id": str(user_id),
            "start_date": start_date,
            "end_date": end_date,
        },
    )

    try:
        # Get the connector
        await task_logger.log_task_progress(
            log_entry,
            f"Retrieving Obsidian connector {connector_id} from database",
            {"stage": "connector_retrieval"},
        )

        connector = await get_connector_by_id(
            session, connector_id, SearchSourceConnectorType.OBSIDIAN_CONNECTOR
        )

        if not connector:
            await task_logger.log_task_failure(
                log_entry,
                f"Connector with ID {connector_id} not found or is not an Obsidian connector",
                "Connector not found",
                {"error_type": "ConnectorNotFound"},
            )
            return (
                0,
                f"Connector with ID {connector_id} not found or is not an Obsidian connector",
            )

        # Get vault path from connector config
        vault_path = connector.config.get("vault_path")
        if not vault_path:
            await task_logger.log_task_failure(
                log_entry,
                "Vault path not configured for this connector",
                "Missing vault path",
                {"error_type": "MissingVaultPath"},
            )
            return 0, "Vault path not configured for this connector"

        # Validate vault path exists
        if not os.path.exists(vault_path):
            await task_logger.log_task_failure(
                log_entry,
                f"Vault path does not exist: {vault_path}",
                "Vault path not found",
                {"error_type": "VaultNotFound", "vault_path": vault_path},
            )
            return 0, f"Vault path does not exist: {vault_path}"

        # Get configuration options
        exclude_folders = connector.config.get(
            "exclude_folders", [".trash", ".obsidian", "templates"]
        )
        # Fall back to the directory name when no vault_name was configured.
        vault_name = connector.config.get("vault_name") or os.path.basename(vault_path)

        await task_logger.log_task_progress(
            log_entry,
            f"Scanning Obsidian vault: {vault_name}",
            {"stage": "vault_scan", "vault_path": vault_path},
        )

        # Scan vault for markdown files
        try:
            files = scan_vault(vault_path, exclude_folders)
        except Exception as e:
            await task_logger.log_task_failure(
                log_entry,
                f"Failed to scan vault: {e}",
                "Vault scan error",
                {"error_type": "VaultScanError"},
            )
            return 0, f"Failed to scan vault: {e}"

        logger.info(f"Found {len(files)} markdown files in vault")

        await task_logger.log_task_progress(
            log_entry,
            f"Found {len(files)} markdown files to process",
            {"stage": "files_discovered", "file_count": len(files)},
        )

        # Filter by date if provided (handle "undefined" string from frontend)
        # Also handle inverted dates (start > end) by skipping filtering
        start_dt = None
        end_dt = None

        if start_date and start_date != "undefined":
            start_dt = datetime.strptime(start_date, "%Y-%m-%d").replace(tzinfo=UTC)

        if end_date and end_date != "undefined":
            # Make end_date inclusive (end of day)
            end_dt = datetime.strptime(end_date, "%Y-%m-%d").replace(tzinfo=UTC)
            end_dt = end_dt.replace(hour=23, minute=59, second=59)

        # Only apply date filtering if dates are valid and in correct order
        if start_dt and end_dt and start_dt > end_dt:
            logger.warning(
                f"start_date ({start_date}) is after end_date ({end_date}), skipping date filter"
            )
        else:
            if start_dt:
                files = [f for f in files if f["modified_at"] >= start_dt]
                logger.info(
                    f"After start_date filter ({start_date}): {len(files)} files"
                )
            if end_dt:
                files = [f for f in files if f["modified_at"] <= end_dt]
                logger.info(f"After end_date filter ({end_date}): {len(files)} files")

        logger.info(f"Processing {len(files)} files after date filtering")

        # Get LLM for summarization; may be None, in which case summaries are skipped.
        long_context_llm = await get_user_long_context_llm(
            session, user_id, search_space_id
        )

        indexed_count = 0
        skipped_count = 0

        for file_info in files:
            try:
                file_path = file_info["path"]
                relative_path = file_info["relative_path"]

                # Read file content
                try:
                    with open(file_path, encoding="utf-8") as f:
                        content = f.read()
                except UnicodeDecodeError:
                    logger.warning(f"Could not decode file {file_path}, skipping")
                    skipped_count += 1
                    continue

                if not content.strip():
                    logger.debug(f"Empty file {file_path}, skipping")
                    skipped_count += 1
                    continue

                # Parse frontmatter and extract metadata
                frontmatter, body_content = parse_frontmatter(content)
                wiki_links = extract_wiki_links(content)
                tags = extract_tags(content)

                # Get title from frontmatter or filename
                # NOTE(review): a non-string frontmatter "title" (e.g. a date)
                # would be stored as-is — confirm Document.title tolerates that.
                title = file_info["name"]
                if frontmatter:
                    title = frontmatter.get("title", title)
                    # Also extract tags from frontmatter
                    fm_tags = frontmatter.get("tags", [])
                    if isinstance(fm_tags, list):
                        tags = list({*tags, *fm_tags})
                    elif isinstance(fm_tags, str):
                        tags = list({*tags, fm_tags})

                # Generate unique identifier using vault name and relative path
                unique_identifier = f"{vault_name}:{relative_path}"
                unique_identifier_hash = generate_unique_identifier_hash(
                    DocumentType.OBSIDIAN_CONNECTOR,
                    unique_identifier,
                    search_space_id,
                )

                # Check for existing document
                existing_document = await check_document_by_unique_identifier(
                    session, unique_identifier_hash
                )

                # Generate content hash (over the raw file content, not the
                # enriched document string built below)
                content_hash = generate_content_hash(content, search_space_id)

                # Build metadata
                document_metadata = {
                    "vault_name": vault_name,
                    "file_path": relative_path,
                    "tags": tags,
                    "outgoing_links": wiki_links,
                    "frontmatter": frontmatter,
                    "modified_at": file_info["modified_at"].isoformat(),
                    "created_at": file_info["created_at"].isoformat(),
                    "word_count": len(body_content.split()),
                }

                # Build document content with metadata
                metadata_sections = [
                    (
                        "METADATA",
                        [
                            f"Title: {title}",
                            f"Vault: {vault_name}",
                            f"Path: {relative_path}",
                            f"Tags: {', '.join(tags) if tags else 'None'}",
                            f"Links to: {', '.join(wiki_links) if wiki_links else 'None'}",
                        ],
                    ),
                    ("CONTENT", [body_content]),
                ]
                document_string = build_document_metadata_string(metadata_sections)

                if existing_document:
                    # Check if content has changed
                    if existing_document.content_hash == content_hash:
                        logger.debug(f"Note {title} unchanged, skipping")
                        skipped_count += 1
                        continue

                    # Update existing document
                    logger.info(f"Updating note: {title}")

                    # Generate new summary if content changed
                    if long_context_llm:
                        new_summary, _ = await generate_document_summary(
                            document_string,
                            long_context_llm,
                            document_metadata,
                        )
                        # Store summary in metadata
                        document_metadata["summary"] = new_summary

                    # Add URL and connector_id to metadata
                    document_metadata["url"] = (
                        f"obsidian://{vault_name}/{relative_path}"
                    )
                    document_metadata["connector_id"] = connector_id

                    existing_document.content = document_string
                    existing_document.content_hash = content_hash
                    existing_document.document_metadata = document_metadata
                    existing_document.updated_at = get_current_timestamp()

                    # Update embedding
                    embedding = config.embedding_model_instance.embed(document_string)
                    existing_document.embedding = embedding

                    # Update chunks - delete old and create new
                    existing_document.chunks.clear()
                    new_chunks = await create_document_chunks(document_string)
                    existing_document.chunks = new_chunks

                    indexed_count += 1

                else:
                    # Create new document
                    logger.info(f"Indexing new note: {title}")

                    # Generate summary
                    summary_content = ""
                    if long_context_llm:
                        summary_content, _ = await generate_document_summary(
                            document_string,
                            long_context_llm,
                            document_metadata,
                        )

                    # Generate embedding
                    embedding = config.embedding_model_instance.embed(document_string)

                    # Add URL and summary to metadata
                    document_metadata["url"] = (
                        f"obsidian://{vault_name}/{relative_path}"
                    )
                    document_metadata["summary"] = summary_content
                    document_metadata["connector_id"] = connector_id

                    # Create chunks
                    chunks = await create_document_chunks(document_string)

                    # Create document
                    new_document = Document(
                        search_space_id=search_space_id,
                        title=title,
                        document_type=DocumentType.OBSIDIAN_CONNECTOR,
                        content=document_string,
                        content_hash=content_hash,
                        unique_identifier_hash=unique_identifier_hash,
                        document_metadata=document_metadata,
                        embedding=embedding,
                        chunks=chunks,
                        updated_at=get_current_timestamp(),
                    )

                    session.add(new_document)

                    indexed_count += 1

            except Exception as e:
                # Per-file failures are logged and counted, never fatal.
                logger.exception(
                    f"Error processing file {file_info.get('path', 'unknown')}: {e}"
                )
                skipped_count += 1
                continue

        # Update connector's last indexed timestamp
        await update_connector_last_indexed(session, connector, update_last_indexed)

        # Commit all changes
        await session.commit()

        await task_logger.log_task_success(
            log_entry,
            f"Successfully indexed {indexed_count} Obsidian notes (skipped {skipped_count})",
            {
                "indexed_count": indexed_count,
                "skipped_count": skipped_count,
                "total_files": len(files),
            },
        )

        return indexed_count, None

    except SQLAlchemyError as e:
        logger.exception(f"Database error during Obsidian indexing: {e}")
        await session.rollback()
        await task_logger.log_task_failure(
            log_entry,
            f"Database error during Obsidian indexing: {e}",
            "Database error",
            {"error_type": "SQLAlchemyError"},
        )
        return 0, f"Database error: {e}"

    except Exception as e:
        logger.exception(f"Error during Obsidian indexing: {e}")
        await task_logger.log_task_failure(
            log_entry,
            f"Error during Obsidian indexing: {e}",
            "Unexpected error",
            {"error_type": type(e).__name__},
        )
        return 0, str(e)
|
||||
|
|
@ -0,0 +1,464 @@
|
|||
"use client";
|
||||
|
||||
import { zodResolver } from "@hookform/resolvers/zod";
|
||||
import { FolderOpen, Info } from "lucide-react";
|
||||
import type { FC } from "react";
|
||||
import { useRef, useState } from "react";
|
||||
import { useForm } from "react-hook-form";
|
||||
import * as z from "zod";
|
||||
import {
|
||||
Accordion,
|
||||
AccordionContent,
|
||||
AccordionItem,
|
||||
AccordionTrigger,
|
||||
} from "@/components/ui/accordion";
|
||||
import { Alert, AlertDescription, AlertTitle } from "@/components/ui/alert";
|
||||
import {
|
||||
Form,
|
||||
FormControl,
|
||||
FormDescription,
|
||||
FormField,
|
||||
FormItem,
|
||||
FormLabel,
|
||||
FormMessage,
|
||||
} from "@/components/ui/form";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
import { Switch } from "@/components/ui/switch";
|
||||
import { EnumConnectorName } from "@/contracts/enums/connector";
|
||||
import { getConnectorBenefits } from "../connector-benefits";
|
||||
import type { ConnectFormProps } from "../index";
|
||||
|
||||
const obsidianConnectorFormSchema = z.object({
|
||||
name: z.string().min(3, {
|
||||
message: "Connector name must be at least 3 characters.",
|
||||
}),
|
||||
vault_path: z.string().min(1, {
|
||||
message: "Vault path is required.",
|
||||
}),
|
||||
vault_name: z.string().min(1, {
|
||||
message: "Vault name is required.",
|
||||
}),
|
||||
exclude_folders: z.string().optional(),
|
||||
include_attachments: z.boolean(),
|
||||
});
|
||||
|
||||
type ObsidianConnectorFormValues = z.infer<typeof obsidianConnectorFormSchema>;
|
||||
|
||||
/**
 * Connection form for the Obsidian vault connector (self-hosted deployments only).
 *
 * Collects the vault location and display name, folder exclusions, attachment
 * handling, and periodic re-indexing settings, then passes a connector-creation
 * payload to the parent via `onSubmit`. The rendered output is three cards:
 * the form itself, a "what you get" benefits card, and a documentation accordion.
 *
 * The `<form>` carries id "obsidian-connect-form" because the submit button
 * lives outside this component (see the connector connect view's formId map).
 */
export const ObsidianConnectForm: FC<ConnectFormProps> = ({ onSubmit, isSubmitting }) => {
	// Ref-based guard against double submission; the `isSubmitting` prop alone
	// can lag behind rapid clicks because it only updates on the next render.
	const isSubmittingRef = useRef(false);
	// Periodic-sync settings are kept outside react-hook-form state.
	const [periodicEnabled, setPeriodicEnabled] = useState(true);
	const [frequencyMinutes, setFrequencyMinutes] = useState("60");
	const form = useForm<ObsidianConnectorFormValues>({
		resolver: zodResolver(obsidianConnectorFormSchema),
		defaultValues: {
			name: "Obsidian Vault",
			vault_path: "",
			vault_name: "",
			exclude_folders: ".obsidian,.trash",
			include_attachments: false,
		},
	});

	const handleSubmit = async (values: ObsidianConnectorFormValues) => {
		// Prevent multiple submissions
		if (isSubmittingRef.current || isSubmitting) {
			return;
		}

		isSubmittingRef.current = true;
		try {
			// Parse exclude_folders into an array; a blank field falls back to
			// Obsidian's standard housekeeping folders.
			const excludeFolders = values.exclude_folders
				? values.exclude_folders
						.split(",")
						.map((f) => f.trim())
						.filter(Boolean)
				: [".obsidian", ".trash"];

			await onSubmit({
				name: values.name,
				connector_type: EnumConnectorName.OBSIDIAN_CONNECTOR,
				config: {
					vault_path: values.vault_path,
					vault_name: values.vault_name,
					exclude_folders: excludeFolders,
					include_attachments: values.include_attachments,
				},
				is_indexable: true,
				is_active: true,
				last_indexed_at: null,
				periodic_indexing_enabled: periodicEnabled,
				indexing_frequency_minutes: periodicEnabled ? Number.parseInt(frequencyMinutes, 10) : null,
				next_scheduled_at: null,
				// NOTE(review): the camelCase pair below duplicates the snake_case
				// fields above — presumably consumed by the parent submit handler
				// rather than the API itself; confirm before removing.
				periodicEnabled,
				frequencyMinutes,
			});
		} finally {
			// Always release the guard so the form stays usable after failures.
			isSubmittingRef.current = false;
		}
	};

	return (
		<div className="space-y-6 pb-6">
			{/* Banner: this connector needs direct file-system access */}
			<Alert className="bg-purple-500/10 dark:bg-purple-500/10 border-purple-500/30 p-2 sm:p-3 flex items-center [&>svg]:relative [&>svg]:left-0 [&>svg]:top-0 [&>svg+div]:translate-y-0">
				<FolderOpen className="h-3 w-3 sm:h-4 sm:w-4 shrink-0 ml-1 text-purple-500" />
				<div className="-ml-1">
					<AlertTitle className="text-xs sm:text-sm">Self-Hosted Only</AlertTitle>
					<AlertDescription className="text-[10px] sm:text-xs pl-0!">
						This connector requires direct file system access and only works with self-hosted
						SurfSense installations.
					</AlertDescription>
				</div>
			</Alert>

			{/* Main connection form card */}
			<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
				<Form {...form}>
					<form
						id="obsidian-connect-form"
						onSubmit={form.handleSubmit(handleSubmit)}
						className="space-y-4 sm:space-y-6"
					>
						<FormField
							control={form.control}
							name="name"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Connector Name</FormLabel>
									<FormControl>
										<Input
											placeholder="My Obsidian Vault"
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										A friendly name to identify this connector.
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>

						<FormField
							control={form.control}
							name="vault_path"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Vault Path</FormLabel>
									<FormControl>
										<Input
											placeholder="/path/to/your/obsidian/vault"
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										The absolute path to your Obsidian vault on the server. This must be accessible
										from the SurfSense backend.
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>

						<FormField
							control={form.control}
							name="vault_name"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Vault Name</FormLabel>
									<FormControl>
										<Input
											placeholder="My Knowledge Base"
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										A display name for your vault. This will be used in search results.
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>

						<FormField
							control={form.control}
							name="exclude_folders"
							render={({ field }) => (
								<FormItem>
									<FormLabel className="text-xs sm:text-sm">Exclude Folders</FormLabel>
									<FormControl>
										<Input
											placeholder=".obsidian,.trash,templates"
											className="h-8 sm:h-10 px-2 sm:px-3 text-xs sm:text-sm border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
											disabled={isSubmitting}
											{...field}
										/>
									</FormControl>
									<FormDescription className="text-[10px] sm:text-xs">
										Comma-separated list of folder names to exclude from indexing.
									</FormDescription>
									<FormMessage />
								</FormItem>
							)}
						/>

						<FormField
							control={form.control}
							name="include_attachments"
							render={({ field }) => (
								<FormItem className="flex flex-row items-center justify-between rounded-lg border border-slate-400/20 p-3">
									<div className="space-y-0.5">
										<FormLabel className="text-xs sm:text-sm">Include Attachments</FormLabel>
										<FormDescription className="text-[10px] sm:text-xs">
											Index attachment folders and embedded files (images, PDFs, etc.)
										</FormDescription>
									</div>
									<FormControl>
										<Switch
											checked={field.value}
											onCheckedChange={field.onChange}
											disabled={isSubmitting}
										/>
									</FormControl>
								</FormItem>
							)}
						/>

						{/* Indexing Configuration */}
						<div className="space-y-4 pt-4 border-t border-slate-400/20">
							<h3 className="text-sm sm:text-base font-medium">Indexing Configuration</h3>

							{/* Periodic Sync Config */}
							<div className="rounded-xl bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6">
								<div className="flex items-center justify-between">
									<div className="space-y-1">
										<h3 className="font-medium text-sm sm:text-base">Enable Periodic Sync</h3>
										<p className="text-xs sm:text-sm text-muted-foreground">
											Automatically re-index at regular intervals
										</p>
									</div>
									<Switch
										checked={periodicEnabled}
										onCheckedChange={setPeriodicEnabled}
										disabled={isSubmitting}
									/>
								</div>

								{/* Frequency picker only shown while periodic sync is on */}
								{periodicEnabled && (
									<div className="mt-4 pt-4 border-t border-slate-400/20 space-y-3">
										<div className="space-y-2">
											<Label htmlFor="frequency" className="text-xs sm:text-sm">
												Sync Frequency
											</Label>
											<Select
												value={frequencyMinutes}
												onValueChange={setFrequencyMinutes}
												disabled={isSubmitting}
											>
												<SelectTrigger
													id="frequency"
													className="w-full bg-slate-400/5 dark:bg-slate-400/5 border-slate-400/20 text-xs sm:text-sm"
												>
													<SelectValue placeholder="Select frequency" />
												</SelectTrigger>
												<SelectContent className="z-100">
													<SelectItem value="5" className="text-xs sm:text-sm">
														Every 5 minutes
													</SelectItem>
													<SelectItem value="15" className="text-xs sm:text-sm">
														Every 15 minutes
													</SelectItem>
													<SelectItem value="60" className="text-xs sm:text-sm">
														Every hour
													</SelectItem>
													<SelectItem value="360" className="text-xs sm:text-sm">
														Every 6 hours
													</SelectItem>
													<SelectItem value="720" className="text-xs sm:text-sm">
														Every 12 hours
													</SelectItem>
													<SelectItem value="1440" className="text-xs sm:text-sm">
														Daily
													</SelectItem>
													<SelectItem value="10080" className="text-xs sm:text-sm">
														Weekly
													</SelectItem>
												</SelectContent>
											</Select>
										</div>
									</div>
								)}
							</div>
						</div>
					</form>
				</Form>
			</div>

			{/* What you get section */}
			{getConnectorBenefits(EnumConnectorName.OBSIDIAN_CONNECTOR) && (
				<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 px-3 sm:px-6 py-4 space-y-2">
					<h4 className="text-xs sm:text-sm font-medium">
						What you get with Obsidian integration:
					</h4>
					<ul className="list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
						{getConnectorBenefits(EnumConnectorName.OBSIDIAN_CONNECTOR)?.map((benefit) => (
							<li key={benefit}>{benefit}</li>
						))}
					</ul>
				</div>
			)}

			{/* Documentation Section */}
			<Accordion
				type="single"
				collapsible
				className="w-full border border-border rounded-xl bg-slate-400/5 dark:bg-white/5"
			>
				<AccordionItem value="documentation" className="border-0">
					<AccordionTrigger className="text-sm sm:text-base font-medium px-3 sm:px-6 no-underline hover:no-underline">
						Documentation
					</AccordionTrigger>
					<AccordionContent className="px-3 sm:px-6 pb-3 sm:pb-6 space-y-6">
						<div>
							<h3 className="text-sm sm:text-base font-semibold mb-2">How it works</h3>
							<p className="text-[10px] sm:text-xs text-muted-foreground">
								The Obsidian connector scans your local Obsidian vault directory and indexes all
								Markdown files. It preserves your note structure and extracts metadata from YAML
								frontmatter.
							</p>
							<ul className="mt-2 list-disc pl-5 text-[10px] sm:text-xs text-muted-foreground space-y-1">
								<li>
									The connector parses frontmatter metadata (title, tags, aliases, dates, etc.)
								</li>
								<li>Wiki-style links ([[note]]) are extracted and preserved</li>
								<li>Inline tags (#tag) are recognized and indexed</li>
								<li>Content is chunked intelligently for optimal search results</li>
								<li>
									Subsequent indexing runs use content hashing to skip unchanged files for faster
									sync
								</li>
							</ul>
						</div>

						<div className="space-y-4">
							<div>
								<h3 className="text-sm sm:text-base font-semibold mb-2">Setup</h3>
								<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20 mb-4">
									<Info className="h-3 w-3 sm:h-4 sm:w-4" />
									<AlertTitle className="text-[10px] sm:text-xs">
										File System Access Required
									</AlertTitle>
									<AlertDescription className="text-[9px] sm:text-[10px]">
										The SurfSense backend must have read access to your Obsidian vault directory.
										For Docker deployments, mount your vault as a volume.
									</AlertDescription>
								</Alert>

								<div className="space-y-4 sm:space-y-6">
									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 1: Locate your vault
										</h4>
										<ol className="list-decimal pl-5 space-y-2 text-[10px] sm:text-xs text-muted-foreground">
											<li>
												<strong>macOS/Linux:</strong> Right-click any note in Obsidian → "Reveal in
												Finder" to see the vault folder
											</li>
											<li>
												<strong>Windows:</strong> Right-click any note → "Show in system explorer"
											</li>
											<li>
												<strong>Or:</strong> Click the vault switcher (bottom-left icon) → "Open
												folder" next to your vault name
											</li>
										</ol>
									</div>

									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 2: Enter the path
										</h4>
										<p className="text-[10px] sm:text-xs text-muted-foreground mb-2">
											<strong>Running locally (no Docker):</strong> Use the direct path to your
											vault:
										</p>
										<pre className="bg-slate-800 text-slate-200 p-2 rounded text-[9px] sm:text-[10px] overflow-x-auto mb-2">
											{`/Users/yourname/Documents/MyObsidianVault`}
										</pre>
										<p className="text-[10px] sm:text-xs text-muted-foreground mb-2">
											<strong>Running in Docker:</strong> Mount your vault as a volume in
											docker-compose.yml:
										</p>
										<pre className="bg-slate-800 text-slate-200 p-2 rounded text-[9px] sm:text-[10px] overflow-x-auto">
											{`volumes:
  - /path/to/your/vault:/app/obsidian_vaults/my-vault:ro`}
										</pre>
										<p className="text-[10px] sm:text-xs text-muted-foreground mt-2">
											Then use <code>/app/obsidian_vaults/my-vault</code> as your vault path.
										</p>
									</div>

									<div>
										<h4 className="text-[10px] sm:text-xs font-medium mb-2">
											Step 3: Configure exclusions
										</h4>
										<p className="text-[10px] sm:text-xs text-muted-foreground">
											Common folders to exclude:
										</p>
										<ul className="list-disc pl-5 mt-1 space-y-1 text-[10px] sm:text-xs text-muted-foreground">
											<li>
												<code>.obsidian</code> - Obsidian config (always recommended)
											</li>
											<li>
												<code>.trash</code> - Obsidian's trash folder
											</li>
											<li>
												<code>templates</code> - If you have a templates folder
											</li>
											<li>
												<code>daily-notes</code> - If you want to exclude daily notes
											</li>
										</ul>
									</div>
								</div>
							</div>
						</div>

						<div className="space-y-4">
							<div>
								<h3 className="text-sm sm:text-base font-semibold mb-2">What Gets Indexed</h3>
								<Alert className="bg-slate-400/5 dark:bg-white/5 border-slate-400/20">
									<Info className="h-3 w-3 sm:h-4 sm:w-4" />
									<AlertTitle className="text-[10px] sm:text-xs">Indexed Content</AlertTitle>
									<AlertDescription className="text-[9px] sm:text-[10px]">
										<p className="mb-2">The Obsidian connector indexes:</p>
										<ul className="list-disc pl-5 space-y-1">
											<li>All Markdown files (.md) in your vault</li>
											<li>YAML frontmatter metadata (title, tags, aliases, dates)</li>
											<li>Wiki-style links between notes</li>
											<li>Inline tags throughout your notes</li>
											<li>Full note content with proper chunking</li>
										</ul>
									</AlertDescription>
								</Alert>
							</div>
						</div>
					</AccordionContent>
				</AccordionItem>
			</Accordion>
		</div>
	);
};
|
||||
|
|
@ -108,6 +108,14 @@ export function getConnectorBenefits(connectorType: string): string[] | null {
|
|||
"Real-time updates via webhook integration",
|
||||
"No manual indexing required - meetings are added automatically",
|
||||
],
|
||||
OBSIDIAN_CONNECTOR: [
|
||||
"Search through all your Obsidian notes and knowledge base",
|
||||
"Access note content with YAML frontmatter metadata preserved",
|
||||
"Wiki-style links ([[note]]) and #tags are indexed",
|
||||
"Connect your personal knowledge base directly to your search space",
|
||||
"Incremental sync - only changed files are re-indexed",
|
||||
"Full support for your vault's folder structure",
|
||||
],
|
||||
};
|
||||
|
||||
return benefits[connectorType] || null;
|
||||
|
|
|
|||
|
|
@ -7,6 +7,7 @@ import { GithubConnectForm } from "./components/github-connect-form";
|
|||
import { LinkupApiConnectForm } from "./components/linkup-api-connect-form";
|
||||
import { LumaConnectForm } from "./components/luma-connect-form";
|
||||
import { MCPConnectForm } from "./components/mcp-connect-form";
|
||||
import { ObsidianConnectForm } from "./components/obsidian-connect-form";
|
||||
import { SearxngConnectForm } from "./components/searxng-connect-form";
|
||||
import { TavilyApiConnectForm } from "./components/tavily-api-connect-form";
|
||||
|
||||
|
|
@ -58,6 +59,8 @@ export function getConnectFormComponent(connectorType: string): ConnectFormCompo
|
|||
return CirclebackConnectForm;
|
||||
case "MCP_CONNECTOR":
|
||||
return MCPConnectForm;
|
||||
case "OBSIDIAN_CONNECTOR":
|
||||
return ObsidianConnectForm;
|
||||
// Add other connector types here as needed
|
||||
default:
|
||||
return null;
|
||||
|
|
|
|||
|
|
@ -0,0 +1,191 @@
|
|||
"use client";
|
||||
|
||||
import { FolderOpen } from "lucide-react";
|
||||
import type { FC } from "react";
|
||||
import { useEffect, useState } from "react";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { Label } from "@/components/ui/label";
|
||||
import { Switch } from "@/components/ui/switch";
|
||||
import type { ConnectorConfigProps } from "../index";
|
||||
|
||||
export interface ObsidianConfigProps extends ConnectorConfigProps {
|
||||
onNameChange?: (name: string) => void;
|
||||
}
|
||||
|
||||
export const ObsidianConfig: FC<ObsidianConfigProps> = ({
|
||||
connector,
|
||||
onConfigChange,
|
||||
onNameChange,
|
||||
}) => {
|
||||
const [vaultPath, setVaultPath] = useState<string>(
|
||||
(connector.config?.vault_path as string) || ""
|
||||
);
|
||||
const [vaultName, setVaultName] = useState<string>(
|
||||
(connector.config?.vault_name as string) || ""
|
||||
);
|
||||
const [excludeFolders, setExcludeFolders] = useState<string>(() => {
|
||||
const folders = connector.config?.exclude_folders;
|
||||
if (Array.isArray(folders)) {
|
||||
return folders.join(", ");
|
||||
}
|
||||
return (folders as string) || ".obsidian, .trash";
|
||||
});
|
||||
const [includeAttachments, setIncludeAttachments] = useState<boolean>(
|
||||
(connector.config?.include_attachments as boolean) || false
|
||||
);
|
||||
const [name, setName] = useState<string>(connector.name || "");
|
||||
|
||||
// Update values when connector changes
|
||||
useEffect(() => {
|
||||
const path = (connector.config?.vault_path as string) || "";
|
||||
const vName = (connector.config?.vault_name as string) || "";
|
||||
const folders = connector.config?.exclude_folders;
|
||||
const attachments = (connector.config?.include_attachments as boolean) || false;
|
||||
|
||||
setVaultPath(path);
|
||||
setVaultName(vName);
|
||||
setIncludeAttachments(attachments);
|
||||
setName(connector.name || "");
|
||||
|
||||
if (Array.isArray(folders)) {
|
||||
setExcludeFolders(folders.join(", "));
|
||||
} else if (typeof folders === "string") {
|
||||
setExcludeFolders(folders);
|
||||
}
|
||||
}, [connector.config, connector.name]);
|
||||
|
||||
const handleVaultPathChange = (value: string) => {
|
||||
setVaultPath(value);
|
||||
if (onConfigChange) {
|
||||
onConfigChange({
|
||||
...connector.config,
|
||||
vault_path: value,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleVaultNameChange = (value: string) => {
|
||||
setVaultName(value);
|
||||
if (onConfigChange) {
|
||||
onConfigChange({
|
||||
...connector.config,
|
||||
vault_name: value,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleExcludeFoldersChange = (value: string) => {
|
||||
setExcludeFolders(value);
|
||||
const foldersArray = value
|
||||
.split(",")
|
||||
.map((f) => f.trim())
|
||||
.filter(Boolean);
|
||||
if (onConfigChange) {
|
||||
onConfigChange({
|
||||
...connector.config,
|
||||
exclude_folders: foldersArray,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleIncludeAttachmentsChange = (value: boolean) => {
|
||||
setIncludeAttachments(value);
|
||||
if (onConfigChange) {
|
||||
onConfigChange({
|
||||
...connector.config,
|
||||
include_attachments: value,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const handleNameChange = (value: string) => {
|
||||
setName(value);
|
||||
if (onNameChange) {
|
||||
onNameChange(value);
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="space-y-6">
|
||||
{/* Connector Name */}
|
||||
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
|
||||
<div className="space-y-2">
|
||||
<Label className="text-xs sm:text-sm">Connector Name</Label>
|
||||
<Input
|
||||
value={name}
|
||||
onChange={(e) => handleNameChange(e.target.value)}
|
||||
placeholder="My Obsidian Vault"
|
||||
className="border-slate-400/20 focus-visible:border-slate-400/40"
|
||||
/>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
A friendly name to identify this connector.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Configuration */}
|
||||
<div className="rounded-xl border border-border bg-slate-400/5 dark:bg-white/5 p-3 sm:p-6 space-y-3 sm:space-y-4">
|
||||
<div className="space-y-1 sm:space-y-2">
|
||||
<h3 className="font-medium text-sm sm:text-base flex items-center gap-2">
|
||||
<FolderOpen className="h-4 w-4 text-purple-500" />
|
||||
Vault Configuration
|
||||
</h3>
|
||||
</div>
|
||||
|
||||
<div className="space-y-4">
|
||||
<div className="space-y-2">
|
||||
<Label className="text-xs sm:text-sm">Vault Path</Label>
|
||||
<Input
|
||||
value={vaultPath}
|
||||
onChange={(e) => handleVaultPathChange(e.target.value)}
|
||||
placeholder="/path/to/your/obsidian/vault"
|
||||
className="border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
|
||||
/>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
The absolute path to your Obsidian vault on the server.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label className="text-xs sm:text-sm">Vault Name</Label>
|
||||
<Input
|
||||
value={vaultName}
|
||||
onChange={(e) => handleVaultNameChange(e.target.value)}
|
||||
placeholder="My Knowledge Base"
|
||||
className="border-slate-400/20 focus-visible:border-slate-400/40"
|
||||
/>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
A display name for your vault in search results.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="space-y-2">
|
||||
<Label className="text-xs sm:text-sm">Exclude Folders</Label>
|
||||
<Input
|
||||
value={excludeFolders}
|
||||
onChange={(e) => handleExcludeFoldersChange(e.target.value)}
|
||||
placeholder=".obsidian, .trash, templates"
|
||||
className="border-slate-400/20 focus-visible:border-slate-400/40 font-mono"
|
||||
/>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
Comma-separated list of folder names to exclude from indexing.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center justify-between rounded-lg border border-slate-400/20 p-3">
|
||||
<div className="space-y-0.5">
|
||||
<Label className="text-xs sm:text-sm">Include Attachments</Label>
|
||||
<p className="text-[10px] sm:text-xs text-muted-foreground">
|
||||
Index attachment folders and embedded files
|
||||
</p>
|
||||
</div>
|
||||
<Switch
|
||||
checked={includeAttachments}
|
||||
onCheckedChange={handleIncludeAttachmentsChange}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
|
@ -16,6 +16,7 @@ import { JiraConfig } from "./components/jira-config";
|
|||
import { LinkupApiConfig } from "./components/linkup-api-config";
|
||||
import { LumaConfig } from "./components/luma-config";
|
||||
import { MCPConfig } from "./components/mcp-config";
|
||||
import { ObsidianConfig } from "./components/obsidian-config";
|
||||
import { SearxngConfig } from "./components/searxng-config";
|
||||
import { SlackConfig } from "./components/slack-config";
|
||||
import { TavilyApiConfig } from "./components/tavily-api-config";
|
||||
|
|
@ -74,6 +75,8 @@ export function getConnectorConfigComponent(
|
|||
return CirclebackConfig;
|
||||
case "MCP_CONNECTOR":
|
||||
return MCPConfig;
|
||||
case "OBSIDIAN_CONNECTOR":
|
||||
return ObsidianConfig;
|
||||
case "COMPOSIO_CONNECTOR":
|
||||
return ComposioConfig;
|
||||
// OAuth connectors (Gmail, Calendar, Airtable, Notion) and others don't need special config UI
|
||||
|
|
|
|||
|
|
@ -57,6 +57,7 @@ export const ConnectorConnectView: FC<ConnectorConnectViewProps> = ({
|
|||
LUMA_CONNECTOR: "luma-connect-form",
|
||||
CIRCLEBACK_CONNECTOR: "circleback-connect-form",
|
||||
MCP_CONNECTOR: "mcp-connect-form",
|
||||
OBSIDIAN_CONNECTOR: "obsidian-connect-form",
|
||||
};
|
||||
const formId = formIdMap[connectorType];
|
||||
if (formId) {
|
||||
|
|
@ -141,12 +142,10 @@ export const ConnectorConnectView: FC<ConnectorConnectViewProps> = ({
|
|||
<Loader2 className="mr-2 h-4 w-4 animate-spin" />
|
||||
Connecting
|
||||
</>
|
||||
) : connectorType === "MCP_CONNECTOR" ? (
|
||||
"Connect"
|
||||
) : (
|
||||
<>
|
||||
{connectorType === "MCP_CONNECTOR"
|
||||
? "Connect"
|
||||
: `Connect ${getConnectorTypeDisplay(connectorType)}`}
|
||||
</>
|
||||
`Connect ${getConnectorTypeDisplay(connectorType)}`
|
||||
)}
|
||||
</Button>
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -166,6 +166,13 @@ export const OTHER_CONNECTORS = [
|
|||
description: "Connect to MCP servers for AI tools",
|
||||
connectorType: EnumConnectorName.MCP_CONNECTOR,
|
||||
},
|
||||
{
|
||||
id: "obsidian-connector",
|
||||
title: "Obsidian",
|
||||
description: "Index your Obsidian vault (self-hosted only)",
|
||||
connectorType: EnumConnectorName.OBSIDIAN_CONNECTOR,
|
||||
selfHostedOnly: true,
|
||||
},
|
||||
] as const;
|
||||
|
||||
// Composio Connector (Single entry that opens toolkit selector)
|
||||
|
|
|
|||
|
|
@ -3,8 +3,9 @@
|
|||
import type { FC } from "react";
|
||||
import { EnumConnectorName } from "@/contracts/enums/connector";
|
||||
import type { SearchSourceConnector } from "@/contracts/types/connector.types";
|
||||
import { ComposioConnectorCard } from "../components/composio-connector-card";
|
||||
import { isSelfHosted } from "@/lib/env-config";
|
||||
import { ConnectorCard } from "../components/connector-card";
|
||||
import { ComposioConnectorCard } from "../components/composio-connector-card";
|
||||
import {
|
||||
COMPOSIO_CONNECTORS,
|
||||
CRAWLERS,
|
||||
|
|
@ -58,23 +59,31 @@ export const AllConnectorsTab: FC<AllConnectorsTabProps> = ({
|
|||
onViewAccountsList,
|
||||
onOpenComposio,
|
||||
}) => {
|
||||
// Filter connectors based on search
|
||||
// Check if self-hosted mode (for showing self-hosted only connectors)
|
||||
const selfHosted = isSelfHosted();
|
||||
|
||||
// Filter connectors based on search and deployment mode
|
||||
const filteredOAuth = OAUTH_CONNECTORS.filter(
|
||||
(c) =>
|
||||
c.title.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
||||
c.description.toLowerCase().includes(searchQuery.toLowerCase())
|
||||
// Filter by search query
|
||||
(c.title.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
||||
c.description.toLowerCase().includes(searchQuery.toLowerCase())) &&
|
||||
// Filter self-hosted only connectors in cloud mode
|
||||
(!("selfHostedOnly" in c) || !c.selfHostedOnly || selfHosted)
|
||||
);
|
||||
|
||||
const filteredCrawlers = CRAWLERS.filter(
|
||||
(c) =>
|
||||
c.title.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
||||
c.description.toLowerCase().includes(searchQuery.toLowerCase())
|
||||
(c.title.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
||||
c.description.toLowerCase().includes(searchQuery.toLowerCase())) &&
|
||||
(!("selfHostedOnly" in c) || !c.selfHostedOnly || selfHosted)
|
||||
);
|
||||
|
||||
const filteredOther = OTHER_CONNECTORS.filter(
|
||||
(c) =>
|
||||
c.title.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
||||
c.description.toLowerCase().includes(searchQuery.toLowerCase())
|
||||
(c.title.toLowerCase().includes(searchQuery.toLowerCase()) ||
|
||||
c.description.toLowerCase().includes(searchQuery.toLowerCase())) &&
|
||||
(!("selfHostedOnly" in c) || !c.selfHostedOnly || selfHosted)
|
||||
);
|
||||
|
||||
// Filter Composio connectors
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ export const CONNECTOR_TO_DOCUMENT_TYPE: Record<string, string> = {
|
|||
ELASTICSEARCH_CONNECTOR: "ELASTICSEARCH_CONNECTOR",
|
||||
BOOKSTACK_CONNECTOR: "BOOKSTACK_CONNECTOR",
|
||||
CIRCLEBACK_CONNECTOR: "CIRCLEBACK",
|
||||
OBSIDIAN_CONNECTOR: "OBSIDIAN_CONNECTOR",
|
||||
|
||||
// Special mappings (connector type differs from document type)
|
||||
GOOGLE_DRIVE_CONNECTOR: "GOOGLE_DRIVE_FILE",
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ export enum EnumConnectorName {
|
|||
WEBCRAWLER_CONNECTOR = "WEBCRAWLER_CONNECTOR",
|
||||
YOUTUBE_CONNECTOR = "YOUTUBE_CONNECTOR",
|
||||
CIRCLEBACK_CONNECTOR = "CIRCLEBACK_CONNECTOR",
|
||||
OBSIDIAN_CONNECTOR = "OBSIDIAN_CONNECTOR",
|
||||
MCP_CONNECTOR = "MCP_CONNECTOR",
|
||||
COMPOSIO_CONNECTOR = "COMPOSIO_CONNECTOR",
|
||||
}
|
||||
|
|
|
|||
|
|
@ -66,6 +66,8 @@ export const getConnectorIcon = (connectorType: EnumConnectorName | string, clas
|
|||
return <IconUsersGroup {...iconProps} />;
|
||||
case EnumConnectorName.MCP_CONNECTOR:
|
||||
return <Image src="/connectors/modelcontextprotocol.svg" alt="MCP" {...imgProps} />;
|
||||
case EnumConnectorName.OBSIDIAN_CONNECTOR:
|
||||
return <Image src="/connectors/obsidian.svg" alt="Obsidian" {...imgProps} />;
|
||||
case EnumConnectorName.COMPOSIO_CONNECTOR:
|
||||
return <Image src="/connectors/composio.svg" alt="Composio" {...imgProps} />;
|
||||
// Additional cases for non-enum connector types
|
||||
|
|
|
|||
|
|
@ -27,6 +27,7 @@ export const searchSourceConnectorTypeEnum = z.enum([
|
|||
"BOOKSTACK_CONNECTOR",
|
||||
"CIRCLEBACK_CONNECTOR",
|
||||
"MCP_CONNECTOR",
|
||||
"OBSIDIAN_CONNECTOR",
|
||||
"COMPOSIO_CONNECTOR",
|
||||
]);
|
||||
|
||||
|
|
|
|||
|
|
@ -21,8 +21,21 @@ export const BACKEND_URL = process.env.NEXT_PUBLIC_FASTAPI_BACKEND_URL || "http:
|
|||
// Placeholder: __NEXT_PUBLIC_ETL_SERVICE__
|
||||
export const ETL_SERVICE = process.env.NEXT_PUBLIC_ETL_SERVICE || "DOCLING";
|
||||
|
||||
// Deployment Mode: "self-hosted" or "cloud"
// Matches backend's SURFSENSE_DEPLOYMENT_MODE - defaults to "self-hosted"
// self-hosted: Full access to local file system connectors (Obsidian, etc.)
// cloud: Only cloud-based connectors available
// Uses `||` (not `??`) deliberately, so an empty-string env var also falls
// back to the default.
// Placeholder: __NEXT_PUBLIC_DEPLOYMENT_MODE__
export const DEPLOYMENT_MODE = process.env.NEXT_PUBLIC_DEPLOYMENT_MODE || "self-hosted";

// Helper to check if local auth is enabled (AUTH_TYPE === "LOCAL")
export const isLocalAuth = () => AUTH_TYPE === "LOCAL";

// Helper to check if Google auth is enabled (AUTH_TYPE === "GOOGLE")
export const isGoogleAuth = () => AUTH_TYPE === "GOOGLE";

// Helper to check if running in self-hosted mode; gates self-hosted-only
// connectors (e.g. Obsidian) in the UI.
export const isSelfHosted = () => DEPLOYMENT_MODE === "self-hosted";

// Helper to check if running in cloud mode. Note: any value other than the
// two recognized modes makes both isSelfHosted() and isCloud() return false.
export const isCloud = () => DEPLOYMENT_MODE === "cloud";
|
||||
|
|
|
|||
12
surfsense_web/public/connectors/obsidian.svg
Normal file
12
surfsense_web/public/connectors/obsidian.svg
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
<!-- Stylized Obsidian gem icon: gradient pentagon with an inner facet and edge highlights. -->
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 100 100" fill="none">
	<defs>
		<!-- Violet-to-indigo fill for the outer gem body. -->
		<linearGradient id="obsidian-gradient" x1="0%" y1="0%" x2="100%" y2="100%">
			<stop offset="0%" stop-color="#7C3AED"/>
			<stop offset="100%" stop-color="#4F46E5"/>
		</linearGradient>
	</defs>
	<!-- Outer pentagon body -->
	<path d="M50 5 L90 35 L75 95 L25 95 L10 35 Z" fill="url(#obsidian-gradient)" stroke="#6D28D9" stroke-width="2"/>
	<!-- Inner facet highlight -->
	<path d="M50 20 L70 38 L62 75 L38 75 L30 38 Z" fill="#A78BFA" opacity="0.4"/>
	<!-- Vertical and horizontal facet lines -->
	<path d="M50 5 L50 95" stroke="#8B5CF6" stroke-width="1" opacity="0.5"/>
	<path d="M10 35 L90 35" stroke="#8B5CF6" stroke-width="1" opacity="0.3"/>
</svg>
|
||||
|
After Width: | Height: | Size: 657 B |
Loading…
Add table
Add a link
Reference in a new issue