Merge remote-tracking branch 'upstream/dev' into feat/document-revamp

This commit is contained in:
Anish Sarkar 2026-03-07 04:37:37 +05:30
commit 2ea67c1764
22 changed files with 828 additions and 281 deletions

View file

@ -5,7 +5,7 @@
# ==============================================================================
# SurfSense version (use "latest", a clean version like "0.0.14", or a specific build like "0.0.14.1")
SURFSENSE_VERSION=0.0.13.9
SURFSENSE_VERSION=latest
# ------------------------------------------------------------------------------
# Core Settings

View file

@ -24,7 +24,7 @@ $ErrorActionPreference = 'Stop'
# ── Configuration ───────────────────────────────────────────────────────────
$RepoRaw = "https://raw.githubusercontent.com/MODSetter/SurfSense/dev"
$RepoRaw = "https://raw.githubusercontent.com/MODSetter/SurfSense/main"
$InstallDir = ".\surfsense"
$OldVolume = "surfsense-data"
$DumpFile = ".\surfsense_migration_backup.sql"
@ -208,11 +208,12 @@ if ($MigrationMode) {
if (-not (Test-Path $DumpFile)) {
Write-Err "Dump file '$DumpFile' not found. The migration script may have failed."
}
$DumpFilePath = (Resolve-Path $DumpFile).Path
Write-Info "Restoring dump into PostgreSQL 17 - this may take a while for large databases..."
$restoreErrFile = Join-Path $env:TEMP "surfsense_restore_err.log"
Push-Location $InstallDir
Invoke-NativeSafe { Get-Content $DumpFile | docker compose exec -T -e "PGPASSWORD=$DbPass" db psql -U $DbUser -d $DbName 2>$restoreErrFile | Out-Null } | Out-Null
Invoke-NativeSafe { Get-Content -LiteralPath $DumpFilePath | docker compose exec -T -e "PGPASSWORD=$DbPass" db psql -U $DbUser -d $DbName 2>$restoreErrFile | Out-Null } | Out-Null
Pop-Location
$fatalErrors = @()
@ -246,7 +247,7 @@ if ($MigrationMode) {
Write-Step "Starting all SurfSense services"
Push-Location $InstallDir
Invoke-NativeSafe { docker compose up -d } | Out-Null
Invoke-NativeSafe { docker compose up -d }
Pop-Location
Write-Ok "All services started."
@ -255,7 +256,7 @@ if ($MigrationMode) {
} else {
Write-Step "Starting SurfSense"
Push-Location $InstallDir
Invoke-NativeSafe { docker compose up -d } | Out-Null
Invoke-NativeSafe { docker compose up -d }
Pop-Location
Write-Ok "All services started."
}
@ -316,7 +317,7 @@ Y88b d88P Y88b 888 888 888 Y88b d88P Y8b. 888 888 X88 Y8b.
$versionDisplay = (Get-Content $envPath | Select-String '^SURFSENSE_VERSION=' | ForEach-Object { ($_ -split '=',2)[1].Trim('"') }) | Select-Object -First 1
if (-not $versionDisplay) { $versionDisplay = "latest" }
Write-Host " Your personal AI-powered search engine [$versionDisplay]" -ForegroundColor Yellow
Write-Host " OSS Alternative to NotebookLM for Teams [$versionDisplay]" -ForegroundColor Yellow
Write-Host ("=" * 62) -ForegroundColor Cyan
Write-Host ""

View file

@ -25,7 +25,7 @@ set -euo pipefail
main() {
REPO_RAW="https://raw.githubusercontent.com/MODSetter/SurfSense/dev"
REPO_RAW="https://raw.githubusercontent.com/MODSetter/SurfSense/main"
INSTALL_DIR="./surfsense"
OLD_VOLUME="surfsense-data"
DUMP_FILE="./surfsense_migration_backup.sql"
@ -301,7 +301,7 @@ Y88b d88P Y88b 888 888 888 Y88b d88P Y8b. 888 888 X88 Y8b.
EOF
_version_display=$(grep '^SURFSENSE_VERSION=' "${INSTALL_DIR}/.env" 2>/dev/null | cut -d= -f2 | tr -d '"' | head -1 || true)
_version_display="${_version_display:-latest}"
printf " Your personal AI-powered search engine ${YELLOW}[%s]${NC}\n" "${_version_display}"
printf " OSS Alternative to NotebookLM for Teams ${YELLOW}[%s]${NC}\n" "${_version_display}"
printf "${CYAN}══════════════════════════════════════════════════════════════${NC}\n\n"
info " Frontend: http://localhost:3000"

View file

@ -12,7 +12,7 @@ from litellm import aspeech
from app.config import config as app_config
from app.services.kokoro_tts_service import get_kokoro_tts_service
from app.services.llm_service import get_document_summary_llm
from app.services.llm_service import get_agent_llm
from .configuration import Configuration
from .prompts import get_podcast_generation_prompt
@ -31,7 +31,7 @@ async def create_podcast_transcript(
user_prompt = configuration.user_prompt
# Get search space's document summary LLM
llm = await get_document_summary_llm(state.db_session, search_space_id)
llm = await get_agent_llm(state.db_session, search_space_id)
if not llm:
error_message = (
f"No document summary LLM configured for search space {search_space_id}"

View file

@ -1,47 +1,83 @@
from sqlalchemy.ext.asyncio import AsyncSession
from app.db import DocumentStatus, DocumentType
from app.db import Document, DocumentStatus, DocumentType
from app.indexing_pipeline.connector_document import ConnectorDocument
from app.indexing_pipeline.document_hashing import compute_content_hash
from app.indexing_pipeline.indexing_pipeline_service import IndexingPipelineService
async def index_uploaded_file(
markdown_content: str,
filename: str,
etl_service: str,
search_space_id: int,
user_id: str,
session: AsyncSession,
llm,
should_summarize: bool = False,
) -> None:
connector_doc = ConnectorDocument(
title=filename,
source_markdown=markdown_content,
unique_id=filename,
document_type=DocumentType.FILE,
search_space_id=search_space_id,
created_by_id=user_id,
connector_id=None,
should_summarize=should_summarize,
should_use_code_chunker=False,
fallback_summary=markdown_content[:4000],
metadata={
"FILE_NAME": filename,
"ETL_SERVICE": etl_service,
},
)
class UploadDocumentAdapter:
def __init__(self, session: AsyncSession) -> None:
self._session = session
self._service = IndexingPipelineService(session)
service = IndexingPipelineService(session)
documents = await service.prepare_for_indexing([connector_doc])
async def index(
self,
markdown_content: str,
filename: str,
etl_service: str,
search_space_id: int,
user_id: str,
llm,
should_summarize: bool = False,
) -> None:
connector_doc = ConnectorDocument(
title=filename,
source_markdown=markdown_content,
unique_id=filename,
document_type=DocumentType.FILE,
search_space_id=search_space_id,
created_by_id=user_id,
connector_id=None,
should_summarize=should_summarize,
should_use_code_chunker=False,
fallback_summary=markdown_content[:4000],
metadata={
"FILE_NAME": filename,
"ETL_SERVICE": etl_service,
},
)
if not documents:
raise RuntimeError("prepare_for_indexing returned no documents")
documents = await self._service.prepare_for_indexing([connector_doc])
indexed = await service.index(documents[0], connector_doc, llm)
if not documents:
raise RuntimeError("prepare_for_indexing returned no documents")
if not DocumentStatus.is_state(indexed.status, DocumentStatus.READY):
raise RuntimeError(indexed.status.get("reason", "Indexing failed"))
indexed = await self._service.index(documents[0], connector_doc, llm)
indexed.content_needs_reindexing = False
await session.commit()
if not DocumentStatus.is_state(indexed.status, DocumentStatus.READY):
raise RuntimeError(indexed.status.get("reason", "Indexing failed"))
indexed.content_needs_reindexing = False
await self._session.commit()
async def reindex(self, document: Document, llm) -> None:
    """Re-run the indexing pipeline for an existing document whose
    source_markdown has been edited.

    Raises:
        RuntimeError: if the document has no source markdown, or the
            pipeline does not leave it in the READY state.
    """
    if not document.source_markdown:
        raise RuntimeError("Document has no source_markdown to reindex")
    connector_doc = ConnectorDocument(
        title=document.title,
        source_markdown=document.source_markdown,
        unique_id=document.title,
        document_type=document.document_type,
        search_space_id=document.search_space_id,
        created_by_id=str(document.created_by_id),
        connector_id=document.connector_id,
        should_summarize=True,
        should_use_code_chunker=False,
        fallback_summary=document.source_markdown[:4000],
        metadata=document.document_metadata or {},
    )
    # Refresh the stored hash so deduplication reflects the edited content.
    document.content_hash = compute_content_hash(connector_doc)
    indexed = await self._service.index(document, connector_doc, llm)
    if not DocumentStatus.is_state(indexed.status, DocumentStatus.READY):
        raise RuntimeError(indexed.status.get("reason", "Reindexing failed"))
    indexed.content_needs_reindexing = False
    await self._session.commit()

View file

@ -2,19 +2,16 @@
import logging
from sqlalchemy import delete, select
from sqlalchemy import select
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import selectinload
from app.celery_app import celery_app
from app.db import Document
from app.indexing_pipeline.adapters.file_upload_adapter import UploadDocumentAdapter
from app.services.llm_service import get_user_long_context_llm
from app.services.task_logging_service import TaskLoggingService
from app.tasks.celery_tasks import get_celery_session_maker
from app.utils.document_converters import (
create_document_chunks,
generate_document_summary,
)
logger = logging.getLogger(__name__)
@ -42,7 +39,6 @@ def reindex_document_task(self, document_id: int, user_id: str):
async def _reindex_document(document_id: int, user_id: str):
"""Async function to reindex a document."""
async with get_celery_session_maker()() as session:
# First, get the document to get search_space_id for logging
result = await session.execute(
select(Document)
.options(selectinload(Document.chunks))
@ -54,10 +50,8 @@ async def _reindex_document(document_id: int, user_id: str):
logger.error(f"Document {document_id} not found")
return
# Initialize task logger
task_logger = TaskLoggingService(session, document.search_space_id)
# Log task start
log_entry = await task_logger.log_task_start(
task_name="document_reindex",
source="editor",
@ -71,10 +65,7 @@ async def _reindex_document(document_id: int, user_id: str):
)
try:
# Read markdown directly from source_markdown
markdown_content = document.source_markdown
if not markdown_content:
if not document.source_markdown:
await task_logger.log_task_failure(
log_entry,
f"Document {document_id} has no source_markdown to reindex",
@ -85,51 +76,17 @@ async def _reindex_document(document_id: int, user_id: str):
logger.info(f"Reindexing document {document_id} ({document.title})")
# 1. Delete old chunks explicitly
from app.db import Chunk
await session.execute(delete(Chunk).where(Chunk.document_id == document_id))
await session.flush() # Ensure old chunks are deleted
# 2. Create new chunks from source_markdown
new_chunks = await create_document_chunks(markdown_content)
# 3. Add new chunks to session
for chunk in new_chunks:
chunk.document_id = document_id
session.add(chunk)
logger.info(f"Created {len(new_chunks)} chunks for document {document_id}")
# 4. Regenerate summary
user_llm = await get_user_long_context_llm(
session, user_id, document.search_space_id
)
document_metadata = {
"title": document.title,
"document_type": document.document_type.value,
}
adapter = UploadDocumentAdapter(session)
await adapter.reindex(document=document, llm=user_llm)
summary_content, summary_embedding = await generate_document_summary(
markdown_content, user_llm, document_metadata
)
# 5. Update document
document.content = summary_content
document.embedding = summary_embedding
document.content_needs_reindexing = False
await session.commit()
# Log success
await task_logger.log_task_success(
log_entry,
f"Successfully reindexed document: {document.title}",
{
"chunks_created": len(new_chunks),
"document_id": document_id,
},
{"document_id": document_id},
)
logger.info(f"Successfully reindexed document {document_id}")

View file

@ -18,7 +18,7 @@ from sqlalchemy.ext.asyncio import AsyncSession
from app.config import config as app_config
from app.db import Document, DocumentStatus, DocumentType, Log, Notification
from app.indexing_pipeline.adapters.file_upload_adapter import index_uploaded_file
from app.indexing_pipeline.adapters.file_upload_adapter import UploadDocumentAdapter
from app.services.llm_service import get_user_long_context_llm
from app.services.notification_service import NotificationService
from app.services.task_logging_service import TaskLoggingService
@ -1871,13 +1871,13 @@ async def process_file_in_background_with_document(
user_llm = await get_user_long_context_llm(session, user_id, search_space_id)
await index_uploaded_file(
adapter = UploadDocumentAdapter(session)
await adapter.index(
markdown_content=markdown_content,
filename=filename,
etl_service=etl_service,
search_space_id=search_space_id,
user_id=user_id,
session=session,
llm=user_llm,
should_summarize=should_summarize,
)

View file

@ -2,7 +2,7 @@ import pytest
from sqlalchemy import select
from app.db import Chunk, Document, DocumentStatus
from app.indexing_pipeline.adapters.file_upload_adapter import index_uploaded_file
from app.indexing_pipeline.adapters.file_upload_adapter import UploadDocumentAdapter
pytestmark = pytest.mark.integration
@ -12,13 +12,13 @@ pytestmark = pytest.mark.integration
)
async def test_sets_status_ready(db_session, db_search_space, db_user, mocker):
"""Document status is READY after successful indexing."""
await index_uploaded_file(
adapter = UploadDocumentAdapter(db_session)
await adapter.index(
markdown_content="## Hello\n\nSome content.",
filename="test.pdf",
etl_service="UNSTRUCTURED",
search_space_id=db_search_space.id,
user_id=str(db_user.id),
session=db_session,
llm=mocker.Mock(),
)
@ -35,14 +35,15 @@ async def test_sets_status_ready(db_session, db_search_space, db_user, mocker):
)
async def test_content_is_summary(db_session, db_search_space, db_user, mocker):
"""Document content is set to the LLM-generated summary."""
await index_uploaded_file(
adapter = UploadDocumentAdapter(db_session)
await adapter.index(
markdown_content="## Hello\n\nSome content.",
filename="test.pdf",
etl_service="UNSTRUCTURED",
search_space_id=db_search_space.id,
user_id=str(db_user.id),
session=db_session,
llm=mocker.Mock(),
should_summarize=True,
)
result = await db_session.execute(
@ -58,13 +59,13 @@ async def test_content_is_summary(db_session, db_search_space, db_user, mocker):
)
async def test_chunks_written_to_db(db_session, db_search_space, db_user, mocker):
"""Chunks derived from the source markdown are persisted in the DB."""
await index_uploaded_file(
adapter = UploadDocumentAdapter(db_session)
await adapter.index(
markdown_content="## Hello\n\nSome content.",
filename="test.pdf",
etl_service="UNSTRUCTURED",
search_space_id=db_search_space.id,
user_id=str(db_user.id),
session=db_session,
llm=mocker.Mock(),
)
@ -87,13 +88,239 @@ async def test_chunks_written_to_db(db_session, db_search_space, db_user, mocker
)
async def test_raises_on_indexing_failure(db_session, db_search_space, db_user, mocker):
"""RuntimeError is raised when the indexing step fails so the caller can fire a failure notification."""
with pytest.raises(RuntimeError):
await index_uploaded_file(
adapter = UploadDocumentAdapter(db_session)
with pytest.raises(RuntimeError, match=r"Embedding failed|Indexing failed"):
await adapter.index(
markdown_content="## Hello\n\nSome content.",
filename="test.pdf",
etl_service="UNSTRUCTURED",
search_space_id=db_search_space.id,
user_id=str(db_user.id),
session=db_session,
llm=mocker.Mock(),
should_summarize=True,
)
# ---------------------------------------------------------------------------
# reindex() tests
# ---------------------------------------------------------------------------
@pytest.mark.usefixtures("patched_summarize", "patched_embed_text", "patched_chunk_text")
async def test_reindex_updates_content(db_session, db_search_space, db_user, mocker):
    """After reindexing edited markdown, the stored summary content is refreshed."""
    adapter = UploadDocumentAdapter(db_session)
    await adapter.index(
        markdown_content="## Original\n\nOriginal content.",
        filename="test.pdf",
        etl_service="UNSTRUCTURED",
        search_space_id=db_search_space.id,
        user_id=str(db_user.id),
        llm=mocker.Mock(),
    )
    doc = (
        await db_session.execute(
            select(Document).filter(Document.search_space_id == db_search_space.id)
        )
    ).scalars().first()
    doc.source_markdown = "## Edited\n\nNew content after user edit."
    await db_session.flush()
    await adapter.reindex(document=doc, llm=mocker.Mock())
    await db_session.refresh(doc)
    assert doc.content == "Mocked summary."
@pytest.mark.usefixtures("patched_summarize", "patched_embed_text", "patched_chunk_text")
async def test_reindex_updates_content_hash(db_session, db_search_space, db_user, mocker):
    """The content hash is recomputed when the source markdown changes."""
    adapter = UploadDocumentAdapter(db_session)
    await adapter.index(
        markdown_content="## Original\n\nOriginal content.",
        filename="test.pdf",
        etl_service="UNSTRUCTURED",
        search_space_id=db_search_space.id,
        user_id=str(db_user.id),
        llm=mocker.Mock(),
    )
    doc = (
        await db_session.execute(
            select(Document).filter(Document.search_space_id == db_search_space.id)
        )
    ).scalars().first()
    old_hash = doc.content_hash
    doc.source_markdown = "## Edited\n\nNew content after user edit."
    await db_session.flush()
    await adapter.reindex(document=doc, llm=mocker.Mock())
    await db_session.refresh(doc)
    assert doc.content_hash != old_hash
@pytest.mark.usefixtures("patched_summarize", "patched_embed_text", "patched_chunk_text")
async def test_reindex_sets_status_ready(db_session, db_search_space, db_user, mocker):
    """A successful reindex leaves the document in the READY state."""
    adapter = UploadDocumentAdapter(db_session)
    await adapter.index(
        markdown_content="## Original\n\nOriginal content.",
        filename="test.pdf",
        etl_service="UNSTRUCTURED",
        search_space_id=db_search_space.id,
        user_id=str(db_user.id),
        llm=mocker.Mock(),
    )
    doc = (
        await db_session.execute(
            select(Document).filter(Document.search_space_id == db_search_space.id)
        )
    ).scalars().first()
    doc.source_markdown = "## Edited\n\nNew content after user edit."
    await db_session.flush()
    await adapter.reindex(document=doc, llm=mocker.Mock())
    await db_session.refresh(doc)
    assert DocumentStatus.is_state(doc.status, DocumentStatus.READY)
@pytest.mark.usefixtures("patched_summarize", "patched_embed_text")
async def test_reindex_replaces_chunks(db_session, db_search_space, db_user, mocker):
    """Old chunks are replaced, not appended to, when a document is reindexed."""
    # First call (index) yields the original chunk, second call (reindex) the new one.
    mocker.patch(
        "app.indexing_pipeline.indexing_pipeline_service.chunk_text",
        side_effect=[["Original chunk."], ["Updated chunk."]],
    )
    adapter = UploadDocumentAdapter(db_session)
    await adapter.index(
        markdown_content="## Original\n\nOriginal content.",
        filename="test.pdf",
        etl_service="UNSTRUCTURED",
        search_space_id=db_search_space.id,
        user_id=str(db_user.id),
        llm=mocker.Mock(),
    )
    doc = (
        await db_session.execute(
            select(Document).filter(Document.search_space_id == db_search_space.id)
        )
    ).scalars().first()
    doc_id = doc.id
    doc.source_markdown = "## Edited\n\nNew content after user edit."
    await db_session.flush()
    await adapter.reindex(document=doc, llm=mocker.Mock())
    chunks = (
        await db_session.execute(select(Chunk).filter(Chunk.document_id == doc_id))
    ).scalars().all()
    assert len(chunks) == 1
    assert chunks[0].content == "Updated chunk."
@pytest.mark.usefixtures("patched_summarize", "patched_embed_text", "patched_chunk_text")
async def test_reindex_clears_reindexing_flag(db_session, db_search_space, db_user, mocker):
    """content_needs_reindexing is reset to False after a successful reindex."""
    adapter = UploadDocumentAdapter(db_session)
    await adapter.index(
        markdown_content="## Original\n\nOriginal content.",
        filename="test.pdf",
        etl_service="UNSTRUCTURED",
        search_space_id=db_search_space.id,
        user_id=str(db_user.id),
        llm=mocker.Mock(),
    )
    doc = (
        await db_session.execute(
            select(Document).filter(Document.search_space_id == db_search_space.id)
        )
    ).scalars().first()
    doc.source_markdown = "## Edited\n\nNew content after user edit."
    doc.content_needs_reindexing = True
    await db_session.flush()
    await adapter.reindex(document=doc, llm=mocker.Mock())
    await db_session.refresh(doc)
    assert doc.content_needs_reindexing is False
@pytest.mark.usefixtures("patched_embed_text", "patched_chunk_text")
async def test_reindex_raises_on_failure(db_session, db_search_space, db_user, mocker):
    """A pipeline failure during reindex surfaces as RuntimeError to the caller."""
    # Summarization succeeds for the initial index ...
    mocker.patch(
        "app.indexing_pipeline.indexing_pipeline_service.summarize_document",
        return_value="Mocked summary.",
    )
    adapter = UploadDocumentAdapter(db_session)
    await adapter.index(
        markdown_content="## Original\n\nOriginal content.",
        filename="test.pdf",
        etl_service="UNSTRUCTURED",
        search_space_id=db_search_space.id,
        user_id=str(db_user.id),
        llm=mocker.Mock(),
    )
    doc = (
        await db_session.execute(
            select(Document).filter(Document.search_space_id == db_search_space.id)
        )
    ).scalars().first()
    doc.source_markdown = "## Edited\n\nNew content after user edit."
    await db_session.flush()
    # ... and is then broken so the reindex path fails.
    mocker.patch(
        "app.indexing_pipeline.indexing_pipeline_service.summarize_document",
        side_effect=RuntimeError("LLM unavailable"),
    )
    with pytest.raises(RuntimeError, match=r"Embedding failed|Reindexing failed"):
        await adapter.reindex(document=doc, llm=mocker.Mock())
async def test_reindex_raises_on_empty_source_markdown(
    db_session, db_search_space, db_user, mocker
):
    """Reindexing a document with empty source_markdown fails fast."""
    from app.db import DocumentType

    empty_doc = Document(
        title="empty.pdf",
        document_type=DocumentType.FILE,
        content="placeholder",
        content_hash="abc123",
        unique_identifier_hash="def456",
        source_markdown="",
        search_space_id=db_search_space.id,
        created_by_id=str(db_user.id),
    )
    db_session.add(empty_doc)
    await db_session.flush()
    adapter = UploadDocumentAdapter(db_session)
    with pytest.raises(RuntimeError, match="no source_markdown"):
        await adapter.reindex(document=empty_doc, llm=mocker.Mock())

View file

@ -2,7 +2,7 @@ import { Route, Routes } from "react-router-dom";
import ApiKeyForm from "./pages/ApiKeyForm";
import HomePage from "./pages/HomePage";
import "../tailwind.css";
import "~tailwind.css";
export const Routing = () => (
<Routes>

View file

@ -4,6 +4,8 @@ import { ReloadIcon } from "@radix-ui/react-icons";
import { useState } from "react";
import { useNavigate } from "react-router-dom";
import { Button } from "~/routes/ui/button";
import { ConnectionSettingsButton } from "~/routes/ui/connection-settings-button";
import { buildBackendUrl } from "~utils/backend-url";
const ApiKeyForm = () => {
const navigation = useNavigate();
@ -27,8 +29,7 @@ const ApiKeyForm = () => {
setLoading(true);
try {
// Verify token is valid by making a request to the API
const response = await fetch(`${process.env.PLASMO_PUBLIC_BACKEND_URL}/verify-token`, {
const response = await fetch(await buildBackendUrl("/verify-token"), {
method: "GET",
headers: {
Authorization: `Bearer ${apiKey}`,
@ -53,6 +54,10 @@ const ApiKeyForm = () => {
return (
<div className="min-h-screen bg-gradient-to-br from-gray-900 to-gray-800 flex flex-col items-center justify-center p-6">
<div className="w-full max-w-md mx-auto space-y-8">
<div className="flex justify-end">
<ConnectionSettingsButton />
</div>
<div className="flex flex-col items-center space-y-2">
<div className="bg-gray-800 p-3 rounded-full ring-2 ring-gray-700 shadow-lg">
<img className="w-12 h-12" src={icon} alt="SurfSense" />

View file

@ -16,6 +16,7 @@ import React, { useEffect, useState } from "react";
import { useNavigate } from "react-router-dom";
import { cn } from "~/lib/utils";
import { Button } from "~/routes/ui/button";
import { ConnectionSettingsButton } from "~/routes/ui/connection-settings-button";
import {
Command,
CommandEmpty,
@ -27,6 +28,7 @@ import {
import { Popover, PopoverContent, PopoverTrigger } from "~/routes/ui/popover";
import { Label } from "~routes/ui/label";
import { useToast } from "~routes/ui/use-toast";
import { buildBackendUrl } from "~utils/backend-url";
import { getRenderedHtml } from "~utils/commons";
import type { WebHistory } from "~utils/interfaces";
import Loading from "./Loading";
@ -45,15 +47,19 @@ const HomePage = () => {
const checkSearchSpaces = async () => {
const storage = new Storage({ area: "local" });
const token = await storage.get("token");
if (!token) {
setLoading(false);
navigation("/login");
return;
}
try {
const response = await fetch(
`${process.env.PLASMO_PUBLIC_BACKEND_URL}/api/v1/searchspaces`,
{
headers: {
Authorization: `Bearer ${token}`,
},
const response = await fetch(await buildBackendUrl("/api/v1/searchspaces"), {
headers: {
Authorization: `Bearer ${token}`,
}
);
});
if (!response.ok) {
throw new Error("Token verification failed");
@ -66,11 +72,12 @@ const HomePage = () => {
await storage.remove("token");
await storage.remove("showShadowDom");
navigation("/login");
} finally {
setLoading(false);
}
};
checkSearchSpaces();
setLoading(false);
}, []);
useEffect(() => {
@ -304,6 +311,19 @@ const HomePage = () => {
navigation("/login");
}
// Called after the connection-settings dialog is saved. When the backend URL
// actually changed, all cached auth/session state is cleared and the user is
// sent back through login so they re-authenticate against the new backend.
async function handleConnectionSaved(changed: boolean): Promise<void> {
if (!changed) {
return;
}
const storage = new Storage({ area: "local" });
await storage.remove("token");
await storage.remove("showShadowDom");
await storage.remove("search_space");
await storage.remove("search_space_id");
navigation("/login");
}
if (loading) {
return <Loading />;
} else {
@ -344,15 +364,18 @@ const HomePage = () => {
</div>
<h1 className="text-xl font-semibold text-white">SurfSense</h1>
</div>
<Button
variant="ghost"
size="icon"
onClick={logOut}
className="rounded-full text-gray-400 hover:bg-gray-800 hover:text-white"
>
<ExitIcon className="h-4 w-4" />
<span className="sr-only">Log out</span>
</Button>
<div className="flex items-center gap-1">
<ConnectionSettingsButton onSaved={handleConnectionSaved} />
<Button
variant="ghost"
size="icon"
onClick={logOut}
className="rounded-full text-gray-400 hover:bg-gray-800 hover:text-white"
>
<ExitIcon className="h-4 w-4" />
<span className="sr-only">Log out</span>
</Button>
</div>
</div>
<div className="space-y-3 py-4">

View file

@ -0,0 +1,114 @@
import { GearIcon } from "@radix-ui/react-icons";
import { useEffect, useState } from "react";
import { Button } from "~/routes/ui/button";
import {
Dialog,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
} from "~/routes/ui/dialog";
import { Label } from "~/routes/ui/label";
import {
DEFAULT_BACKEND_BASE_URL,
getCustomBackendBaseUrl,
normalizeBackendBaseUrl,
setCustomBackendBaseUrl,
} from "~utils/backend-url";
// Props for ConnectionSettingsButton. `onSaved` fires after the dialog is
// saved, with `changed` indicating whether the persisted URL actually changed.
type ConnectionSettingsButtonProps = {
onSaved?: (changed: boolean) => void | Promise<void>;
};
/**
 * Gear-icon button that opens a dialog for configuring a custom backend base
 * URL, persisted via the backend-url utility helpers.
 */
export function ConnectionSettingsButton({ onSaved }: ConnectionSettingsButtonProps) {
const [open, setOpen] = useState(false);
// Current value of the text input.
const [customUrl, setCustomUrl] = useState("");
// Last persisted override ("" when the default backend is in effect).
const [savedUrl, setSavedUrl] = useState("");
// Reload the stored override every time the dialog opens, so the input
// always reflects the latest persisted value.
useEffect(() => {
if (!open) {
return;
}
const loadSettings = async () => {
const normalized = await getCustomBackendBaseUrl();
// Show the default URL in the input when no override is stored.
setCustomUrl(normalized || DEFAULT_BACKEND_BASE_URL);
setSavedUrl(normalized);
};
loadSettings();
}, [open]);
const handleSave = async () => {
const normalizedUrl = normalizeBackendBaseUrl(customUrl);
// Entering the default URL clears the override instead of storing it.
const nextUrl = await setCustomBackendBaseUrl(
normalizedUrl === DEFAULT_BACKEND_BASE_URL ? "" : normalizedUrl
);
const changed = nextUrl !== savedUrl;
setSavedUrl(nextUrl);
setCustomUrl(nextUrl || DEFAULT_BACKEND_BASE_URL);
setOpen(false);
// Let the parent react (e.g. force a re-login) when the URL changed.
if (onSaved) {
await onSaved(changed);
}
};
return (
<>
<Button
variant="ghost"
size="icon"
onClick={() => setOpen(true)}
className="rounded-full text-gray-400 hover:bg-gray-800 hover:text-white"
>
<GearIcon className="h-4 w-4" />
<span className="sr-only">Connection settings</span>
</Button>
<Dialog open={open} onOpenChange={setOpen}>
<DialogContent className="max-w-md border-gray-700 bg-gray-800 text-white">
<DialogHeader>
<DialogTitle>Connection Settings</DialogTitle>
<DialogDescription className="text-gray-400">
Leave blank to use the default SurfSense backend URL.
</DialogDescription>
</DialogHeader>
<div className="space-y-2">
<Label htmlFor="backendBaseUrl" className="text-gray-300">
Custom Backend URL
</Label>
<input
id="backendBaseUrl"
type="url"
value={customUrl}
onChange={(event) => setCustomUrl(event.target.value)}
placeholder={DEFAULT_BACKEND_BASE_URL}
className="w-full rounded-md border border-gray-700 bg-gray-900 px-3 py-2 text-white placeholder:text-gray-500 focus:outline-none focus:ring-2 focus:ring-teal-500"
/>
<p className="text-xs text-gray-500">Default: {DEFAULT_BACKEND_BASE_URL}</p>
</div>
<DialogFooter className="gap-2">
<Button
type="button"
variant="outline"
onClick={() => setCustomUrl(DEFAULT_BACKEND_BASE_URL)}
className="border-gray-700 bg-gray-900 text-gray-200 hover:bg-gray-700"
>
Use Default
</Button>
<Button
type="button"
onClick={handleSave}
className="bg-teal-600 text-white hover:bg-teal-500"
>
Save
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
</>
);
}

View file

@ -0,0 +1,41 @@
import { Storage } from "@plasmohq/storage";
// Storage key under which a user-supplied backend URL override is persisted.
export const BACKEND_URL_STORAGE_KEY = "backend_base_url";
// Used only when PLASMO_PUBLIC_BACKEND_URL is absent at build time.
export const FALLBACK_BACKEND_BASE_URL = "https://www.surfsense.com";
// Module-level handle to the extension's local storage area.
const storage = new Storage({ area: "local" });
/**
 * Canonicalize a backend base URL: strip surrounding whitespace and any
 * trailing slashes so the result can be safely concatenated with a "/path".
 */
export function normalizeBackendBaseUrl(url: string) {
  let base = url.trim();
  while (base.endsWith("/")) {
    base = base.slice(0, -1);
  }
  return base;
}
// Build-time default, normalized once at module load. The fallback constant is
// only used when the PLASMO_PUBLIC_BACKEND_URL env var is not set.
export const DEFAULT_BACKEND_BASE_URL = normalizeBackendBaseUrl(
process.env.PLASMO_PUBLIC_BACKEND_URL || FALLBACK_BACKEND_BASE_URL
);
/**
 * Read the user-configured backend override from extension storage.
 * Returns "" when no override is stored (or the stored value is not a string).
 */
export async function getCustomBackendBaseUrl() {
  const stored = await storage.get(BACKEND_URL_STORAGE_KEY);
  if (typeof stored !== "string") {
    return "";
  }
  return normalizeBackendBaseUrl(stored);
}
/**
 * Persist (or clear) the backend override. An empty / whitespace / slash-only
 * URL removes the override entirely. Returns the normalized value that was
 * stored, or "" when the override was cleared.
 */
export async function setCustomBackendBaseUrl(url: string) {
  const nextUrl = normalizeBackendBaseUrl(url);
  if (!nextUrl) {
    await storage.remove(BACKEND_URL_STORAGE_KEY);
    return "";
  }
  await storage.set(BACKEND_URL_STORAGE_KEY, nextUrl);
  return nextUrl;
}
/** Resolve the effective backend base URL: user override first, else the default. */
export async function getBackendBaseUrl() {
  const override = await getCustomBackendBaseUrl();
  return override || DEFAULT_BACKEND_BASE_URL;
}
/**
 * Join the effective backend base URL with an API path, guaranteeing exactly
 * one slash between them.
 */
export async function buildBackendUrl(path: string) {
  const prefixedPath = path.startsWith("/") ? path : `/${path}`;
  const baseUrl = await getBackendBaseUrl();
  return `${baseUrl}${prefixedPath}`;
}

View file

@ -1,147 +1,9 @@
"use client";
import {
Bell,
BellOff,
ExternalLink,
Info,
type Megaphone,
Rocket,
Wrench,
Zap,
} from "lucide-react";
import Link from "next/link";
import { useEffect } from "react";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import type { AnnouncementCategory } from "@/contracts/types/announcement.types";
import { type AnnouncementWithState, useAnnouncements } from "@/hooks/use-announcements";
import { formatRelativeDate } from "@/lib/format-date";
// ---------------------------------------------------------------------------
// Category configuration
// ---------------------------------------------------------------------------
const categoryConfig: Record<
AnnouncementCategory,
{
label: string;
icon: typeof Megaphone;
color: string;
badgeVariant: "default" | "secondary" | "destructive" | "outline";
}
> = {
feature: {
label: "Feature",
icon: Rocket,
color: "text-emerald-500",
badgeVariant: "default",
},
update: {
label: "Update",
icon: Zap,
color: "text-blue-500",
badgeVariant: "secondary",
},
maintenance: {
label: "Maintenance",
icon: Wrench,
color: "text-amber-500",
badgeVariant: "outline",
},
info: {
label: "Info",
icon: Info,
color: "text-muted-foreground",
badgeVariant: "secondary",
},
};
// ---------------------------------------------------------------------------
// Announcement card
// ---------------------------------------------------------------------------
/**
 * Renders a single announcement as a card: category icon and badge, title,
 * an "Important" badge when flagged, relative date, description, and an
 * optional footer link (external links open in a new tab).
 */
function AnnouncementCard({ announcement }: { announcement: AnnouncementWithState }) {
// Unknown categories fall back to the neutral `info` presentation.
const config = categoryConfig[announcement.category] ?? categoryConfig.info;
const Icon = config.icon;
return (
<Card className="group relative transition-all duration-200 hover:shadow-md">
<CardHeader className="pb-3">
<div className="flex items-start justify-between gap-3">
<div className="flex items-start gap-3 min-w-0">
<div
className={`mt-0.5 flex h-8 w-8 shrink-0 items-center justify-center rounded-lg bg-muted ${config.color}`}
>
<Icon className="h-4 w-4" />
</div>
<div className="min-w-0 flex-1">
<div className="flex items-center gap-2 flex-wrap">
<CardTitle className="text-base leading-tight">{announcement.title}</CardTitle>
<Badge variant={config.badgeVariant} className="text-[10px] px-1.5 py-0">
{config.label}
</Badge>
{announcement.isImportant && (
<Badge variant="destructive" className="text-[10px] px-1.5 py-0 gap-0.5">
<Bell className="h-2.5 w-2.5" />
Important
</Badge>
)}
</div>
<CardDescription className="mt-1 text-xs">
{formatRelativeDate(announcement.date)}
</CardDescription>
</div>
</div>
</div>
</CardHeader>
<CardContent className="pb-3">
<p className="text-sm text-muted-foreground leading-relaxed">{announcement.description}</p>
</CardContent>
{announcement.link && (
<CardFooter className="pt-0 pb-4">
<Button variant="outline" size="sm" asChild className="gap-1.5">
<Link
href={announcement.link.url}
target={announcement.link.url.startsWith("http") ? "_blank" : undefined}
>
{announcement.link.label}
<ExternalLink className="h-3 w-3" />
</Link>
</Button>
</CardFooter>
)}
</Card>
);
}
// ---------------------------------------------------------------------------
// Empty state
// ---------------------------------------------------------------------------
/**
 * Placeholder shown when there are no announcements to display:
 * a muted bell-off icon with a short "all caught up" message.
 */
function EmptyState() {
return (
<div className="flex flex-col items-center justify-center py-16 text-center">
<div className="flex h-16 w-16 items-center justify-center rounded-full bg-muted mb-4">
<BellOff className="h-7 w-7 text-muted-foreground" />
</div>
<h3 className="text-lg font-semibold">No announcements</h3>
<p className="mt-1 text-sm text-muted-foreground max-w-sm">
You're all caught up! New announcements will appear here.
</p>
</div>
);
}
import { AnnouncementCard } from "@/components/announcements/AnnouncementCard";
import { AnnouncementsEmptyState } from "@/components/announcements/AnnouncementsEmptyState";
import { useAnnouncements } from "@/hooks/use-announcements";
// ---------------------------------------------------------------------------
// Page
@ -171,7 +33,7 @@ export default function AnnouncementsPage() {
{/* Content */}
<div className="max-w-3xl mx-auto px-6 lg:px-10 pt-8 pb-20">
{announcements.length === 0 ? (
<EmptyState />
<AnnouncementsEmptyState />
) : (
<div className="flex flex-col gap-4">
{announcements.map((announcement) => (

View file

@ -0,0 +1,25 @@
import { NextRequest, NextResponse } from "next/server";
// Server-side base URL of the internal FastAPI backend. Trailing slashes are
// stripped so that appending "/verify-token" below yields exactly one "/".
const backendBaseUrl = (process.env.INTERNAL_FASTAPI_BACKEND_URL || "http://backend:8000").replace(
/\/+$/,
""
);
/**
 * GET route handler — proxies a token-verification request to the FastAPI
 * backend's /verify-token endpoint.
 *
 * Forwards the caller's Authorization and X-API-Key headers, then streams the
 * backend response body back with the backend's status code and content type.
 * Responses are never cached (`cache-control: no-store`).
 */
export async function GET(request: NextRequest) {
	let response: Response;
	try {
		response = await fetch(`${backendBaseUrl}/verify-token`, {
			method: "GET",
			headers: {
				Authorization: request.headers.get("authorization") || "",
				"X-API-Key": request.headers.get("x-api-key") || "",
			},
			cache: "no-store",
		});
	} catch {
		// fetch rejects when the backend is unreachable (container down, DNS
		// failure, connection refused). Without this catch the route handler
		// crashes with an opaque 500; return a clean 502 instead.
		return NextResponse.json(
			{ detail: "Backend unreachable" },
			{ status: 502, headers: { "cache-control": "no-store" } }
		);
	}
	return new NextResponse(response.body, {
		status: response.status,
		headers: {
			"content-type": response.headers.get("content-type") || "application/json",
			"cache-control": "no-store",
		},
	});
}

View file

@ -0,0 +1,117 @@
"use client";
import {
Bell,
ExternalLink,
Info,
type LucideIcon,
Rocket,
Wrench,
Zap,
} from "lucide-react";
import Link from "next/link";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
import {
Card,
CardContent,
CardDescription,
CardFooter,
CardHeader,
CardTitle,
} from "@/components/ui/card";
import type { AnnouncementCategory } from "@/contracts/types/announcement.types";
import type { AnnouncementWithState } from "@/hooks/use-announcements";
import { formatRelativeDate } from "@/lib/format-date";
// Per-category presentation settings: human-readable label, lucide icon,
// icon accent color class, and shadcn Badge variant. Lookup sites fall back
// to `info` for unrecognized categories.
const categoryConfig: Record<
AnnouncementCategory,
{
label: string;
icon: LucideIcon;
color: string;
badgeVariant: "default" | "secondary" | "destructive" | "outline";
}
> = {
feature: {
label: "Feature",
icon: Rocket,
color: "text-emerald-500",
badgeVariant: "default",
},
update: {
label: "Update",
icon: Zap,
color: "text-blue-500",
badgeVariant: "secondary",
},
maintenance: {
label: "Maintenance",
icon: Wrench,
color: "text-amber-500",
badgeVariant: "outline",
},
info: {
label: "Info",
icon: Info,
color: "text-muted-foreground",
badgeVariant: "secondary",
},
};
/**
 * Renders a single announcement as a card: category icon and badge, title,
 * an "Important" badge when flagged, relative date, description, and an
 * optional footer link (external links open in a new tab).
 */
export function AnnouncementCard({ announcement }: { announcement: AnnouncementWithState }) {
// Unknown categories fall back to the neutral `info` presentation.
const config = categoryConfig[announcement.category] ?? categoryConfig.info;
const Icon = config.icon;
return (
<Card className="group relative transition-all duration-200 hover:shadow-md">
<CardHeader className="pb-3">
<div className="flex items-start justify-between gap-3">
<div className="flex items-start gap-3 min-w-0">
<div
className={`mt-0.5 flex h-8 w-8 shrink-0 items-center justify-center rounded-lg bg-muted ${config.color}`}
>
<Icon className="h-4 w-4" />
</div>
<div className="min-w-0 flex-1">
<div className="flex items-center gap-2 flex-wrap">
<CardTitle className="text-base leading-tight">{announcement.title}</CardTitle>
<Badge variant={config.badgeVariant} className="text-[10px] px-1.5 py-0">
{config.label}
</Badge>
{announcement.isImportant && (
<Badge variant="destructive" className="text-[10px] px-1.5 py-0 gap-0.5">
<Bell className="h-2.5 w-2.5" />
Important
</Badge>
)}
</div>
<CardDescription className="mt-1 text-xs">
{formatRelativeDate(announcement.date)}
</CardDescription>
</div>
</div>
</div>
</CardHeader>
<CardContent className="pb-3">
<p className="text-sm text-muted-foreground leading-relaxed">{announcement.description}</p>
</CardContent>
{announcement.link && (
<CardFooter className="pt-0 pb-4">
<Button variant="outline" size="sm" asChild className="gap-1.5">
<Link
href={announcement.link.url}
target={announcement.link.url.startsWith("http") ? "_blank" : undefined}
>
{announcement.link.label}
<ExternalLink className="h-3 w-3" />
</Link>
</Button>
</CardFooter>
)}
</Card>
);
}

View file

@ -0,0 +1,18 @@
"use client";
import { BellOff } from "lucide-react";
/**
 * Placeholder shown when there are no announcements to display:
 * a muted bell-off icon with a short "all caught up" message.
 */
export function AnnouncementsEmptyState() {
return (
<div className="flex flex-col items-center justify-center py-16 text-center">
<div className="flex h-16 w-16 items-center justify-center rounded-full bg-muted mb-4">
<BellOff className="h-7 w-7 text-muted-foreground" />
</div>
<h3 className="text-lg font-semibold">No announcements</h3>
<p className="mt-1 text-sm text-muted-foreground max-w-sm">
You're all caught up! New announcements will appear here.
</p>
</div>
);
}

View file

@ -124,6 +124,9 @@ export function LayoutDataProvider({
// Documents sidebar state (shared atom so Composer can toggle it)
const [isDocumentsSidebarOpen, setIsDocumentsSidebarOpen] = useAtom(documentsSidebarOpenAtom);
// Announcements sidebar state
const [isAnnouncementsSidebarOpen, setIsAnnouncementsSidebarOpen] = useState(false);
// Search space dialog state
const [isCreateSearchSpaceDialogOpen, setIsCreateSearchSpaceDialogOpen] = useState(false);
@ -267,7 +270,7 @@ export function LayoutDataProvider({
() => [
{
title: "Inbox",
url: "#inbox", // Special URL to indicate this is handled differently
url: "#inbox",
icon: Inbox,
isActive: isInboxSidebarOpen,
badge: totalUnreadCount > 0 ? formatInboxCount(totalUnreadCount) : undefined,
@ -281,17 +284,17 @@ export function LayoutDataProvider({
},
{
title: "Announcements",
url: "/announcements",
url: "#announcements",
icon: Megaphone,
isActive: pathname?.includes("/announcements"),
isActive: isAnnouncementsSidebarOpen,
badge: announcementUnreadCount > 0 ? formatInboxCount(announcementUnreadCount) : undefined,
},
],
[
pathname,
isInboxSidebarOpen,
isDocumentsSidebarOpen,
totalUnreadCount,
isAnnouncementsSidebarOpen,
announcementUnreadCount,
isDocumentsProcessing,
]
@ -386,25 +389,37 @@ export function LayoutDataProvider({
const handleNavItemClick = useCallback(
(item: NavItem) => {
// Handle inbox specially - toggle sidebar instead of navigating
if (item.url === "#inbox") {
setIsInboxSidebarOpen((prev) => {
if (!prev) {
setIsAllSharedChatsSidebarOpen(false);
setIsAllPrivateChatsSidebarOpen(false);
setIsDocumentsSidebarOpen(false);
setIsAnnouncementsSidebarOpen(false);
}
return !prev;
});
return;
}
// Handle documents specially - toggle sidebar instead of navigating
if (item.url === "#documents") {
setIsDocumentsSidebarOpen((prev) => {
if (!prev) {
setIsInboxSidebarOpen(false);
setIsAllSharedChatsSidebarOpen(false);
setIsAllPrivateChatsSidebarOpen(false);
setIsAnnouncementsSidebarOpen(false);
}
return !prev;
});
return;
}
if (item.url === "#announcements") {
setIsAnnouncementsSidebarOpen((prev) => {
if (!prev) {
setIsInboxSidebarOpen(false);
setIsAllSharedChatsSidebarOpen(false);
setIsAllPrivateChatsSidebarOpen(false);
setIsDocumentsSidebarOpen(false);
}
return !prev;
});
@ -510,6 +525,7 @@ export function LayoutDataProvider({
setIsAllPrivateChatsSidebarOpen(false);
setIsInboxSidebarOpen(false);
setIsDocumentsSidebarOpen(false);
setIsAnnouncementsSidebarOpen(false);
}, [setIsDocumentsSidebarOpen]);
const handleViewAllPrivateChats = useCallback(() => {
@ -517,6 +533,7 @@ export function LayoutDataProvider({
setIsAllSharedChatsSidebarOpen(false);
setIsInboxSidebarOpen(false);
setIsDocumentsSidebarOpen(false);
setIsAnnouncementsSidebarOpen(false);
}, [setIsDocumentsSidebarOpen]);
// Delete handlers
@ -633,6 +650,10 @@ export function LayoutDataProvider({
isDocked: isInboxDocked,
onDockedChange: setIsInboxDocked,
}}
announcementsPanel={{
open: isAnnouncementsSidebarOpen,
onOpenChange: setIsAnnouncementsSidebarOpen,
}}
allSharedChatsPanel={{
open: isAllSharedChatsSidebarOpen,
onOpenChange: setIsAllSharedChatsSidebarOpen,

View file

@ -13,6 +13,7 @@ import { IconRail } from "../icon-rail";
import {
AllPrivateChatsSidebar,
AllSharedChatsSidebar,
AnnouncementsSidebar,
DocumentsSidebar,
InboxSidebar,
MobileSidebar,
@ -77,6 +78,10 @@ interface LayoutShellProps {
className?: string;
// Inbox props
inbox?: InboxProps;
announcementsPanel?: {
open: boolean;
onOpenChange: (open: boolean) => void;
};
isLoadingChats?: boolean;
// All chats panel props
allSharedChatsPanel?: {
@ -128,6 +133,7 @@ export function LayoutShell({
children,
className,
inbox,
announcementsPanel,
isLoadingChats = false,
allSharedChatsPanel,
allPrivateChatsPanel,
@ -215,6 +221,15 @@ export function LayoutShell({
/>
)}
{/* Mobile Announcements Sidebar */}
{announcementsPanel?.open && (
<AnnouncementsSidebar
open={announcementsPanel.open}
onOpenChange={announcementsPanel.onOpenChange}
onCloseMobileSidebar={() => setMobileMenuOpen(false)}
/>
)}
{/* Mobile All Shared Chats - slide-out panel */}
{allSharedChatsPanel && (
<AllSharedChatsSidebar
@ -333,6 +348,14 @@ export function LayoutShell({
/>
)}
{/* Announcements Sidebar */}
{announcementsPanel && (
<AnnouncementsSidebar
open={announcementsPanel.open}
onOpenChange={announcementsPanel.onOpenChange}
/>
)}
{/* All Shared Chats - slide-out panel */}
{allSharedChatsPanel && (
<AllSharedChatsSidebar

View file

@ -0,0 +1,75 @@
"use client";
import { ChevronLeft } from "lucide-react";
import { useEffect } from "react";
import { AnnouncementsEmptyState } from "@/components/announcements/AnnouncementsEmptyState";
import { AnnouncementCard } from "@/components/announcements/AnnouncementCard";
import { Button } from "@/components/ui/button";
import { useAnnouncements } from "@/hooks/use-announcements";
import { useMediaQuery } from "@/hooks/use-media-query";
import { SidebarSlideOutPanel } from "./SidebarSlideOutPanel";
interface AnnouncementsSidebarProps {
open: boolean;
onOpenChange: (open: boolean) => void;
onCloseMobileSidebar?: () => void;
}
/**
 * Slide-out panel listing all announcements.
 *
 * Marks every announcement as read each time the panel opens. On mobile
 * (< 640px) a back button is shown that also closes the parent mobile
 * sidebar via `onCloseMobileSidebar`.
 */
export function AnnouncementsSidebar({
open,
onOpenChange,
onCloseMobileSidebar,
}: AnnouncementsSidebarProps) {
// Mobile breakpoint mirrors Tailwind's `sm` (640px).
const isMobile = !useMediaQuery("(min-width: 640px)");
const { announcements, markAllRead } = useAnnouncements();
// Clear the unread badge whenever the panel is opened.
useEffect(() => {
if (!open) return;
markAllRead();
}, [open, markAllRead]);
const body = (
<div className="h-full flex flex-col">
<div className="shrink-0 p-4 pb-2 space-y-3">
<div className="flex items-center justify-between">
<div className="flex items-center gap-2">
{isMobile && (
<Button
variant="ghost"
size="icon"
className="h-8 w-8 rounded-full"
onClick={() => {
onOpenChange(false);
onCloseMobileSidebar?.();
}}
>
<ChevronLeft className="h-4 w-4 text-muted-foreground" />
<span className="sr-only">Close</span>
</Button>
)}
<h2 className="text-lg font-semibold">Announcements</h2>
</div>
</div>
</div>
<div className="flex-1 overflow-y-auto p-4">
{announcements.length === 0 ? (
<AnnouncementsEmptyState />
) : (
<div className="flex flex-col gap-4">
{announcements.map((announcement) => (
<AnnouncementCard key={announcement.id} announcement={announcement} />
))}
</div>
)}
</div>
</div>
);
return (
<SidebarSlideOutPanel open={open} onOpenChange={onOpenChange} ariaLabel="Announcements">
{body}
</SidebarSlideOutPanel>
);
}

View file

@ -1,5 +1,6 @@
export { AllPrivateChatsSidebar } from "./AllPrivateChatsSidebar";
export { AllSharedChatsSidebar } from "./AllSharedChatsSidebar";
export { AnnouncementsSidebar } from "./AnnouncementsSidebar";
export { ChatListItem } from "./ChatListItem";
export { DocumentsSidebar } from "./DocumentsSidebar";
export { InboxSidebar } from "./InboxSidebar";

View file

@ -1,5 +1,6 @@
import { useQuery, useQueryClient } from "@tanstack/react-query";
import { useEffect, useRef } from "react";
import type { GetCommentsResponse } from "@/contracts/types/chat-comments.types";
import { chatCommentsApiService } from "@/lib/apis/chat-comments-api.service";
import { cacheKeys } from "@/lib/query-client/cache-keys";
@ -22,20 +23,20 @@ let _batchTargetIds = new Set<number>();
let _batchReady: Promise<void> | null = null;
let _resolveBatchReady: (() => void) | null = null;
function resetBatchGate() {
/**
 * Re-arms the batch gate with a fresh pending promise and captures its
 * resolver in `_resolveBatchReady`. Pass `resolveImmediately = true` to
 * open the gate right away (i.e. no batch is currently pending).
 */
function resetBatchGate(resolveImmediately = false) {
	_batchReady = new Promise<void>((resolve) => {
		_resolveBatchReady = resolve;
		if (resolveImmediately) {
			resolve();
		}
	});
}
// Open the initial gate immediately (no batch pending yet)
resetBatchGate();
_resolveBatchReady?.();
resetBatchGate(true);
export function useComments({ messageId, enabled = true }: UseCommentsOptions) {
const queryClient = useQueryClient();
return useQuery({
return useQuery<GetCommentsResponse>({
queryKey: cacheKeys.comments.byMessage(messageId),
queryFn: async () => {
// Wait for the batch gate so the useEffect in useBatchCommentsPreload
@ -46,7 +47,7 @@ export function useComments({ messageId, enabled = true }: UseCommentsOptions) {
if (_batchInflight && _batchTargetIds.has(messageId)) {
await _batchInflight;
const cached = queryClient.getQueryData(cacheKeys.comments.byMessage(messageId));
const cached = queryClient.getQueryData<GetCommentsResponse>(cacheKeys.comments.byMessage(messageId));
if (cached) return cached;
}