mirror of
https://github.com/MODSetter/SurfSense.git
synced 2026-05-05 23:42:21 +00:00
refactor: update report routes and improve export handling
- Revised report routes to clarify functionality for read, export, and delete operations.
- Enhanced the export process to run all blocking I/O in a thread executor, improving async performance.
- Updated error handling in the report panel to provide clearer feedback on loading failures.
This commit is contained in:
parent
adeef35443
commit
a668219240
2 changed files with 27 additions and 23 deletions
|
|
@ -1,8 +1,8 @@
|
|||
"""
|
||||
Report routes for CRUD operations and export (PDF/DOCX).
|
||||
Report routes for read, export (PDF/DOCX), and delete operations.
|
||||
|
||||
These routes support the report generation feature in new-chat.
|
||||
Reports are generated inline by the agent tool and stored as Markdown.
|
||||
No create or update endpoints here — reports are generated inline by the
|
||||
agent tool during chat and stored as Markdown in the database.
|
||||
Export to PDF/DOCX is on-demand via pypandoc.
|
||||
|
||||
Authorization: lightweight search-space membership checks (no granular RBAC)
|
||||
|
|
@ -12,6 +12,8 @@ since reports are chat-generated artifacts, not standalone managed resources.
|
|||
import asyncio
|
||||
import io
|
||||
import logging
|
||||
import os
|
||||
import tempfile
|
||||
from enum import Enum
|
||||
|
||||
import pypandoc
|
||||
|
|
@ -205,26 +207,32 @@ async def export_report(
|
|||
)
|
||||
|
||||
# Convert Markdown to the requested format via pypandoc.
|
||||
# pypandoc spawns a pandoc subprocess (blocking), so we run it in a
|
||||
# thread executor to avoid blocking the async event loop.
|
||||
# pypandoc spawns a pandoc subprocess (blocking), so we run the
|
||||
# entire convert → read → cleanup pipeline in a thread executor
|
||||
# to avoid blocking the async event loop on any file I/O.
|
||||
extra_args = ["--standalone"]
|
||||
if format == ExportFormat.PDF:
|
||||
extra_args.append("--pdf-engine=weasyprint")
|
||||
|
||||
loop = asyncio.get_running_loop()
|
||||
output = await loop.run_in_executor(
|
||||
None, # default thread-pool
|
||||
lambda: pypandoc.convert_text(
|
||||
report.content,
|
||||
format.value,
|
||||
format="md",
|
||||
extra_args=extra_args,
|
||||
),
|
||||
)
|
||||
def _convert_and_read() -> bytes:
    """Convert the report to the requested format and return the file bytes.

    Every step here blocks (tempfile creation, the pandoc subprocess,
    the file read, and cleanup), which is why the caller dispatches this
    helper to a thread executor rather than running it on the event loop.
    """
    handle, out_path = tempfile.mkstemp(suffix=f".{format.value}")
    # mkstemp returns an already-open descriptor we never write through;
    # close it right away so pandoc alone owns the output file.
    os.close(handle)
    try:
        pypandoc.convert_text(
            report.content,
            format.value,
            format="md",
            extra_args=extra_args,
            outputfile=out_path,
        )
        with open(out_path, "rb") as result_file:
            data = result_file.read()
        return data
    finally:
        # Remove the temp file even when pandoc raises.
        os.unlink(out_path)
|
||||
|
||||
# pypandoc returns bytes for binary formats (pdf, docx), str for text formats
|
||||
if isinstance(output, str):
|
||||
output = output.encode("utf-8")
|
||||
loop = asyncio.get_running_loop()
|
||||
output = await loop.run_in_executor(None, _convert_and_read)
|
||||
|
||||
# Sanitize filename
|
||||
safe_title = (
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@
|
|||
import { useAtomValue, useSetAtom } from "jotai";
|
||||
import {
|
||||
ChevronDownIcon,
|
||||
FileTextIcon,
|
||||
XIcon,
|
||||
} from "lucide-react";
|
||||
import { useCallback, useEffect, useRef, useState } from "react";
|
||||
|
|
@ -257,12 +256,9 @@ function ReportPanelContent({
|
|||
if (error || !reportContent) {
|
||||
return (
|
||||
<div className="flex flex-1 flex-col items-center justify-center gap-3 p-6 text-center">
|
||||
<div className="flex size-12 items-center justify-center rounded-full bg-muted">
|
||||
<FileTextIcon className="size-6 text-muted-foreground" />
|
||||
</div>
|
||||
<div>
|
||||
<p className="font-medium text-foreground">Failed to load report</p>
|
||||
<p className="text-sm text-muted-foreground mt-1">
|
||||
<p className="text-sm text-red-500 mt-1">
|
||||
{error || "An unknown error occurred"}
|
||||
</p>
|
||||
</div>
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue