Mirror of https://github.com/MODSetter/SurfSense.git (synced 2025-09-01 18:19:08 +00:00)
fix: coderabbit suggestions

parent 8f8522af7d
commit cdc69c1c7d

3 changed files with 2 additions and 16 deletions
@@ -11,20 +11,6 @@ class GraphState(TypedDict):
     # Final output
     final_written_report: Optional[str]
 
 
-def route_based_on_research_mode(state: State) -> str:
-    """
-    Route to different workflows based on research_mode.
-
-    Args:
-        state: The current state containing the configuration
-
-    Returns:
-        "qna_workflow" for QNA mode, "report_workflow" for report modes
-    """
-    # The configuration should be available in the graph context
-    # We'll handle this by checking the research_mode during execution
-    return "route_research_mode"
-
-
 def build_graph():
     """
     Build and return the LangGraph workflow.
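Note on the first hunk: the deleted route_based_on_research_mode helper always returned the fixed string "route_research_mode", so it is removed here, presumably as dead code flagged by CodeRabbit. When routing on research_mode is actually needed, LangGraph's conditional-edge API handles it directly. Below is a minimal sketch of that pattern; the node names, the State shape, and the "QNA" value are assumptions for illustration, not the wiring used in SurfSense.

# Minimal sketch of research_mode routing via LangGraph conditional edges.
# Everything here (node names, State fields, the "QNA" literal) is assumed
# for illustration and is not taken from the SurfSense repository.
from typing import Optional, TypedDict

from langgraph.graph import END, START, StateGraph


class State(TypedDict):
    research_mode: str
    final_written_report: Optional[str]


def route_based_on_research_mode(state: State) -> str:
    """Return the name of the branch to run for the current research_mode."""
    return "qna_workflow" if state["research_mode"] == "QNA" else "report_workflow"


def build_graph():
    graph = StateGraph(State)
    # Placeholder nodes standing in for the real QNA and report sub-workflows.
    graph.add_node("qna_workflow", lambda state: {})
    graph.add_node("report_workflow", lambda state: {})
    # The router's return value selects which node runs first.
    graph.add_conditional_edges(
        START,
        route_based_on_research_mode,
        {"qna_workflow": "qna_workflow", "report_workflow": "report_workflow"},
    )
    graph.add_edge("qna_workflow", END)
    graph.add_edge("report_workflow", END)
    return graph.compile()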
@@ -928,7 +928,7 @@ async def handle_qna_workflow(state: State, config: RunnableConfig, writer: Stre
     complete_content = ""
 
     # Call the QNA agent with streaming
-    async for chunk_type, chunk in qna_agent_graph.astream(qna_state, qna_config, stream_mode=["values"]):
+    async for _chunk_type, chunk in qna_agent_graph.astream(qna_state, qna_config, stream_mode=["values"]):
         if "final_answer" in chunk:
             new_content = chunk["final_answer"]
             if new_content and new_content != complete_content:
@@ -95,7 +95,7 @@ async def answer_question(state: State, config: RunnableConfig) -> Dict[str, Any
 
     # Prepare documents for citation formatting
     formatted_documents = []
-    for i, doc in enumerate(documents):
+    for _i, doc in enumerate(documents):
         # Extract content and metadata
         content = doc.get("content", "")
         doc_info = doc.get("document", {})
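The last two hunks apply the same reviewer suggestion: loop variables that are bound but never read get an underscore prefix, which is how linters such as Ruff (B007, unused loop control variable) expect intentionally unused bindings to be marked, and it changes no behavior. A tiny sketch of the convention, using made-up data rather than repository code:

# Sketch of the unused-loop-variable convention from the last two hunks.
# The sample data is made up; only the "_" prefix pattern matters here.
documents = [{"content": "alpha"}, {"content": "beta"}]

# Before: "i" is never read inside the body, so linters flag it.
for i, doc in enumerate(documents):
    print(doc["content"])

# After: the underscore prefix marks the index as intentionally unused.
for _i, doc in enumerate(documents):
    print(doc["content"])

When the index truly is not needed, iterating over documents directly (without enumerate) is the simpler alternative, but the underscore rename keeps the diff minimal.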