Mirror of https://github.com/MODSetter/SurfSense.git (synced 2025-09-02 10:39:13 +00:00)
Commit 1b6b406d34: 9 changed files with 197 additions and 11 deletions
@@ -68,3 +68,27 @@ def build_graph():
 
 # Compile the graph once when the module is loaded
 graph = build_graph()
(23 blank lines added, not shown)
+# L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
@@ -41,7 +41,7 @@ async def fetch_documents_by_ids(
     Args:
         document_ids: List of document IDs to fetch
         user_id: The user ID to check ownership
-        db_session: The database session
+        db_session: The database session eg L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
 
     Returns:
         Tuple of (source_objects, document_chunks) - similar to ConnectorService pattern
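Note on the hunk above: the docstring gives fetch_documents_by_ids a clear contract (document IDs plus the owning user and a DB session in, a (source_objects, document_chunks) tuple out). A minimal, hedged usage sketch in Python; the function body is stubbed because only the signature implied by the docstring is known here, and all values are made up:

import asyncio
from typing import Any, Dict, List, Tuple


# Stand-in with the signature implied by the docstring above; the real function
# lives in the SurfSense researcher agent and queries the database.
async def fetch_documents_by_ids(
    document_ids: List[int], user_id: str, db_session: Any
) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
    return [], []


async def main() -> None:
    db_session = None  # in the real code this is an async SQLAlchemy session
    source_objects, document_chunks = await fetch_documents_by_ids(
        document_ids=[1, 2, 3],   # IDs the user selected in the UI (illustrative)
        user_id="user-123",       # ownership is checked against this user
        db_session=db_session,
    )
    print(len(source_objects), len(document_chunks))


asyncio.run(main())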
@@ -168,7 +168,7 @@ async def fetch_documents_by_ids(
             url = f"https://www.youtube.com/watch?v={video_id}" if video_id else ""
 
         elif doc_type == "DISCORD_CONNECTOR":
-            # Extract Discord-specific metadata
+            # Extract Discord-specific metadata L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
             channel_name = metadata.get('channel_name', 'Unknown Channel')
             channel_id = metadata.get('channel_id', '')
             guild_id = metadata.get('guild_id', '')
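The changed comment sits in the branch that unpacks Discord metadata with dict.get defaults, so documents with missing fields still render. A self-contained illustration of that fallback behaviour; the metadata dict below is invented:

metadata = {
    "channel_name": "general",
    "guild_id": "123456789",
    # "channel_id" deliberately missing to show the default kicking in
}

channel_name = metadata.get('channel_name', 'Unknown Channel')
channel_id = metadata.get('channel_id', '')
guild_id = metadata.get('guild_id', '')

print(channel_name, repr(channel_id), guild_id)  # general '' 123456789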
@@ -535,7 +535,7 @@ async def fetch_relevant_documents(
                 search_mode=search_mode
             )
 
-            # Add to sources and raw documents
+            # Add to sources and raw documents L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
             if source_object:
                 all_sources.append(source_object)
             all_raw_documents.extend(slack_chunks)
@@ -787,7 +787,7 @@ async def process_sections(state: State, config: RunnableConfig, writer: StreamW
     using the sub_section_writer graph with the shared document pool.
 
     Returns:
-        Dict containing the final written report in the "final_written_report" key.
+        Dict containing the final written report in the "final_written_report" key L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==s.
     """
     # Get configuration and answer outline from state
     configuration = Configuration.from_runnable_config(config)
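Configuration.from_runnable_config in the context line follows the common LangGraph pattern of hydrating a configuration dataclass from config["configurable"]. A minimal sketch of that pattern in Python; the field names below are invented for illustration and SurfSense's actual Configuration defines its own:

from dataclasses import dataclass, fields
from typing import Any, Optional

from langchain_core.runnables import RunnableConfig


@dataclass(kw_only=True)
class Configuration:
    # Illustrative fields only; the real Configuration has its own set.
    num_sections: int = 3
    search_mode: str = "CHUNKS"

    @classmethod
    def from_runnable_config(cls, config: Optional[RunnableConfig] = None) -> "Configuration":
        # Pull values from config["configurable"], falling back to the defaults above.
        configurable = (config or {}).get("configurable", {})
        values: dict[str, Any] = {
            f.name: configurable[f.name] for f in fields(cls) if f.name in configurable
        }
        return cls(**values)


# Usage: values normally arrive via the RunnableConfig passed into the graph node.
cfg = Configuration.from_runnable_config({"configurable": {"num_sections": 5}})
print(cfg.num_sections, cfg.search_mode)  # 5 CHUNKS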
@@ -1080,7 +1080,7 @@ async def process_section_with_documents(
             complete_answer.extend(content_lines)
             complete_answer.append("")  # Empty line after content
 
-            # Update answer in UI in real-time
+            # Update answer in UI in real-time L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
             state.streaming_service.only_update_answer(complete_answer)
             writer({"yeild_value": state.streaming_service._format_annotations()})
 
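The loop this hunk touches streams the growing answer to the UI after every chunk: it appends the new content lines, pushes the running answer into the streaming service, and emits the formatted annotations through the writer (the key really is spelled "yeild_value" in the source). A stripped-down, runnable sketch of that pattern with the streaming service replaced by a stand-in:

from typing import Callable, Dict, List


class FakeStreamingService:
    """Stand-in for state.streaming_service: stores the answer and formats annotations."""

    def __init__(self) -> None:
        self.answer: List[str] = []

    def only_update_answer(self, answer: List[str]) -> None:
        self.answer = list(answer)

    def _format_annotations(self) -> Dict[str, List[str]]:
        return {"answer": self.answer}


def stream_section(chunks: List[str], streaming_service, writer: Callable[[Dict], None]) -> None:
    complete_answer: List[str] = []
    for chunk in chunks:
        complete_answer.extend(chunk.splitlines())
        complete_answer.append("")  # Empty line after content
        # Update answer in UI in real-time
        streaming_service.only_update_answer(complete_answer)
        writer({"yeild_value": streaming_service._format_annotations()})


stream_section(["First paragraph.", "Second paragraph."], FakeStreamingService(), print)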
@@ -1201,7 +1201,7 @@ async def handle_qna_workflow(state: State, config: RunnableConfig, writer: Stre
         # Continue with empty documents - the QNA agent will handle this gracefully
         relevant_documents = []
 
-    # Combine user-selected documents with connector-fetched documents
+    # Combine user-selected documents with connector-fetched documents L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
     all_documents = user_selected_documents + relevant_documents
 
     print(f"Fetched {len(relevant_documents)} relevant documents for QNA")
@@ -1,3 +1,6 @@
+"""
+File Hash: L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
+"""
 import datetime
 
 
@@ -18,3 +18,105 @@ workflow.add_edge("answer_question", "__end__")
 # Compile the workflow into an executable graph
 graph = workflow.compile()
 graph.name = "SurfSense QnA Agent" # This defines the custom name in LangSmith
(101 blank lines added, not shown)
+# L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
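The QnA hunk above shows the usual LangGraph wiring: an edge from answer_question to "__end__", a compile step, and an explicit graph name so runs are easy to spot in LangSmith. A compact Python sketch of that wiring with a stand-in state and node; only the edge, compile, and name lines mirror the source:

from typing import TypedDict

from langgraph.graph import StateGraph


class QnAState(TypedDict):
    # Stand-in state; the real State carries chat history, documents, etc.
    question: str
    answer: str


async def answer_question(state: QnAState) -> dict:
    # Stand-in node; the real node calls the LLM with the fetched documents.
    return {"answer": f"(answer to: {state['question']})"}


workflow = StateGraph(QnAState)
workflow.add_node("answer_question", answer_question)
workflow.add_edge("__start__", "answer_question")
workflow.add_edge("answer_question", "__end__")

# Compile the workflow into an executable graph
graph = workflow.compile()
graph.name = "SurfSense QnA Agent"  # This defines the custom name in LangSmith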
@@ -1,4 +1,6 @@
-"""Define the state structures for the agent."""
+"""Define the state structures for the agent.
+File Hash: L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
+"""
 
 from __future__ import annotations
 
@@ -1,4 +1,6 @@
-"""Define the state structures for the agent."""
+"""Define the state structures for the agent.
+File Hash: L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
+"""
 
 from __future__ import annotations
 
@@ -211,7 +211,7 @@ async def write_sub_section(state: State, config: RunnableConfig) -> Dict[str, A
         HumanMessage(content=human_message_content)
     ]
 
-    # Log final token count
+    # Log final token count L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
     total_tokens = calculate_token_count(messages_with_chat_history, llm.model)
     print(f"Final token count: {total_tokens}")
 
@@ -1,4 +1,6 @@
-"""Define the state structures for the agent."""
+"""Define the state structures for the agent.
+File Hash: L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
+"""
 
 from __future__ import annotations
 
@@ -82,6 +82,9 @@ def calculate_document_token_costs(documents: List[Dict[str, Any]], model: str)
         ))
 
     return document_token_info
+"""
+File Hash: L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
+"""
 
 
 def find_optimal_documents_with_binary_search(
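The context around this hunk names calculate_document_token_costs and find_optimal_documents_with_binary_search, which suggests the agent trims its document pool by binary searching for the largest set that fits a token budget. A self-contained Python sketch of that idea over plain per-document token counts; the costs and budget below are made up, and the real helper operates on richer document records:

from typing import List


def find_optimal_documents_with_binary_search(token_costs: List[int], budget: int) -> int:
    """Return the largest prefix length whose summed token cost fits the budget.

    Sketch of the idea only; not SurfSense's actual implementation.
    """
    # Prefix sums let each binary-search probe be checked in O(1).
    prefix = [0]
    for cost in token_costs:
        prefix.append(prefix[-1] + cost)

    lo, hi, best = 0, len(token_costs), 0
    while lo <= hi:
        mid = (lo + hi) // 2
        if prefix[mid] <= budget:
            best = mid      # mid documents fit; try to include more
            lo = mid + 1
        else:
            hi = mid - 1    # over budget; include fewer
    return best


costs = [1200, 800, 950, 400, 2200]  # hypothetical per-document token counts
print(find_optimal_documents_with_binary_search(costs, budget=3000))  # -> 3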
@@ -182,3 +185,51 @@ def calculate_token_count(messages: List[BaseMessage], model_name: str) -> int:
         model = model_name
     messages_dict = convert_langchain_messages_to_dict(messages)
     return token_counter(messages=messages_dict, model=model)
(45 blank lines added, not shown)
+"""
+File Hash: L0o55JzTBlCYJNCRYbbxt8mxqRs5kPm6QO8NzVqEZtzqWtG0EklbHuQ3I5ZBdSy8n+EqrdQxcp+R3Yc57NIm79iNS2sxt4tVMSTLeAT6qpMS2SbBER4hRiLaH5BKpXBJoCRPoFMYpDf6pdIokZyJz/EQWQZj531TfLcBfFkxJuWEqvinKhvWJPjApBd1RldixOj57mNXybHN8WFe+FnayhYQhptesoFAVXAk1WuV2URSqXxs5/00Eo8osC55gsye6LXTYzieyUKxurLKw+uy3g==
+"""
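calculate_token_count, shown in context above, converts LangChain messages into role/content dicts and passes them to a model-aware token counter. A minimal Python sketch of the same flow using litellm's token_counter; the conversion helper below is an illustrative reimplementation rather than SurfSense's actual convert_langchain_messages_to_dict, and the messages and model name are made up:

from langchain_core.messages import HumanMessage, SystemMessage
from litellm import token_counter


def convert_langchain_messages_to_dict(messages):
    # Map LangChain message types onto the role/content dicts token_counter expects.
    role_map = {"system": "system", "human": "user", "ai": "assistant"}
    return [{"role": role_map.get(m.type, "user"), "content": m.content} for m in messages]


messages = [
    SystemMessage(content="You are a helpful research assistant."),
    HumanMessage(content="Summarize the selected documents."),
]

messages_dict = convert_langchain_messages_to_dict(messages)
total_tokens = token_counter(messages=messages_dict, model="gpt-4o-mini")
print(f"Final token count: {total_tokens}")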