Mirror of https://github.com/lfnovo/open-notebook.git (synced 2026-05-01 04:50:01 +00:00)
refactor: reorganize folder structure for better maintainability

Changes:
- Move migrations/ under open_notebook/database/migrations/
- Extract AI models to open_notebook/ai/ (Model, ModelManager, provision)
- Extract podcasts to open_notebook/podcasts/ (EpisodeProfile, SpeakerProfile, PodcastEpisode)
- Reorganize prompts to mirror the graphs structure (chat/, source_chat/)

This improves code organization by:
- Consolidating database concerns (migrations now live alongside database code)
- Separating AI infrastructure from domain entities
- Isolating the podcast feature in its own module
- Creating consistent prompt/graph naming conventions

All 52 tests pass.
This commit is contained in:
parent 93cda6c42a
commit ab5560c9a2
48 changed files with 50 additions and 47 deletions
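
For orientation, a sketch of the import-path changes implied by the commit message. Only the provision_langchain_model move is confirmed by the hunks below; the other module paths are assumptions inferred from the message, not shown in this diff.

    # Hypothetical import mapping under the new layout; only the
    # provision_langchain_model move is confirmed by the diff below.

    # Before (graphs.utils is confirmed by the diff; the rest are assumed):
    # from open_notebook.graphs.utils import provision_langchain_model

    # After:
    from open_notebook.ai.provision import provision_langchain_model  # confirmed by the diff
    from open_notebook.ai.models import Model, ModelManager           # assumed module path
    from open_notebook.podcasts import (                              # assumed module path
        EpisodeProfile,
        PodcastEpisode,
        SpeakerProfile,
    )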
@@ -5,16 +5,15 @@ from typing import Annotated, Dict, List, Optional
 
 from ai_prompter import Prompter
 from langchain_core.messages import AIMessage, SystemMessage
 from langchain_core.runnables import RunnableConfig
-
-from open_notebook.utils import clean_thinking_content
 from langgraph.checkpoint.sqlite import SqliteSaver
 from langgraph.graph import END, START, StateGraph
 from langgraph.graph.message import add_messages
 from typing_extensions import TypedDict
 
+from open_notebook.ai.provision import provision_langchain_model
 from open_notebook.config import LANGGRAPH_CHECKPOINT_FILE
 from open_notebook.domain.notebook import Source, SourceInsight
-from open_notebook.graphs.utils import provision_langchain_model
+from open_notebook.utils import clean_thinking_content
 from open_notebook.utils.context_builder import ContextBuilder
 
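
A note on migration: the hunk above updates this call site directly. If other code still imported from the old path, a thin re-export shim like the sketch below would keep it working during the transition. The shim is purely illustrative and is not part of this commit.

    # open_notebook/graphs/utils.py -- hypothetical compatibility shim, not in this commit.
    # Re-exports the function from its new home so stale imports keep working.
    import warnings

    from open_notebook.ai.provision import provision_langchain_model  # new canonical location

    warnings.warn(
        "open_notebook.graphs.utils is deprecated; import provision_langchain_model "
        "from open_notebook.ai.provision instead",
        DeprecationWarning,
        stacklevel=2,
    )

    __all__ = ["provision_langchain_model"]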
@@ -111,7 +110,7 @@ def call_model_with_source_context(
     }
 
     # Apply the source_chat prompt template
-    system_prompt = Prompter(prompt_template="source_chat").render(data=prompt_data)
+    system_prompt = Prompter(prompt_template="source_chat/system").render(data=prompt_data)
     payload = [SystemMessage(content=system_prompt)] + state.get("messages", [])
 
     # Handle async model provisioning from sync context
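
The template rename follows the "prompts mirror graphs" convention from the commit message: each graph gets its own prompt folder, and "source_chat/system" selects the system template inside that folder. A minimal usage sketch, assuming templates resolve to files under a prompts/ directory; the on-disk layout and payload shape are assumptions, only the Prompter call itself appears in the diff.

    from ai_prompter import Prompter

    # Assumed on-disk layout after the refactor, mirroring open_notebook/graphs/:
    #   prompts/chat/system.jinja          (hypothetical path)
    #   prompts/source_chat/system.jinja   (hypothetical path)

    prompt_data = {"source": "...", "insights": []}  # hypothetical payload shape

    # "source_chat/system" names the system template within the source_chat group,
    # replacing the old flat "source_chat" template name.
    system_prompt = Prompter(prompt_template="source_chat/system").render(data=prompt_data)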