temporary fix to config cache

This commit is contained in:
LUIS NOVO 2024-11-01 17:06:10 -03:00
parent cc9b0ac68c
commit b89250d3ca
18 changed files with 664 additions and 587 deletions

View file

@@ -9,10 +9,12 @@ from langgraph.graph import END, START, StateGraph
from langgraph.graph.message import add_messages
from typing_extensions import TypedDict
from open_notebook.config import DEFAULT_MODELS, LANGGRAPH_CHECKPOINT_FILE
from open_notebook.config import LANGGRAPH_CHECKPOINT_FILE, load_default_models
from open_notebook.domain.notebook import Notebook
from open_notebook.graphs.utils import run_pattern
DEFAULT_MODELS, EMBEDDING_MODEL, SPEECH_TO_TEXT_MODEL = load_default_models()
class ThreadState(TypedDict):
messages: Annotated[list, add_messages]

View file

@@ -4,7 +4,7 @@ from math import ceil
from loguru import logger
from pydub import AudioSegment
from open_notebook.config import SPEECH_TO_TEXT_MODEL
from open_notebook.config import load_default_models
from open_notebook.graphs.content_processing.state import SourceState
# future: parallelize the transcription process
@@ -72,6 +72,8 @@ def split_audio(input_file, segment_length_minutes=15, output_prefix=None):
def extract_audio(data: SourceState):
DEFAULT_MODELS, EMBEDDING_MODEL, SPEECH_TO_TEXT_MODEL = load_default_models()
input_audio_path = data.get("file_path")
audio_files = []

View file

@@ -1,14 +1,15 @@
import os
from langchain_core.runnables import (
RunnableConfig,
)
from langgraph.graph import END, START, StateGraph
from typing_extensions import TypedDict
from open_notebook.config import load_default_models
from open_notebook.domain.notebook import Note, Notebook, Source
from open_notebook.graphs.utils import run_pattern
DEFAULT_MODELS, EMBEDDING_MODEL, SPEECH_TO_TEXT_MODEL = load_default_models()
class DocQueryState(TypedDict):
doc_id: str
@@ -20,7 +21,7 @@ class DocQueryState(TypedDict):
def call_model(state: dict, config: RunnableConfig) -> dict:
model_id = config.get("configurable", {}).get(
"model_id", os.environ.get("RETRIEVAL_MODEL")
"model_id", DEFAULT_MODELS.default_transformation_model
)
return {"answer": run_pattern("doc_query", model_id, state)}

View file

@@ -7,9 +7,11 @@ from langchain_core.runnables import (
from langgraph.graph import END, START, StateGraph
from typing_extensions import Annotated, TypedDict
from open_notebook.config import DEFAULT_MODELS
from open_notebook.config import load_default_models
from open_notebook.graphs.utils import run_pattern
DEFAULT_MODELS, EMBEDDING_MODEL, SPEECH_TO_TEXT_MODEL = load_default_models()
class PatternChainState(TypedDict):
content_stack: Annotated[Sequence[str], operator.add]

View file

@@ -4,9 +4,11 @@ from langchain_core.runnables import (
from langgraph.graph import END, START, StateGraph
from typing_extensions import TypedDict
from open_notebook.config import DEFAULT_MODELS
from open_notebook.config import load_default_models
from open_notebook.graphs.utils import run_pattern
DEFAULT_MODELS, EMBEDDING_MODEL, SPEECH_TO_TEXT_MODEL = load_default_models()
class PatternState(TypedDict):
input_text: str

View file

@@ -7,10 +7,12 @@ from langchain_core.runnables import (
from langgraph.graph import END, START, StateGraph
from typing_extensions import TypedDict
from open_notebook.config import DEFAULT_MODELS
from open_notebook.config import load_default_models
from open_notebook.graphs.utils import run_pattern
from open_notebook.utils import split_text
DEFAULT_MODELS, EMBEDDING_MODEL, SPEECH_TO_TEXT_MODEL = load_default_models()
class TocState(TypedDict):
chunks: List[str]

View file

@@ -9,10 +9,12 @@ from langgraph.graph import END, START, StateGraph
from pydantic import BaseModel
from typing_extensions import TypedDict
from open_notebook.config import DEFAULT_MODELS
from open_notebook.config import load_default_models
from open_notebook.graphs.utils import run_pattern
from open_notebook.utils import split_text
DEFAULT_MODELS, EMBEDDING_MODEL, SPEECH_TO_TEXT_MODEL = load_default_models()
class SummaryResponse(BaseModel):
"""This is schema of your response. Please provide a JSON object with the enclosed keys"""

View file

@@ -1,7 +1,7 @@
from langchain.output_parsers import OutputFixingParser
from loguru import logger
from open_notebook.config import DEFAULT_MODELS
from open_notebook.config import load_default_models
from open_notebook.models import get_model
from open_notebook.prompter import Prompter
from open_notebook.utils import token_count
@@ -18,7 +18,7 @@ def run_pattern(
system_prompt = Prompter(prompt_template=pattern_name, parser=parser).render(
data=state
)
DEFAULT_MODELS, EMBEDDING_MODEL, SPEECH_TO_TEXT_MODEL = load_default_models()
tokens = token_count(str(system_prompt) + str(messages))
model_id = (