diff --git a/.env.development b/.env.development index c333da04b..96f37eb0c 100644 --- a/.env.development +++ b/.env.development @@ -5,4 +5,26 @@ VITE_PROXY_URL=https://dev.eigent.ai VITE_USE_LOCAL_PROXY=false # VITE_PROXY_URL=http://localhost:3001 -# VITE_USE_LOCAL_PROXY=true \ No newline at end of file +# VITE_USE_LOCAL_PROXY=true + +TRACEROOT_TOKEN=your_traceroot_token_here + +TRACEROOT_SERVICE_NAME=eigent + +TRACEROOT_GITHUB_OWNER=eigent + +TRACEROOT_GITHUB_REPO_NAME=eigent-ai + +TRACEROOT_GITHUB_COMMIT_HASH=main + +TRACEROOT_ENABLE_SPAN_CLOUD_EXPORT=true + +TRACEROOT_ENABLE_LOG_CLOUD_EXPORT=true + +TRACEROOT_ENABLE_SPAN_CONSOLE_EXPORT=false + +TRACEROOT_ENABLE_LOG_CONSOLE_EXPORT=true + +TRACEROOT_TRACER_VERBOSE=false + +TRACEROOT_LOGGER_VERBOSE=false \ No newline at end of file diff --git a/.gitignore b/.gitignore index fa158eda8..38dc4b280 100644 --- a/.gitignore +++ b/.gitignore @@ -46,3 +46,4 @@ public/ # Testing coverage/ +.traceroot-config.yaml diff --git a/backend/app/component/environment.py b/backend/app/component/environment.py index d491603ba..3c43be8c6 100644 --- a/backend/app/component/environment.py +++ b/backend/app/component/environment.py @@ -1,4 +1,4 @@ -from app.utils import traceroot_wrapper as traceroot +from utils import traceroot_wrapper as traceroot import importlib.util import os from pathlib import Path diff --git a/backend/app/controller/chat_controller.py b/backend/app/controller/chat_controller.py index 45bafd41e..5bb2229ea 100644 --- a/backend/app/controller/chat_controller.py +++ b/backend/app/controller/chat_controller.py @@ -3,13 +3,12 @@ import os import re from pathlib import Path from dotenv import load_dotenv -from fastapi import APIRouter, Request, Response +from fastapi import APIRouter, HTTPException, Request, Response from fastapi.responses import StreamingResponse -from loguru import logger -from app.utils import traceroot_wrapper as traceroot +from utils import traceroot_wrapper as traceroot from app.component import code from app.exception.exception import UserException -from app.model.chat import Chat, HumanReply, McpServers, Status, SupplementChat +from app.model.chat import Chat, HumanReply, McpServers, Status, SupplementChat, AddTaskRequest from app.service.chat_service import step_solve from app.service.task import ( Action, @@ -17,10 +16,15 @@ from app.service.task import ( ActionInstallMcpData, ActionStopData, ActionSupplementData, - create_task_lock, + ActionAddTaskData, + ActionRemoveTaskData, + ActionSkipTaskData, + get_or_create_task_lock, get_task_lock, ) from app.component.environment import set_user_env_path +from app.utils.workforce import Workforce +from camel.tasks.task import Task router = APIRouter(tags=["chat"]) @@ -32,55 +36,103 @@ chat_logger = traceroot.get_logger('chat_controller') @router.post("/chat", name="start chat") @traceroot.trace() async def post(data: Chat, request: Request): - chat_logger.info(f"Starting new chat session for task_id: {data.task_id}, user: {data.email}") - task_lock = create_task_lock(data.task_id) - + chat_logger.info("Starting new chat session", extra={"project_id": data.project_id, "task_id": data.task_id, "user": data.email}) + task_lock = get_or_create_task_lock(data.project_id) + # Set user-specific environment path for this thread set_user_env_path(data.env_path) load_dotenv(dotenv_path=data.env_path) - # logger.debug(f"start chat: {data.model_dump_json()}") - os.environ["file_save_path"] = data.file_save_path() os.environ["browser_port"] = str(data.browser_port) os.environ["OPENAI_API_KEY"] = data.api_key os.environ["OPENAI_API_BASE_URL"] = data.api_url or "https://api.openai.com/v1" os.environ["CAMEL_MODEL_LOG_ENABLED"] = "true" - email = re.sub(r'[\\/*?:"<>|\s]', "_", data.email.split("@")[0]).strip(".") - camel_log = Path.home() / ".eigent" / email / ("task_" + data.task_id) / "camel_logs" + email_sanitized = re.sub(r'[\\/*?:"<>|\s]', "_", data.email.split("@")[0]).strip(".") + camel_log = Path.home() / ".eigent" / email_sanitized / ("task_" + data.project_id) / "camel_logs" camel_log.mkdir(parents=True, exist_ok=True) os.environ["CAMEL_LOG_DIR"] = str(camel_log) if data.is_cloud(): os.environ["cloud_api_key"] = data.api_key - - chat_logger.info(f"Chat session initialized, starting streaming response for task_id: {data.task_id}") + + # Put initial action in queue to start processing + await task_lock.put_queue(ActionImproveData(data=data.question)) + + chat_logger.info("Chat session initialized, starting streaming response", extra={"project_id": data.project_id, "task_id": data.task_id, "log_dir": str(camel_log)}) return StreamingResponse(step_solve(data, request, task_lock), media_type="text/event-stream") @router.post("/chat/{id}", name="improve chat") @traceroot.trace() def improve(id: str, data: SupplementChat): - chat_logger.info(f"Improving chat for task_id: {id} with question: {data.question}") + chat_logger.info("Chat improvement requested", extra={"task_id": id, "question_length": len(data.question)}) task_lock = get_task_lock(id) + + # Allow continuing conversation even after task is done + # This supports multi-turn conversation after complex task completion if task_lock.status == Status.done: - raise UserException(code.error, "Task was done") + # Reset status to allow processing new messages + task_lock.status = Status.confirming + # Clear any existing background tasks since workforce was stopped + if hasattr(task_lock, 'background_tasks'): + task_lock.background_tasks.clear() + # Note: conversation_history and last_task_result are preserved + + # Log context preservation + if hasattr(task_lock, 'conversation_history'): + chat_logger.info(f"[CONTEXT] Preserved {len(task_lock.conversation_history)} conversation entries") + if hasattr(task_lock, 'last_task_result'): + chat_logger.info(f"[CONTEXT] Preserved task result: {len(task_lock.last_task_result)} chars") + + # Update file save path if task_id is provided + new_folder_path = None + if data.task_id: + try: + # Get current environment values needed to construct new path + current_email = None + + # Extract email from current file_save_path if available + current_file_save_path = os.environ.get("file_save_path", "") + if current_file_save_path: + path_parts = Path(current_file_save_path).parts + if len(path_parts) >= 3 and "eigent" in path_parts: + eigent_index = path_parts.index("eigent") + if eigent_index + 1 < len(path_parts): + current_email = path_parts[eigent_index + 1] + + # If we have the necessary information, update the file_save_path + if current_email and id: + # Create new path using the existing pattern: email/project_{project_id}/task_{task_id} + new_folder_path = Path.home() / "eigent" / current_email / f"project_{id}" / f"task_{data.task_id}" + new_folder_path.mkdir(parents=True, exist_ok=True) + os.environ["file_save_path"] = str(new_folder_path) + chat_logger.info(f"Updated file_save_path to: {new_folder_path}") + + # Store the new folder path in task_lock for potential cleanup and persistence + task_lock.new_folder_path = new_folder_path + else: + chat_logger.warning(f"Could not update file_save_path - email: {current_email}, project_id: {id}") + + except Exception as e: + chat_logger.error(f"Error updating file path for project_id: {id}, task_id: {data.task_id}: {e}") + asyncio.run(task_lock.put_queue(ActionImproveData(data=data.question))) - chat_logger.info(f"Improvement request queued for task_id: {id}") + chat_logger.info("Improvement request queued with preserved context", extra={"project_id": id}) return Response(status_code=201) @router.put("/chat/{id}", name="supplement task") @traceroot.trace() def supplement(id: str, data: SupplementChat): - chat_logger.info(f"Supplementing task_id: {id} with additional data") + chat_logger.info("Chat supplement requested", extra={"task_id": id}) task_lock = get_task_lock(id) if task_lock.status != Status.done: raise UserException(code.error, "Please wait task done") asyncio.run(task_lock.put_queue(ActionSupplementData(data=data))) - chat_logger.info(f"Supplement data queued for task_id: {id}") + chat_logger.debug("Supplement data queued", extra={"task_id": id}) return Response(status_code=201) @@ -88,28 +140,92 @@ def supplement(id: str, data: SupplementChat): @traceroot.trace() def stop(id: str): """stop the task""" - chat_logger.warning(f"Stopping chat session for task_id: {id}") + chat_logger.warning("Stopping chat session", extra={"task_id": id}) task_lock = get_task_lock(id) asyncio.run(task_lock.put_queue(ActionStopData(action=Action.stop))) - chat_logger.info(f"Stop signal sent for task_id: {id}") + chat_logger.info("Chat stop signal sent", extra={"task_id": id}) return Response(status_code=204) @router.post("/chat/{id}/human-reply") @traceroot.trace() def human_reply(id: str, data: HumanReply): - chat_logger.info(f"Human reply received for task_id: {id}, agent: {data.agent}") + chat_logger.info("Human reply received", extra={"task_id": id, "reply_length": len(data.reply)}) task_lock = get_task_lock(id) asyncio.run(task_lock.put_human_input(data.agent, data.reply)) - chat_logger.info(f"Human reply processed for task_id: {id}") + chat_logger.debug("Human reply processed", extra={"task_id": id}) return Response(status_code=201) @router.post("/chat/{id}/install-mcp") @traceroot.trace() def install_mcp(id: str, data: McpServers): - chat_logger.info(f"Installing MCP servers for task_id: {id}, servers count: {len(data.get('mcpServers', {}))}") + chat_logger.info("Installing MCP servers", extra={"task_id": id, "servers_count": len(data.get('mcpServers', {}))}) task_lock = get_task_lock(id) asyncio.run(task_lock.put_queue(ActionInstallMcpData(action=Action.install_mcp, data=data))) - chat_logger.info(f"MCP installation queued for task_id: {id}") + chat_logger.info("MCP installation queued", extra={"task_id": id}) return Response(status_code=201) + + +@router.post("/chat/{id}/add-task", name="add task to workforce") +@traceroot.trace() +def add_task(id: str, data: AddTaskRequest): + """Add a new task to the workforce""" + chat_logger.info(f"Adding task to workforce for task_id: {id}, content: {data.content[:100]}...") + task_lock = get_task_lock(id) + + try: + # Queue the add task action + add_task_action = ActionAddTaskData( + content=data.content, + project_id=data.project_id, + task_id=data.task_id, + additional_info=data.additional_info, + insert_position=data.insert_position + ) + asyncio.run(task_lock.put_queue(add_task_action)) + return Response(status_code=201) + + except Exception as e: + chat_logger.error(f"Error adding task for task_id: {id}: {e}") + raise UserException(code.error, f"Failed to add task: {str(e)}") + + +@router.delete("/chat/{project_id}/remove-task/{task_id}", name="remove task from workforce") +@traceroot.trace() +def remove_task(project_id: str, task_id: str): + """Remove a task from the workforce""" + chat_logger.info(f"Removing task {task_id} from workforce for project_id: {project_id}") + task_lock = get_task_lock(project_id) + + try: + # Queue the remove task action + remove_task_action = ActionRemoveTaskData(task_id=task_id, project_id=project_id) + asyncio.run(task_lock.put_queue(remove_task_action)) + + chat_logger.info(f"Task removal request queued for project_id: {project_id}, removing task: {task_id}") + return Response(status_code=204) + + except Exception as e: + chat_logger.error(f"Error removing task {task_id} for project_id: {project_id}: {e}") + raise UserException(code.error, f"Failed to remove task: {str(e)}") + + +@router.post("/chat/{project_id}/skip-task", name="skip task in workforce") +@traceroot.trace() +def skip_task(project_id: str): + """Skip a task in the workforce""" + chat_logger.info(f"Skipping task in workforce for project_id: {project_id}") + task_lock = get_task_lock(project_id) + + try: + # Queue the skip task action + skip_task_action = ActionSkipTaskData(project_id=project_id) + asyncio.run(task_lock.put_queue(skip_task_action)) + + chat_logger.info(f"Task skip request queued for project_id: {project_id}") + return Response(status_code=201) + + except Exception as e: + chat_logger.error(f"Error skipping task for project_id: {project_id}: {e}") + raise UserException(code.error, f"Failed to skip task: {str(e)}") diff --git a/backend/app/controller/model_controller.py b/backend/app/controller/model_controller.py index fddb30bb0..1ce4f02e2 100644 --- a/backend/app/controller/model_controller.py +++ b/backend/app/controller/model_controller.py @@ -1,8 +1,11 @@ -from fastapi import APIRouter +from fastapi import APIRouter, HTTPException from pydantic import BaseModel, Field from app.component.model_validation import create_agent from camel.types import ModelType from app.component.error_format import normalize_error_to_openai_format +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("model_controller") router = APIRouter(tags=["model"]) @@ -26,33 +29,46 @@ class ValidateModelResponse(BaseModel): @router.post("/model/validate") +@traceroot.trace() async def validate_model(request: ValidateModelRequest): - try: - # API key validation - if request.api_key is not None and str(request.api_key).strip() == "": - return ValidateModelResponse( - is_valid=False, - is_tool_calls=False, - message="Invalid key. Validation failed.", - error_code="invalid_api_key", - error={ - "message": "Invalid key. Validation failed.", + """Validate model configuration and tool call support.""" + platform = request.model_platform + model_type = request.model_type + has_custom_url = request.url is not None + has_config = request.model_config_dict is not None + + logger.info("Model validation started", extra={"platform": platform, "model_type": model_type, "has_url": has_custom_url, "has_config": has_config}) + + # API key validation + if request.api_key is not None and str(request.api_key).strip() == "": + logger.warning("Model validation failed: empty API key", extra={"platform": platform, "model_type": model_type}) + raise HTTPException( + status_code=400, + detail={ + "message": "Invalid key. Validation failed.", + "error_code": "invalid_api_key", + "error": { "type": "invalid_request_error", "param": None, "code": "invalid_api_key", }, - ) + } + ) + try: extra = request.extra_params or {} + logger.debug("Creating agent for validation", extra={"platform": platform, "model_type": model_type}) agent = create_agent( - request.model_platform, - request.model_type, + platform, + model_type, api_key=request.api_key, url=request.url, model_config_dict=request.model_config_dict, **extra, ) + + logger.debug("Agent created, executing test step", extra={"platform": platform, "model_type": model_type}) response = agent.step( input_message=""" Get the content of https://www.camel-ai.org, @@ -61,17 +77,23 @@ async def validate_model(request: ValidateModelRequest): you must call the get_website_content tool only once. """ ) + + except Exception as e: # Normalize error to OpenAI-style error structure + logger.error("Model validation failed", extra={"platform": platform, "model_type": model_type, "error": str(e)}, exc_info=True) message, error_code, error_obj = normalize_error_to_openai_format(e) - return ValidateModelResponse( - is_valid=False, - is_tool_calls=False, - message=message, - error_code=error_code, - error=error_obj, + raise HTTPException( + status_code=400, + detail={ + "message": message, + "error_code": error_code, + "error": error_obj, + } ) + + # Check validation results is_valid = bool(response) is_tool_calls = False @@ -83,7 +105,7 @@ async def validate_model(request: ValidateModelRequest): == "Tool execution completed successfully for https://www.camel-ai.org, Website Content: Welcome to CAMEL AI!" ) - return ValidateModelResponse( + result = ValidateModelResponse( is_valid=is_valid, is_tool_calls=is_tool_calls, message="Validation Success" @@ -92,3 +114,7 @@ async def validate_model(request: ValidateModelRequest): error_code=None, error=None, ) + + logger.info("Model validation completed", extra={"platform": platform, "model_type": model_type, "is_valid": is_valid, "is_tool_calls": is_tool_calls}) + + return result diff --git a/backend/app/controller/task_controller.py b/backend/app/controller/task_controller.py index f8f104f1b..3a7bb2d4f 100644 --- a/backend/app/controller/task_controller.py +++ b/backend/app/controller/task_controller.py @@ -1,7 +1,6 @@ from typing import Literal from dotenv import load_dotenv from fastapi import APIRouter, Response -from loguru import logger from pydantic import BaseModel from app.model.chat import NewAgent, UpdateData from app.service.task import ( @@ -16,24 +15,32 @@ from app.service.task import ( ) import asyncio from app.component.environment import set_user_env_path +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("task_controller") router = APIRouter(tags=["task"]) @router.post("/task/{id}/start", name="start task") +@traceroot.trace() def start(id: str): task_lock = get_task_lock(id) - logger.debug(f"start task {id}") + logger.info("Starting task", extra={"task_id": id}) asyncio.run(task_lock.put_queue(ActionStartData(action=Action.start))) - logger.debug(f"start task {id} success") + logger.info("Task started successfully", extra={"task_id": id}) return Response(status_code=201) @router.put("/task/{id}", name="update task") +@traceroot.trace() def put(id: str, data: UpdateData): + logger.info("Updating task", extra={"task_id": id, "task_items_count": len(data.task)}) + logger.debug("Update task data", extra={"task_id": id, "data": data.model_dump_json()}) task_lock = get_task_lock(id) asyncio.run(task_lock.put_queue(ActionUpdateTaskData(action=Action.update_task, data=data))) + logger.info("Task updated successfully", extra={"task_id": id}) return Response(status_code=201) @@ -42,23 +49,33 @@ class TakeControl(BaseModel): @router.put("/task/{id}/take-control", name="take control pause or resume") +@traceroot.trace() def take_control(id: str, data: TakeControl): + logger.info("Task control action", extra={"task_id": id, "action": data.action}) task_lock = get_task_lock(id) asyncio.run(task_lock.put_queue(ActionTakeControl(action=data.action))) + logger.info("Task control action completed", extra={"task_id": id, "action": data.action}) return Response(status_code=204) @router.post("/task/{id}/add-agent", name="add new agent") +@traceroot.trace() def add_agent(id: str, data: NewAgent): + logger.info("Adding new agent to task", extra={"task_id": id, "agent_name": data.name}) + logger.debug("New agent data", extra={"task_id": id, "agent_data": data.model_dump_json()}) # Set user-specific environment path for this thread set_user_env_path(data.env_path) load_dotenv(dotenv_path=data.env_path) asyncio.run(get_task_lock(id).put_queue(ActionNewAgent(**data.model_dump()))) + logger.info("Agent added to task", extra={"task_id": id, "agent_name": data.name}) return Response(status_code=204) @router.delete("/task/stop-all", name="stop all tasks") +@traceroot.trace() def stop_all(): + logger.warning("Stopping all tasks", extra={"task_count": len(task_locks)}) for task_lock in task_locks.values(): asyncio.run(task_lock.put_queue(ActionStopData())) + logger.info("All tasks stopped", extra={"task_count": len(task_locks)}) return Response(status_code=204) diff --git a/backend/app/controller/tool_controller.py b/backend/app/controller/tool_controller.py index c8eb581c3..c1187eb4d 100644 --- a/backend/app/controller/tool_controller.py +++ b/backend/app/controller/tool_controller.py @@ -1,10 +1,10 @@ from fastapi import APIRouter, HTTPException -from loguru import logger from app.utils.toolkit.notion_mcp_toolkit import NotionMCPToolkit from app.utils.toolkit.google_calendar_toolkit import GoogleCalendarToolkit from app.utils.oauth_state_manager import oauth_state_manager +from utils import traceroot_wrapper as traceroot - +logger = traceroot.get_logger("tool_controller") router = APIRouter(tags=["task"]) diff --git a/backend/app/exception/handler.py b/backend/app/exception/handler.py index 3e2bf9c81..8a4eb276d 100644 --- a/backend/app/exception/handler.py +++ b/backend/app/exception/handler.py @@ -3,18 +3,22 @@ from fastapi import Request from fastapi.encoders import jsonable_encoder from fastapi.exceptions import RequestValidationError from fastapi.responses import JSONResponse -from loguru import logger from app import api from app.component import code from app.exception.exception import NoPermissionException, ProgramException, TokenException from app.component.pydantic.i18n import trans, get_language from app.exception.exception import UserException +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("exception_handler") @api.exception_handler(RequestValidationError) async def request_exception(request: Request, e: RequestValidationError): if (lang := get_language(request.headers.get("Accept-Language"))) is None: lang = "en_US" + logger.warning(f"Validation error on {request.url.path}: {e.errors()}") + return JSONResponse( content={ "code": code.form_error, @@ -25,16 +29,19 @@ async def request_exception(request: Request, e: RequestValidationError): @api.exception_handler(TokenException) async def token_exception(request: Request, e: TokenException): + logger.warning(f"Token exception on {request.url.path}: {e.text}") return JSONResponse(content={"code": e.code, "text": e.text}) @api.exception_handler(UserException) async def user_exception(request: Request, e: UserException): + logger.info(f"User exception on {request.url.path}: {e.description}") return JSONResponse(content={"code": e.code, "text": e.description}) @api.exception_handler(NoPermissionException) async def no_permission(request: Request, exception: NoPermissionException): + logger.warning(f"No permission on {request.url.path}: {exception.text}") return JSONResponse( status_code=200, content={"code": code.no_permission_error, "text": exception.text}, @@ -43,6 +50,7 @@ async def no_permission(request: Request, exception: NoPermissionException): @api.exception_handler(ProgramException) async def program_exception(request: Request, exception: NoPermissionException): + logger.error(f"Program exception on {request.url.path}: {exception.text}", exc_info=True) return JSONResponse( status_code=200, content={"code": code.program_error, "text": exception.text}, @@ -51,8 +59,16 @@ async def program_exception(request: Request, exception: NoPermissionException): @api.exception_handler(Exception) async def global_exception_handler(request: Request, exc: Exception): - logger.error(f"Unhandled error: {exc}") - traceback.print_exc() # output to electron log + logger.error( + f"Unhandled exception on {request.method} {request.url.path}: {exc}", + exc_info=True, + extra={ + "request_method": request.method, + "request_path": str(request.url.path), + "request_query": str(request.url.query), + "client_host": request.client.host if request.client else None, + } + ) return JSONResponse( status_code=500, diff --git a/backend/app/model/chat.py b/backend/app/model/chat.py index e04de75e8..48f903183 100644 --- a/backend/app/model/chat.py +++ b/backend/app/model/chat.py @@ -3,9 +3,11 @@ import json from pathlib import Path import re from typing import Literal -from loguru import logger -from pydantic import BaseModel, field_validator +from pydantic import BaseModel, Field, field_validator from camel.types import ModelType, RoleType +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("chat_model") class Status(str, Enum): @@ -20,11 +22,22 @@ class ChatHistory(BaseModel): content: str +class QuestionAnalysisResult(BaseModel): + type: Literal["simple", "complex"] = Field( + description="Whether this is a simple question or complex task" + ) + answer: str | None = Field( + default=None, + description="Direct answer for simple questions. None for complex tasks." + ) + + McpServers = dict[Literal["mcpServers"], dict[str, dict]] class Chat(BaseModel): task_id: str + project_id: str question: str email: str attaches: list[str] = [] @@ -72,7 +85,8 @@ class Chat(BaseModel): def file_save_path(self, path: str | None = None): email = re.sub(r'[\\/*?:"<>|\s]', "_", self.email.split("@")[0]).strip(".") - save_path = Path.home() / "eigent" / email / ("task_" + self.task_id) + # Use project-based structure: project_{project_id}/task_{task_id} + save_path = Path.home() / "eigent" / email / f"project_{self.project_id}" / f"task_{self.task_id}" if path is not None: save_path = save_path / path save_path.mkdir(parents=True, exist_ok=True) @@ -82,6 +96,7 @@ class Chat(BaseModel): class SupplementChat(BaseModel): question: str + task_id: str | None = None class HumanReply(BaseModel): @@ -106,6 +121,18 @@ class NewAgent(BaseModel): env_path: str | None = None +class AddTaskRequest(BaseModel): + content: str + project_id: str | None = None + task_id: str | None = None + additional_info: dict | None = None + insert_position: int = -1 + is_independent: bool = False + + +class RemoveTaskRequest(BaseModel): + task_id: str + def sse_json(step: str, data): res_format = {"step": step, "data": data} return f"data: {json.dumps(res_format, ensure_ascii=False)}\n\n" diff --git a/backend/app/service/chat_service.py b/backend/app/service/chat_service.py index a17a3451c..a75e0d295 100644 --- a/backend/app/service/chat_service.py +++ b/backend/app/service/chat_service.py @@ -1,5 +1,6 @@ import asyncio import datetime +import json from pathlib import Path import platform from typing import Literal @@ -8,6 +9,7 @@ from inflection import titleize from pydash import chain from app.component.debug import dump_class from app.component.environment import env +from app.utils.file_utils import get_working_directory from app.service.task import ( ActionImproveData, ActionInstallMcpData, @@ -19,7 +21,6 @@ from camel.toolkits import AgentCommunicationToolkit, ToolkitMessageIntegration from app.utils.toolkit.human_toolkit import HumanToolkit from app.utils.toolkit.note_taking_toolkit import NoteTakingToolkit from app.utils.workforce import Workforce -from loguru import logger from app.model.chat import Chat, NewAgent, Status, sse_json, TaskContent from camel.tasks import Task from app.utils.agent import ( @@ -40,9 +41,199 @@ from app.service.task import Action, Agents from app.utils.server.sync_step import sync_step from camel.types import ModelPlatformType from camel.models import ModelProcessingError +from utils import traceroot_wrapper as traceroot +import os + +logger = traceroot.get_logger("chat_service") + + +def format_task_context(task_data: dict, seen_files: set | None = None, skip_files: bool = False) -> str: + """Format structured task data into a readable context string. + + Args: + task_data: Dictionary containing task content, result, and working directory + seen_files: Optional set to track already-listed files and avoid duplicates (deprecated, use skip_files instead) + skip_files: If True, skip the file listing entirely + """ + context_parts = [] + + if task_data.get('task_content'): + context_parts.append(f"Previous Task: {task_data['task_content']}") + + if task_data.get('task_result'): + context_parts.append(f"Previous Task Result: {task_data['task_result']}") + + # Skip file listing if requested + if not skip_files: + working_directory = task_data.get('working_directory') + if working_directory: + try: + if os.path.exists(working_directory): + generated_files = [] + for root, dirs, files in os.walk(working_directory): + dirs[:] = [d for d in dirs if not d.startswith('.') and d not in ['node_modules', '__pycache__', 'venv']] + for file in files: + if not file.startswith('.') and not file.endswith(('.pyc', '.tmp')): + file_path = os.path.join(root, file) + absolute_path = os.path.abspath(file_path) + + # Only add if not seen before (or if we're not tracking seen files) + if seen_files is None or absolute_path not in seen_files: + generated_files.append(absolute_path) + if seen_files is not None: + seen_files.add(absolute_path) + + if generated_files: + context_parts.append("Generated Files from Previous Task:") + for file_path in sorted(generated_files): + context_parts.append(f" - {file_path}") + except Exception as e: + logger.warning(f"Failed to collect generated files: {e}") + + return "\n".join(context_parts) + + +def collect_previous_task_context(working_directory: str, previous_task_content: str, previous_task_result: str, previous_summary: str = "") -> str: + """ + Collect context from previous task including content, result, summary, and generated files. + + Args: + working_directory: The working directory to scan for generated files + previous_task_content: The content of the previous task + previous_task_result: The result/output of the previous task + previous_summary: The summary of the previous task + + Returns: + Formatted context string to prepend to new task + """ + + context_parts = [] + + # Add previous task information + context_parts.append("=== CONTEXT FROM PREVIOUS TASK ===\n") + + # Add previous task content + if previous_task_content: + context_parts.append(f"Previous Task:\n{previous_task_content}\n") + + # Add previous task summary + if previous_summary: + context_parts.append(f"Previous Task Summary:\n{previous_summary}\n") + + # Add previous task result + if previous_task_result: + context_parts.append(f"Previous Task Result:\n{previous_task_result}\n") + + # Collect generated files from working directory + try: + if os.path.exists(working_directory): + generated_files = [] + for root, dirs, files in os.walk(working_directory): + dirs[:] = [d for d in dirs if not d.startswith('.') and d not in ['node_modules', '__pycache__', 'venv']] + for file in files: + if not file.startswith('.') and not file.endswith(('.pyc', '.tmp')): + file_path = os.path.join(root, file) + absolute_path = os.path.abspath(file_path) + generated_files.append(absolute_path) + + if generated_files: + context_parts.append("Generated Files from Previous Task:") + for file_path in sorted(generated_files): + context_parts.append(f" - {file_path}") + context_parts.append("") + except Exception as e: + logger.warning(f"Failed to collect generated files: {e}") + + context_parts.append("=== END OF PREVIOUS TASK CONTEXT ===\n") + + return "\n".join(context_parts) + + +def check_conversation_history_length(task_lock: TaskLock, max_length: int = 100000) -> tuple[bool, int]: + """ + Check if conversation history exceeds maximum length + + Returns: + tuple: (is_exceeded, total_length) + """ + if not hasattr(task_lock, 'conversation_history') or not task_lock.conversation_history: + return False, 0 + + total_length = 0 + for entry in task_lock.conversation_history: + total_length += len(entry.get('content', '')) + + is_exceeded = total_length > max_length + + if is_exceeded: + logger.warning(f"Conversation history length {total_length} exceeds maximum {max_length}") + + return is_exceeded, total_length + + +def build_conversation_context(task_lock: TaskLock, header: str = "=== CONVERSATION HISTORY ===") -> str: + """Build conversation context from task_lock history with files listed only once at the end. + + Args: + task_lock: TaskLock containing conversation history + header: Header text for the context section + + Returns: + Formatted context string with task history and files listed once at the end + """ + context = "" + working_directory = None + + if task_lock.conversation_history: + context = f"{header}\n" + + for entry in task_lock.conversation_history: + if entry['role'] == 'task_result': + if isinstance(entry['content'], dict): + # Format without file listing + formatted_context = format_task_context(entry['content'], skip_files=True) + context += formatted_context + "\n\n" + # Remember the working directory from the last task + if entry['content'].get('working_directory'): + working_directory = entry['content']['working_directory'] + else: + context += entry['content'] + "\n" + elif entry['role'] == 'assistant': + context += f"Assistant: {entry['content']}\n\n" + + # Add all generated files at the end, only once + if working_directory: + try: + if os.path.exists(working_directory): + generated_files = [] + for root, dirs, files in os.walk(working_directory): + dirs[:] = [d for d in dirs if not d.startswith('.') and d not in ['node_modules', '__pycache__', 'venv']] + for file in files: + if not file.startswith('.') and not file.endswith(('.pyc', '.tmp')): + file_path = os.path.join(root, file) + absolute_path = os.path.abspath(file_path) + generated_files.append(absolute_path) + + if generated_files: + context += "Generated Files from Previous Tasks:\n" + for file_path in sorted(generated_files): + context += f" - {file_path}\n" + context += "\n" + except Exception as e: + logger.warning(f"Failed to collect generated files: {e}") + + context += "\n" + + return context + + +def build_context_for_workforce(task_lock: TaskLock, options: Chat) -> str: + """Build context information for workforce.""" + return build_conversation_context(task_lock, header="=== CONVERSATION HISTORY ===") @sync_step +@traceroot.trace() async def step_solve(options: Chat, request: Request, task_lock: TaskLock): # if True: # import faulthandler @@ -52,12 +243,40 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): # faulthandler.dump_traceback_later(second) start_event_loop = True - question_agent = question_confirm_agent(options) + + if not hasattr(task_lock, 'conversation_history'): + task_lock.conversation_history = [] + if not hasattr(task_lock, 'last_task_result'): + task_lock.last_task_result = "" + if not hasattr(task_lock, 'question_agent'): + task_lock.question_agent = None + if not hasattr(task_lock, 'summary_generated'): + task_lock.summary_generated = False + + # Create or reuse persistent question_agent + if task_lock.question_agent is None: + task_lock.question_agent = question_confirm_agent(options) + logger.info(f"Created new persistent question_agent for project {options.project_id}") + else: + logger.info(f"Reusing existing question_agent with {len(task_lock.conversation_history)} history entries") + + question_agent = task_lock.question_agent + + # Other variables camel_task = None workforce = None + last_completed_task_result = "" # Track the last completed task result + summary_task_content = "" # Track task summary + loop_iteration = 0 + + logger.info("Starting step_solve", extra={"project_id": options.project_id, "task_id": options.task_id}) + logger.debug("Step solve options", extra={"task_id": options.task_id, "model_platform": options.model_platform}) + while True: + loop_iteration += 1 + if await request.is_disconnected(): - logger.warning(f"Client disconnected for task {options.task_id}") + logger.warning(f"Client disconnected for project {options.project_id}") if workforce is not None: if workforce._running: workforce.stop() @@ -70,10 +289,10 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): break try: item = await task_lock.get_queue() - # logger.info(f"item: {dump_class(item)}") except Exception as e: - logger.error(f"Error getting item from queue: {e}") - break + logger.error("Error getting item from queue", extra={"project_id": options.project_id, "task_id": options.task_id, "error": str(e)}, exc_info=True) + # Continue waiting instead of breaking on queue error + continue try: if item.action == Action.improve or start_event_loop: @@ -87,33 +306,116 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): else: assert isinstance(item, ActionImproveData) question = item.data - if len(question) < 12 and len(options.attaches) == 0: - confirm = await question_confirm(question_agent, question) - else: - confirm = True - if confirm is not True: - yield confirm + is_exceeded, total_length = check_conversation_history_length(task_lock) + if is_exceeded: + logger.error("Conversation history too long", extra={"project_id": options.project_id, "current_length": total_length, "max_length": 100000}) + yield sse_json("context_too_long", { + "message": "The conversation history is too long. Please create a new project to continue.", + "current_length": total_length, + "max_length": 100000 + }) + continue + + # Simplified logic: attachments mean workforce, otherwise let agent decide + is_complex_task: bool + if len(options.attaches) > 0: + # Questions with attachments always need workforce + is_complex_task = True else: - yield sse_json("confirmed", "") + is_complex_task = await question_confirm(question_agent, question, task_lock) + + if not is_complex_task: + simple_answer_prompt = f"{build_conversation_context(task_lock, header='=== Previous Conversation ===')}User Query: {question}\n\nProvide a direct, helpful answer to this simple question." + + try: + simple_resp = question_agent.step(simple_answer_prompt) + answer_content = simple_resp.msgs[0].content if simple_resp and simple_resp.msgs else "I understand your question, but I'm having trouble generating a response right now." + + task_lock.add_conversation('assistant', answer_content) + + yield sse_json("wait_confirm", {"content": answer_content, "question": question}) + except Exception as e: + logger.error(f"Error generating simple answer: {e}") + yield sse_json("wait_confirm", {"content": "I encountered an error while processing your question.", "question": question}) + + # Clean up empty folder if it was created for this task + if hasattr(task_lock, 'new_folder_path') and task_lock.new_folder_path: + try: + folder_path = Path(task_lock.new_folder_path) + if folder_path.exists() and folder_path.is_dir(): + # Check if folder is empty + if not any(folder_path.iterdir()): + folder_path.rmdir() + logger.info(f"Cleaned up empty folder: {folder_path}") + # Also clean up parent project folder if it becomes empty + project_folder = folder_path.parent + if project_folder.exists() and not any(project_folder.iterdir()): + project_folder.rmdir() + logger.info(f"Cleaned up empty project folder: {project_folder}") + else: + logger.info(f"Folder not empty, keeping: {folder_path}") + # Reset the folder path + task_lock.new_folder_path = None + except Exception as e: + logger.error(f"Error cleaning up folder: {e}") + else: + yield sse_json("confirmed", {"question": question}) + + context_for_coordinator = build_context_for_workforce(task_lock, options) + (workforce, mcp) = await construct_workforce(options) for new_agent in options.new_agents: workforce.add_single_agent_worker( format_agent_description(new_agent), await new_agent_model(new_agent, options) ) - summary_task_agent = task_summary_agent(options) task_lock.status = Status.confirmed - question = question + options.summary_prompt - camel_task = Task(content=question, id=options.task_id) + + clean_task_content = question + options.summary_prompt + camel_task = Task(content=clean_task_content, id=options.task_id) if len(options.attaches) > 0: camel_task.additional_info = {Path(file_path).name: file_path for file_path in options.attaches} - sub_tasks = await asyncio.to_thread(workforce.eigent_make_sub_tasks, camel_task) - summary_task_content = await summary_task(summary_task_agent, camel_task) + sub_tasks = await asyncio.to_thread( + workforce.eigent_make_sub_tasks, + camel_task, + context_for_coordinator + ) + + if not task_lock.summary_generated: + summary_task_agent = task_summary_agent(options) + try: + summary_task_content = await asyncio.wait_for( + summary_task(summary_task_agent, camel_task), timeout=10 + ) + task_lock.summary_generated = True + logger.info("Generated summary for first task", extra={"project_id": options.project_id}) + except asyncio.TimeoutError: + logger.warning("summary_task timeout", extra={"project_id": options.project_id, "task_id": options.task_id}) + # Fallback to a minimal summary to unblock UI + fallback_name = "Task" + content_preview = camel_task.content if hasattr(camel_task, "content") else "" + if content_preview is None: + content_preview = "" + fallback_summary = ( + (content_preview[:80] + "...") if len(content_preview) > 80 else content_preview + ) + summary_task_content = f"{fallback_name}|{fallback_summary}" + task_lock.summary_generated = True + else: + if len(question) > 100: + summary_task_content = f"Task|{question[:97]}..." + else: + summary_task_content = f"Task|{question}" + logger.info("Skipped summary generation for subsequent task", extra={"project_id": options.project_id}) + yield to_sub_tasks(camel_task, summary_task_content) # tracer.stop() # tracer.save("trace.json") + + # Only auto-start in debug mode if env("debug") == "on": + logger.info(f"[DEBUG] Auto-starting workforce in debug mode") task_lock.status = Status.processing task = asyncio.create_task(workforce.eigent_start(sub_tasks)) task_lock.add_background_task(task) @@ -124,12 +426,185 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): sub_tasks = update_sub_tasks(sub_tasks, update_tasks) add_sub_tasks(camel_task, item.data.task) yield to_sub_tasks(camel_task, summary_task_content) + elif item.action == Action.add_task: + + # Check if this might be a misrouted second question + if camel_task is None and workforce is None: + continue + + assert camel_task is not None + if workforce is None: + logger.error(f"Cannot add task: workforce not initialized for project {options.project_id}") + yield sse_json("error", {"message": "Workforce not initialized. Please start the task first."}) + continue + + # Add task to the workforce queue + workforce.add_task( + item.content, + item.task_id, + item.additional_info + ) + + returnData = { + "project_id": item.project_id, + "task_id": item.task_id or (len(camel_task.subtasks) + 1) + } + yield sse_json("add_task", returnData) + elif item.action == Action.remove_task: + assert camel_task is not None + if workforce is None: + logger.error(f"Cannot remove task: workforce not initialized for project {options.project_id}") + yield sse_json("error", {"message": "Workforce not initialized. Please start the task first."}) + continue + + workforce.remove_task(item.task_id) + returnData = { + "project_id": item.project_id, + "task_id": item.task_id + } + yield sse_json("remove_task", returnData) + elif item.action == Action.skip_task: + if workforce is not None and item.project_id == options.project_id: + if workforce._state.name == 'PAUSED': + # Resume paused workforce to skip the task + workforce.resume() + workforce.skip_gracefully() elif item.action == Action.start: + # Check conversation history length before starting task + is_exceeded, total_length = check_conversation_history_length(task_lock) + if is_exceeded: + logger.error(f"Cannot start task: conversation history too long ({total_length} chars) for project {options.project_id}") + yield sse_json("context_too_long", { + "message": "The conversation history is too long. Please create a new project to continue.", + "current_length": total_length, + "max_length": 100000 + }) + continue + + if workforce is not None: + if workforce._state.name == 'PAUSED': + # Resume paused workforce - subtasks should already be loaded + workforce.resume() + continue + else: + continue + task_lock.status = Status.processing task = asyncio.create_task(workforce.eigent_start(sub_tasks)) task_lock.add_background_task(task) elif item.action == Action.task_state: + # Track completed task results for the end event + task_id = item.data.get('task_id', 'unknown') + task_state = item.data.get('state', 'unknown') + task_result = item.data.get('result', '') + + + if task_state == 'DONE' and task_result: + last_completed_task_result = task_result + yield sse_json("task_state", item.data) + elif item.action == Action.new_task_state: + + # Log new task state details + new_task_id = item.data.get('task_id', 'unknown') + new_task_state = item.data.get('state', 'unknown') + new_task_result = item.data.get('result', '') + + + assert camel_task is not None + + old_task_content: str = camel_task.content + old_task_result: str = await get_task_result_with_optional_summary(camel_task, options) + + old_task_content_clean: str = old_task_content + if "=== CURRENT TASK ===" in old_task_content_clean: + old_task_content_clean = old_task_content_clean.split("=== CURRENT TASK ===")[-1].strip() + + task_lock.add_conversation('task_result', { + 'task_content': old_task_content_clean, + 'task_result': old_task_result, + 'working_directory': get_working_directory(options, task_lock) + }) + + new_task_content = item.data.get('content', '') + + if new_task_content: + import time + task_id = item.data.get('task_id', f"{int(time.time() * 1000)}-multi") + new_camel_task = Task(content=new_task_content, id=task_id) + if hasattr(camel_task, 'additional_info') and camel_task.additional_info: + new_camel_task.additional_info = camel_task.additional_info + camel_task = new_camel_task + + # Now trigger end of previous task using stored result + yield sse_json("end", old_task_result) + # Always yield new_task_state first - this is not optional + yield sse_json("new_task_state", item.data) + # Trigger Queue Removal + yield sse_json("remove_task", {"task_id": item.data.get("task_id")}) + + # Then handle multi-turn processing + if workforce is not None and new_task_content: + task_lock.status = Status.confirming + workforce.pause() + + try: + is_multi_turn_complex = await question_confirm(question_agent, new_task_content, task_lock) + + if not is_multi_turn_complex: + simple_answer_prompt = f"{build_conversation_context(task_lock, header='=== Previous Conversation ===')}User Query: {new_task_content}\n\nProvide a direct, helpful answer to this simple question." + + try: + simple_resp = question_agent.step(simple_answer_prompt) + answer_content = simple_resp.msgs[0].content if simple_resp and simple_resp.msgs else "I understand your question, but I'm having trouble generating a response right now." + + task_lock.add_conversation('assistant', answer_content) + + # Send response to user + yield sse_json("confirmed", {"question": new_task_content}) + yield sse_json("wait_confirm", {"content": answer_content, "question": new_task_content}) + except Exception as e: + logger.error(f"Error generating simple answer in multi-turn: {e}") + yield sse_json("wait_confirm", {"content": "I encountered an error while processing your question.", "question": new_task_content}) + + workforce.resume() + continue # This continues the main while loop, waiting for next action + + yield sse_json("confirmed", {"question": new_task_content}) + task_lock.status = Status.confirmed + + context_for_multi_turn = build_context_for_workforce(task_lock, options) + + new_sub_tasks = await workforce.handle_decompose_append_task( + camel_task, + reset=False, + coordinator_context=context_for_multi_turn + ) + + task_content_for_summary = new_task_content + if len(task_content_for_summary) > 100: + new_summary_content = f"Follow-up Task|{task_content_for_summary[:97]}..." + else: + new_summary_content = f"Follow-up Task|{task_content_for_summary}" + + # Send the extracted events + yield to_sub_tasks(camel_task, new_summary_content) + + # Update the context with new task data + sub_tasks = new_sub_tasks + summary_task_content = new_summary_content + + + except Exception as e: + import traceback + logger.error(f"[TRACE] Traceback: {traceback.format_exc()}") + # Continue with existing context if decomposition fails + yield sse_json("error", {"message": f"Failed to process task: {str(e)}"}) + else: + if workforce is None: + logger.warning(f"[TRACE] Workforce is None - this might be the issue") + if not new_task_content: + logger.warning(f"[TRACE] No new task content provided") elif item.action == Action.create_agent: yield sse_json("create_agent", item.data) elif item.action == Action.activate_agent: @@ -167,9 +642,15 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): elif item.action == Action.pause: if workforce is not None: workforce.pause() + logger.info(f"Workforce paused for project {options.project_id}") + else: + logger.warning(f"Cannot pause: workforce is None for project {options.project_id}") elif item.action == Action.resume: if workforce is not None: workforce.resume() + logger.info(f"Workforce resumed for project {options.project_id}") + else: + logger.warning(f"Cannot resume: workforce is None for project {options.project_id}") elif item.action == Action.new_agent: if workforce is not None: workforce.pause() @@ -180,21 +661,52 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): elif item.action == Action.end: assert camel_task is not None task_lock.status = Status.done - yield sse_json("end", str(camel_task.result)) + final_result: str = await get_task_result_with_optional_summary(camel_task, options) + + task_lock.last_task_result = final_result + + task_content: str = camel_task.content + if "=== CURRENT TASK ===" in task_content: + task_content = task_content.split("=== CURRENT TASK ===")[-1].strip() + + task_lock.add_conversation('task_result', { + 'task_content': task_content, + 'task_result': final_result, + 'working_directory': get_working_directory(options, task_lock) + }) + + + yield sse_json("end", final_result) + if workforce is not None: workforce.stop_gracefully() - break + logger.info(f"Workforce stopped gracefully for project {options.project_id}") + workforce = None + else: + logger.warning(f"Workforce already None at end action for project {options.project_id}") + + camel_task = None + + if question_agent is not None: + question_agent.reset() + logger.info(f"Reset question_agent for project {options.project_id}") elif item.action == Action.supplement: - assert camel_task is not None - task_lock.status = Status.processing - camel_task.add_subtask( - Task( - content=item.data.question, - id=f"{camel_task.id}.{len(camel_task.subtasks)}", + + # Check if this might be a misrouted second question + if camel_task is None: + logger.warning(f"SUPPLEMENT action received but camel_task is None for project {options.project_id}") + else: + assert camel_task is not None + task_lock.status = Status.processing + camel_task.add_subtask( + Task( + content=item.data.question, + id=f"{camel_task.id}.{len(camel_task.subtasks)}", + ) ) - ) - task = asyncio.create_task(workforce.eigent_start(camel_task.subtasks)) - task_lock.add_background_task(task) + if workforce is not None: + task = asyncio.create_task(workforce.eigent_start(camel_task.subtasks)) + task_lock.add_background_task(task) elif item.action == Action.budget_not_enough: if workforce is not None: workforce.pause() @@ -204,32 +716,43 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock): if workforce._running: workforce.stop() workforce.stop_gracefully() + logger.info(f"Workforce stopped for project {options.project_id}") + else: + logger.warning(f"Workforce is None at stop action for project {options.project_id}") await delete_task_lock(task_lock.id) break else: logger.warning(f"Unknown action: {item.action}") except ModelProcessingError as e: if "Budget has been exceeded" in str(e): + logger.warning(f"Budget exceeded for task {options.task_id}, action: {item.action}") # workforce decompose task don't use ListenAgent, this need return sse if "workforce" in locals() and workforce is not None: workforce.pause() yield sse_json(Action.budget_not_enough, {"message": "budget not enouth"}) else: - logger.error(f"Error processing action {item.action}: {e}") + logger.error(f"ModelProcessingError for task {options.task_id}, action {item.action}: {e}", exc_info=True) yield sse_json("error", {"message": str(e)}) if "workforce" in locals() and workforce is not None and workforce._running: workforce.stop() except Exception as e: - logger.error(f"Error processing action {item.action}: {e}") + logger.error(f"Unhandled exception for task {options.task_id}, action {item.action}: {e}", exc_info=True) yield sse_json("error", {"message": str(e)}) # Continue processing other items instead of breaking +@traceroot.trace() async def install_mcp( mcp: ListenChatAgent, install_mcp: ActionInstallMcpData, ): - mcp.add_tools(await get_mcp_tools(install_mcp.data)) + logger.info(f"Installing MCP tools: {list(install_mcp.data.get('mcpServers', {}).keys())}") + try: + mcp.add_tools(await get_mcp_tools(install_mcp.data)) + logger.info("MCP tools installed successfully") + except Exception as e: + logger.error(f"Error installing MCP tools: {e}", exc_info=True) + raise def to_sub_tasks(task: Task, summary_task_content: str): @@ -287,30 +810,53 @@ def add_sub_tasks(camel_task: Task, update_tasks: list[TaskContent]): ) -async def question_confirm(agent: ListenChatAgent, prompt: str) -> str | Literal[True]: - prompt = f""" -> **Your Role:** You are a highly capable agent. Your primary function is to analyze a user's request and determine the appropriate course of action. -> -> **Your Process:** -> -> 1. **Analyze the User's Query:** Carefully examine the user's request: `{prompt}`. -> -> 2. **Categorize the Query:** -> * **Simple Query:** Is this a simple greeting, a question that can be answered directly, or a conversational interaction (e.g., "hello", "thank you")? -> * **Complex Task:** Is this a request that requires a series of steps, code execution, or interaction with tools to complete? -> -> 3. **Execute Your Decision:** -> * **For a Simple Query:** Provide a direct and helpful response. -> * **For a Complex Task:** Your *only* response should be "yes". This will trigger a specialized workforce to handle the task. Do not include any other text, punctuation, or pleasantries. - """ - resp = agent.step(prompt) - logger.info(f"resp: {agent.chat_history}") - if resp.msgs[0].content.lower() != "yes": - return sse_json("wait_confirm", {"content": resp.msgs[0].content}) - else: +async def question_confirm(agent: ListenChatAgent, prompt: str, task_lock: TaskLock | None = None) -> bool: + """Simple question confirmation - returns True for complex tasks, False for simple questions.""" + + context_prompt = "" + if task_lock: + context_prompt = build_conversation_context(task_lock, header="=== Previous Conversation ===") + + full_prompt = f"""{context_prompt}User Query: {prompt} + +Determine if this user query is a complex task or a simple question. + +**Complex task** (answer "yes"): Requires tools, code execution, file operations, multi-step planning, or creating/modifying content +- Examples: "create a file", "search for X", "implement feature Y", "write code", "analyze data", "build something" + +**Simple question** (answer "no"): Can be answered directly with knowledge or conversation history, no action needed +- Examples: greetings ("hello", "hi"), fact queries ("what is X?"), clarifications ("what did you mean?"), status checks ("how are you?") + +Answer only "yes" or "no". Do not provide any explanation. + +Is this a complex task? (yes/no):""" + + try: + resp = agent.step(full_prompt) + + if not resp or not resp.msgs or len(resp.msgs) == 0: + logger.warning("No response from agent, defaulting to complex task") + return True + + content = resp.msgs[0].content + if not content: + logger.warning("Empty content from agent, defaulting to complex task") + return True + + normalized = content.strip().lower() + is_complex = "yes" in normalized + + logger.info(f"Question confirm result: {'complex task' if is_complex else 'simple question'}", + extra={"response": content, "is_complex": is_complex}) + + return is_complex + + except Exception as e: + logger.error(f"Error in question_confirm: {e}") return True +@traceroot.trace() async def summary_task(agent: ListenChatAgent, task: Task) -> str: prompt = f"""The user's task is: --- @@ -324,13 +870,100 @@ Your instructions are: Example format: "Task Name|This is the summary of the task." Do not include any other text or formatting. """ + logger.debug("Generating task summary", extra={"task_id": task.id}) + try: + res = agent.step(prompt) + summary = res.msgs[0].content + logger.info("Task summary generated", extra={"summary": summary}) + return summary + except Exception as e: + logger.error("Error generating task summary", extra={"error": str(e)}, exc_info=True) + raise + + +async def summary_subtasks_result(agent: ListenChatAgent, task: Task) -> str: + """ + Summarize the aggregated results from all subtasks into a concise summary. + + Args: + agent: The summary agent to use + task: The main task containing subtasks and their aggregated results + + Returns: + A concise summary of all subtask results + """ + subtasks_info = "" + for i, subtask in enumerate(task.subtasks, 1): + subtasks_info += f"\n**Subtask {i}**\n" + subtasks_info += f"Description: {subtask.content}\n" + subtasks_info += f"Result: {subtask.result or 'No result'}\n" + subtasks_info += "---\n" + + prompt = f"""You are a professional summarizer. Summarize the results of the following subtasks. + +Main Task: {task.content} + +Subtasks (with descriptions and results): +--- +{subtasks_info} +--- + +Instructions: +1. Provide a concise summary of what was accomplished +2. Highlight key findings or outputs from each subtask +3. Mention any important files created or actions taken +4. Use bullet points or sections for clarity +5. DO NOT repeat the task name in your summary - go straight to the results +6. Keep it professional but conversational + +Summary: +""" + res = agent.step(prompt) - logger.info(f"summary_task: {res.msgs[0].content}") - return res.msgs[0].content + summary = res.msgs[0].content + + logger.info(f"Generated subtasks summary for task {task.id} with {len(task.subtasks)} subtasks") + + return summary +async def get_task_result_with_optional_summary(task: Task, options: Chat) -> str: + """ + Get the task result, with LLM summary if there are multiple subtasks. + + Args: + task: The task to get result from + options: Chat options for creating summary agent + + Returns: + The task result (summarized if multiple subtasks, raw otherwise) + """ + result = str(task.result or "") + + if task.subtasks and len(task.subtasks) > 1: + logger.info(f"Task {task.id} has {len(task.subtasks)} subtasks, generating summary") + try: + summary_agent = task_summary_agent(options) + summarized_result = await summary_subtasks_result(summary_agent, task) + result = summarized_result + logger.info(f"Successfully generated summary for task {task.id}") + except Exception as e: + logger.error(f"Failed to generate summary for task {task.id}: {e}") + elif task.subtasks and len(task.subtasks) == 1: + logger.info(f"Task {task.id} has only 1 subtask, skipping LLM summary") + if result and "--- Subtask" in result and "Result ---" in result: + parts = result.split("Result ---", 1) + if len(parts) > 1: + result = parts[1].strip() + + return result + + +@traceroot.trace() async def construct_workforce(options: Chat) -> tuple[Workforce, ListenChatAgent]: - working_directory = options.file_save_path() + logger.info("Constructing workforce", extra={"project_id": options.project_id, "task_id": options.task_id}) + working_directory = get_working_directory(options) + logger.debug("Working directory set", extra={"working_directory": working_directory}) [coordinator_agent, task_agent] = [ agent_model( key, @@ -339,8 +972,8 @@ async def construct_workforce(options: Chat) -> tuple[Workforce, ListenChatAgent [ *( ToolkitMessageIntegration( - message_handler=HumanToolkit(options.task_id, key).send_message_to_user - ).register_toolkits(NoteTakingToolkit(options.task_id, working_directory=working_directory)) + message_handler=HumanToolkit(options.project_id, key).send_message_to_user + ).register_toolkits(NoteTakingToolkit(options.project_id, working_directory=working_directory)) ).get_tools() ], ) @@ -373,11 +1006,11 @@ The current date is {datetime.date.today()}. For any date-related tasks, you MUS """, options, [ - *HumanToolkit.get_can_use_tools(options.task_id, Agents.new_worker_agent), + *HumanToolkit.get_can_use_tools(options.project_id, Agents.new_worker_agent), *( ToolkitMessageIntegration( - message_handler=HumanToolkit(options.task_id, Agents.new_worker_agent).send_message_to_user - ).register_toolkits(NoteTakingToolkit(options.task_id, working_directory=working_directory)) + message_handler=HumanToolkit(options.project_id, Agents.new_worker_agent).send_message_to_user + ).register_toolkits(NoteTakingToolkit(options.project_id, working_directory=working_directory)) ).get_tools(), ], ) @@ -402,7 +1035,7 @@ The current date is {datetime.date.today()}. For any date-related tasks, you MUS model_platform_enum = None workforce = Workforce( - options.task_id, + options.project_id, "A workforce", graceful_shutdown_timeout=3, # 30 seconds for debugging share_memory=False, @@ -481,10 +1114,13 @@ def format_agent_description(agent_data: NewAgent | ActionNewAgent) -> str: return " ".join(description_parts) +@traceroot.trace() async def new_agent_model(data: NewAgent | ActionNewAgent, options: Chat): - working_directory = options.file_save_path() + logger.info("Creating new agent", extra={"agent_name": data.name, "project_id": options.project_id, "task_id": options.task_id}) + logger.debug("New agent data", extra={"agent_data": data.model_dump_json()}) + working_directory = get_working_directory(options) tool_names = [] - tools = [*await get_toolkits(data.tools, data.name, options.task_id)] + tools = [*await get_toolkits(data.tools, data.name, options.project_id)] for item in data.tools: tool_names.append(titleize(item)) if data.mcp_tools is not None: @@ -492,7 +1128,8 @@ async def new_agent_model(data: NewAgent | ActionNewAgent, options: Chat): for item in data.mcp_tools["mcpServers"].keys(): tool_names.append(titleize(item)) for item in tools: - logger.debug(f"new agent function tool ====== {item.func.__name__}") + logger.debug(f"Agent {data.name} tool: {item.func.__name__}") + logger.info(f"Agent {data.name} created with {len(tools)} tools: {tool_names}") # Enhanced system message with platform information enhanced_description = f"""{data.description} - You are now working in system {platform.system()} with architecture diff --git a/backend/app/service/task.py b/backend/app/service/task.py index 5e68708ed..48fcffd1e 100644 --- a/backend/app/service/task.py +++ b/backend/app/service/task.py @@ -1,4 +1,5 @@ from typing_extensions import Any, Literal, TypedDict +from typing import List, Dict, Optional from pydantic import BaseModel from app.exception.exception import ProgramException from app.model.chat import McpServers, Status, SupplementChat, Chat, UpdateData @@ -9,13 +10,16 @@ from contextlib import contextmanager from contextvars import ContextVar from datetime import datetime, timedelta import weakref -from loguru import logger +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("task_service") class Action(str, Enum): improve = "improve" # user -> backend update_task = "update_task" # user -> backend task_state = "task_state" # backend -> user + new_task_state = "new_task_state" # backend -> user start = "start" # user -> backend create_agent = "create_agent" # backend -> user activate_agent = "activate_agent" # backend -> user @@ -36,6 +40,9 @@ class Action(str, Enum): resume = "resume" # user -> backend user take control new_agent = "new_agent" # user -> backend budget_not_enough = "budget_not_enough" # backend -> user + add_task = "add_task" # user -> backend + remove_task = "remove_task" # user -> backend + skip_task = "skip_task" # user -> backend class ActionImproveData(BaseModel): @@ -56,6 +63,10 @@ class ActionTaskStateData(BaseModel): action: Literal[Action.task_state] = Action.task_state data: dict[Literal["task_id", "content", "state", "result", "failure_count"], str | int] +class ActionNewTaskStateData(BaseModel): + action: Literal[Action.new_task_state] = Action.new_task_state + data: dict[Literal["task_id", "content", "state", "result", "failure_count"], str | int] + class ActionAskData(BaseModel): action: Literal[Action.ask] = Action.ask @@ -169,6 +180,26 @@ class ActionBudgetNotEnough(BaseModel): action: Literal[Action.budget_not_enough] = Action.budget_not_enough +class ActionAddTaskData(BaseModel): + action: Literal[Action.add_task] = Action.add_task + content: str + project_id: str | None = None + task_id: str | None = None + additional_info: dict | None = None + insert_position: int = -1 + + +class ActionRemoveTaskData(BaseModel): + action: Literal[Action.remove_task] = Action.remove_task + task_id: str + project_id: str + + +class ActionSkipTaskData(BaseModel): + action: Literal[Action.skip_task] = Action.skip_task + project_id: str + + ActionData = ( ActionImproveData | ActionStartData @@ -192,6 +223,9 @@ ActionData = ( | ActionTakeControl | ActionNewAgent | ActionBudgetNotEnough + | ActionAddTaskData + | ActionRemoveTaskData + | ActionSkipTaskData ) @@ -221,6 +255,16 @@ class TaskLock: background_tasks: set[asyncio.Task] """Track all background tasks for cleanup""" + # Context management fields + conversation_history: List[Dict[str, Any]] + """Store conversation history for context""" + last_task_result: str + """Store the last task execution result""" + question_agent: Optional[Any] + """Persistent question confirmation agent""" + summary_generated: bool + """Track if summary has been generated for this project""" + def __init__(self, id: str, queue: asyncio.Queue, human_input: dict) -> None: self.id = id self.queue = queue @@ -229,6 +273,12 @@ class TaskLock: self.last_accessed = datetime.now() self.background_tasks = set() + # Initialize context management fields + self.conversation_history = [] + self.last_task_result = "" + self.last_task_summary = "" + self.question_agent = None + async def put_queue(self, data: ActionData): self.last_accessed = datetime.now() await self.queue.put(data) @@ -262,6 +312,25 @@ class TaskLock: pass self.background_tasks.clear() + def add_conversation(self, role: str, content: str | dict): + """Add a conversation entry to history""" + self.conversation_history.append({ + 'role': role, + 'content': content, + 'timestamp': datetime.now().isoformat() + }) + + def get_recent_context(self, max_entries: int = None) -> str: + """Get recent conversation context as a formatted string""" + if not self.conversation_history: + return "" + + context = "=== Recent Conversation ===\n" + history_to_use = self.conversation_history if max_entries is None else self.conversation_history[-max_entries:] + for entry in history_to_use: + context += f"{entry['role']}: {entry['content']}\n" + return context + task_locks = dict[str, TaskLock]() # Cleanup task for removing stale task locks @@ -275,6 +344,11 @@ def get_task_lock(id: str) -> TaskLock: return task_locks[id] +def get_task_lock_if_exists(id: str) -> TaskLock | None: + """Get task lock if it exists, otherwise return None""" + return task_locks.get(id) + + def create_task_lock(id: str) -> TaskLock: if id in task_locks: raise ProgramException("Task already exists") @@ -288,6 +362,13 @@ def create_task_lock(id: str) -> TaskLock: return task_locks[id] +def get_or_create_task_lock(id: str) -> TaskLock: + """Get existing task lock or create a new one if it doesn't exist""" + if id in task_locks: + return task_locks[id] + return create_task_lock(id) + + async def delete_task_lock(id: str): if id not in task_locks: raise ProgramException("Task not found") diff --git a/backend/app/utils/__init__.py b/backend/app/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backend/app/utils/agent.py b/backend/app/utils/agent.py index 7ce3c040a..054314daa 100644 --- a/backend/app/utils/agent.py +++ b/backend/app/utils/agent.py @@ -6,7 +6,7 @@ from threading import Event import traceback from typing import Any, Callable, Dict, List, Tuple import uuid -from app.utils import traceroot_wrapper as traceroot +from utils import traceroot_wrapper as traceroot from camel.agents import ChatAgent from camel.agents.chat_agent import StreamingChatAgentResponse, AsyncStreamingChatAgentResponse from camel.agents._types import ToolCallRequest @@ -18,6 +18,7 @@ from camel.terminators import ResponseTerminator from camel.toolkits import FunctionTool, RegisteredAgentToolkit from camel.types.agents import ToolCallingRecord from app.component.environment import env +from app.utils.file_utils import get_working_directory from app.utils.toolkit.abstract_toolkit import AbstractToolkit from app.utils.toolkit.hybrid_browser_toolkit import HybridBrowserToolkit from app.utils.toolkit.excel_toolkit import ExcelToolkit @@ -50,7 +51,6 @@ from camel.types import ModelPlatformType, ModelType from camel.toolkits import MCPToolkit, ToolkitMessageIntegration import datetime from pydantic import BaseModel -from loguru import logger from app.model.chat import Chat, McpServers # Create traceroot logger for agent tracking @@ -173,7 +173,6 @@ class ListenChatAgent(ChatAgent): except Exception as e: res = None error_info = e - logger.exception(e) traceroot_logger.error(f"Agent {self.agent_name} unexpected error in step: {e}", exc_info=True) message = f"Error processing message: {e!s}" total_tokens = 0 @@ -248,8 +247,7 @@ class ListenChatAgent(ChatAgent): except Exception as e: res = None error_info = e - logger.exception(e) - traceroot_logger.error(f"Agent {self.agent_name} unexpected error in step: {e}", exc_info=True) + traceroot_logger.error(f"Agent {self.agent_name} unexpected error in async step: {e}", exc_info=True) message = f"Error processing message: {e!s}" total_tokens = 0 @@ -325,6 +323,17 @@ class ListenChatAgent(ChatAgent): else: result = raw_result mask_flag = False + # Prepare result message with truncation + if isinstance(result, str): + result_msg = result + else: + result_str = repr(result) + MAX_RESULT_LENGTH = 500 + if len(result_str) > MAX_RESULT_LENGTH: + result_msg = result_str[:MAX_RESULT_LENGTH] + f"... (truncated, total length: {len(result_str)} chars)" + else: + result_msg = result_str + asyncio.create_task( task_lock.put_queue( ActionDeactivateToolkitData( @@ -333,7 +342,7 @@ class ListenChatAgent(ChatAgent): "process_task_id": self.process_task_id, "toolkit_name": toolkit_name, "method_name": func_name, - "message": result if isinstance(result, str) else repr(result), + "message": result_msg, }, ) ) @@ -343,9 +352,7 @@ class ListenChatAgent(ChatAgent): error_msg = f"Error executing tool '{func_name}': {e!s}" result = f"Tool execution failed: {error_msg}" mask_flag = False - logger.debug(error_msg) - traceroot_logger.error(f"Tool execution failed for {func_name}: {e}") - traceback.print_exc() + traceroot_logger.error(f"Tool execution failed for {func_name}: {e}", exc_info=True) return self._record_tool_calling(func_name, args, result, tool_call_id, mask_output=mask_flag) @@ -405,9 +412,18 @@ class ListenChatAgent(ChatAgent): # Capture the error message to prevent framework crash error_msg = f"Error executing async tool '{func_name}': {e!s}" result = {"error": error_msg} - logger.warning(error_msg) - traceroot_logger.error(f"Async tool execution failed for {func_name}: {e}") - traceback.print_exc() + traceroot_logger.error(f"Async tool execution failed for {func_name}: {e}", exc_info=True) + + # Prepare result message with truncation + if isinstance(result, str): + result_msg = result + else: + result_str = repr(result) + MAX_RESULT_LENGTH = 500 + if len(result_str) > MAX_RESULT_LENGTH: + result_msg = result_str[:MAX_RESULT_LENGTH] + f"... (truncated, total length: {len(result_str)} chars)" + else: + result_msg = result_str await task_lock.put_queue( ActionDeactivateToolkitData( @@ -416,7 +432,7 @@ class ListenChatAgent(ChatAgent): "process_task_id": self.process_task_id, "toolkit_name": toolkit_name, "method_name": func_name, - "message": result if isinstance(result, str) else repr(result), + "message": result_msg, }, ) ) @@ -429,7 +445,7 @@ class ListenChatAgent(ChatAgent): # Clone tools and collect toolkits that need registration cloned_tools, toolkits_to_register = self._clone_tools() - + new_agent = ListenChatAgent( api_task_id=self.api_task_id, agent_name=self.agent_name, @@ -445,7 +461,6 @@ class ListenChatAgent(ChatAgent): response_terminators=self.response_terminators, scheduling_strategy=self.model_backend.scheduling_strategy.__name__, max_iteration=self.max_iteration, - agent_id=self.agent_id, stop_event=self.stop_event, tool_execution_timeout=self.tool_execution_timeout, mask_tool_output=self.mask_tool_output, @@ -476,9 +491,9 @@ def agent_model( tool_names: list[str] | None = None, toolkits_to_register_agent: list[RegisteredAgentToolkit] | None = None, ): - task_lock = get_task_lock(options.task_id) + task_lock = get_task_lock(options.project_id) agent_id = str(uuid.uuid4()) - traceroot_logger.info(f"Creating agent: {agent_name} with id: {agent_id} for task: {options.task_id}") + traceroot_logger.info(f"Creating agent: {agent_name} with id: {agent_id} for project: {options.project_id}") asyncio.create_task( task_lock.put_queue( ActionCreateAgentData(data={"agent_name": agent_name, "agent_id": agent_id, "tools": tool_names or []}) @@ -486,7 +501,7 @@ def agent_model( ) return ListenChatAgent( - options.task_id, + options.project_id, agent_name, system_message, model=ModelFactory.create( @@ -495,7 +510,7 @@ def agent_model( api_key=options.api_key, url=options.api_url, model_config_dict={ - "user": str(options.task_id), + "user": str(options.project_id), } if options.is_cloud() else None, @@ -533,24 +548,24 @@ def task_summary_agent(options: Chat): @traceroot.trace() async def developer_agent(options: Chat): - working_directory = options.file_save_path() - traceroot_logger.info(f"Creating developer agent for task: {options.task_id} in directory: {working_directory}") + working_directory = get_working_directory(options) + traceroot_logger.info(f"Creating developer agent for project: {options.project_id} in directory: {working_directory}") message_integration = ToolkitMessageIntegration( - message_handler=HumanToolkit(options.task_id, Agents.developer_agent).send_message_to_user + message_handler=HumanToolkit(options.project_id, Agents.developer_agent).send_message_to_user ) note_toolkit = NoteTakingToolkit( - api_task_id=options.task_id, agent_name=Agents.developer_agent, working_directory=working_directory + api_task_id=options.project_id, agent_name=Agents.developer_agent, working_directory=working_directory ) note_toolkit = message_integration.register_toolkits(note_toolkit) - web_deploy_toolkit = WebDeployToolkit(api_task_id=options.task_id) + web_deploy_toolkit = WebDeployToolkit(api_task_id=options.project_id) web_deploy_toolkit = message_integration.register_toolkits(web_deploy_toolkit) - screenshot_toolkit = ScreenshotToolkit(options.task_id, working_directory=working_directory) + screenshot_toolkit = ScreenshotToolkit(options.project_id, working_directory=working_directory) screenshot_toolkit = message_integration.register_toolkits(screenshot_toolkit) - terminal_toolkit = TerminalToolkit(options.task_id, Agents.document_agent, safe_mode=True, clone_current_env=False) + terminal_toolkit = TerminalToolkit(options.project_id, Agents.document_agent, safe_mode=True, clone_current_env=False) terminal_toolkit = message_integration.register_toolkits(terminal_toolkit) tools = [ - *HumanToolkit.get_can_use_tools(options.task_id, Agents.developer_agent), + *HumanToolkit.get_can_use_tools(options.project_id, Agents.developer_agent), *note_toolkit.get_tools(), *web_deploy_toolkit.get_tools(), *terminal_toolkit.get_tools(), @@ -704,14 +719,14 @@ these tips to maximize your effectiveness: @traceroot.trace() def search_agent(options: Chat): - working_directory = options.file_save_path() - traceroot_logger.info(f"Creating search agent for task: {options.task_id} in directory: {working_directory}") + working_directory = get_working_directory(options) + traceroot_logger.info(f"Creating search agent for project: {options.project_id} in directory: {working_directory}") message_integration = ToolkitMessageIntegration( - message_handler=HumanToolkit(options.task_id, Agents.search_agent).send_message_to_user + message_handler=HumanToolkit(options.project_id, Agents.search_agent).send_message_to_user ) web_toolkit_custom = HybridBrowserToolkit( - options.task_id, + options.project_id, headless=False, browser_log_to_file=True, stealth=True, @@ -731,12 +746,14 @@ def search_agent(options: Chat): ], ) + # Save reference before registering for toolkits_to_register_agent + web_toolkit_for_agent_registration = web_toolkit_custom web_toolkit_custom = message_integration.register_toolkits(web_toolkit_custom) - terminal_toolkit = TerminalToolkit(options.task_id, Agents.search_agent, safe_mode=True, clone_current_env=False) + terminal_toolkit = TerminalToolkit(options.project_id, Agents.search_agent, safe_mode=True, clone_current_env=False) terminal_toolkit = message_integration.register_functions([terminal_toolkit.shell_exec]) - note_toolkit = NoteTakingToolkit(options.task_id, Agents.search_agent, working_directory=working_directory) + note_toolkit = NoteTakingToolkit(options.project_id, Agents.search_agent, working_directory=working_directory) note_toolkit = message_integration.register_toolkits(note_toolkit) - search_tools = SearchToolkit.get_can_use_tools(options.task_id) + search_tools = SearchToolkit.get_can_use_tools(options.project_id) # Only register search tools if any are available if search_tools: search_tools = message_integration.register_functions(search_tools) @@ -744,7 +761,7 @@ def search_agent(options: Chat): search_tools = [] tools = [ - *HumanToolkit.get_can_use_tools(options.task_id, Agents.search_agent), + *HumanToolkit.get_can_use_tools(options.project_id, Agents.search_agent), *web_toolkit_custom.get_tools(), *terminal_toolkit, *note_toolkit.get_tools(), @@ -795,7 +812,7 @@ The current date is {NOW_STR}(Accurate to the hour). For any date-related tasks, - **CRITICAL URL POLICY**: You are STRICTLY FORBIDDEN from inventing, guessing, or constructing URLs yourself. You MUST only use URLs from trusted sources: - 1. URLs returned by search tools (like `search_google` or `search_exa`) + 1. URLs returned by search tools (`search_google`) 2. URLs found on webpages you have visited through browser tools 3. URLs provided by the user in their request Fabricating or guessing URLs is considered a critical error and must @@ -841,8 +858,6 @@ Your approach depends on available search tools: sites using `browser_type` and submit with `browser_enter` - **Extract URLs from results**: Only use URLs that appear in the search results on these websites -- **Alternative Search**: If available, use `search_exa` for additional - results **Common Browser Operations (both scenarios):** - **Navigation and Exploration**: Use `browser_visit_page` to open URLs. @@ -879,41 +894,42 @@ Your approach depends on available search tools: NoteTakingToolkit.toolkit_name(), TerminalToolkit.toolkit_name(), ], + toolkits_to_register_agent=[web_toolkit_for_agent_registration], ) @traceroot.trace() async def document_agent(options: Chat): - working_directory = options.file_save_path() - traceroot_logger.info(f"Creating document agent for task: {options.task_id} in directory: {working_directory}") + working_directory = get_working_directory(options) + traceroot_logger.info(f"Creating document agent for project: {options.project_id} in directory: {working_directory}") message_integration = ToolkitMessageIntegration( - message_handler=HumanToolkit(options.task_id, Agents.task_agent).send_message_to_user + message_handler=HumanToolkit(options.project_id, Agents.task_agent).send_message_to_user ) - file_write_toolkit = FileToolkit(options.task_id, working_directory=working_directory) - pptx_toolkit = PPTXToolkit(options.task_id, working_directory=working_directory) + file_write_toolkit = FileToolkit(options.project_id, working_directory=working_directory) + pptx_toolkit = PPTXToolkit(options.project_id, working_directory=working_directory) pptx_toolkit = message_integration.register_toolkits(pptx_toolkit) - mark_it_down_toolkit = MarkItDownToolkit(options.task_id) + mark_it_down_toolkit = MarkItDownToolkit(options.project_id) mark_it_down_toolkit = message_integration.register_toolkits(mark_it_down_toolkit) - excel_toolkit = ExcelToolkit(options.task_id, working_directory=working_directory) + excel_toolkit = ExcelToolkit(options.project_id, working_directory=working_directory) excel_toolkit = message_integration.register_toolkits(excel_toolkit) - note_toolkit = NoteTakingToolkit(options.task_id, Agents.document_agent, working_directory=working_directory) + note_toolkit = NoteTakingToolkit(options.project_id, Agents.document_agent, working_directory=working_directory) note_toolkit = message_integration.register_toolkits(note_toolkit) - terminal_toolkit = TerminalToolkit(options.task_id, Agents.document_agent, safe_mode=True, clone_current_env=False) + terminal_toolkit = TerminalToolkit(options.project_id, Agents.document_agent, safe_mode=True, clone_current_env=False) terminal_toolkit = message_integration.register_toolkits(terminal_toolkit) tools = [ *file_write_toolkit.get_tools(), *pptx_toolkit.get_tools(), - *HumanToolkit.get_can_use_tools(options.task_id, Agents.document_agent), + *HumanToolkit.get_can_use_tools(options.project_id, Agents.document_agent), *mark_it_down_toolkit.get_tools(), *excel_toolkit.get_tools(), *note_toolkit.get_tools(), *terminal_toolkit.get_tools(), - *await GoogleDriveMCPToolkit.get_can_use_tools(options.task_id, options.get_bun_env()), + *await GoogleDriveMCPToolkit.get_can_use_tools(options.project_id, options.get_bun_env()), ] - if env("EXA_API_KEY") or options.is_cloud(): - search_toolkit = SearchToolkit(options.task_id, Agents.document_agent).search_exa - search_toolkit = message_integration.register_functions([search_toolkit]) - tools.extend(search_toolkit) + # if env("EXA_API_KEY") or options.is_cloud(): + # search_toolkit = SearchToolkit(options.project_id, Agents.document_agent).search_exa + # search_toolkit = message_integration.register_functions([search_toolkit]) + # tools.extend(search_toolkit) system_message = f""" You are a Documentation Specialist, responsible for creating, modifying, and @@ -1085,32 +1101,32 @@ supported formats including advanced spreadsheet functionality. @traceroot.trace() def multi_modal_agent(options: Chat): - working_directory = options.file_save_path() - traceroot_logger.info(f"Creating multi-modal agent for task: {options.task_id} in directory: {working_directory}") + working_directory = get_working_directory(options) + traceroot_logger.info(f"Creating multi-modal agent for project: {options.project_id} in directory: {working_directory}") message_integration = ToolkitMessageIntegration( - message_handler=HumanToolkit(options.task_id, Agents.multi_modal_agent).send_message_to_user + message_handler=HumanToolkit(options.project_id, Agents.multi_modal_agent).send_message_to_user ) - video_download_toolkit = VideoDownloaderToolkit(options.task_id, working_directory=working_directory) + video_download_toolkit = VideoDownloaderToolkit(options.project_id, working_directory=working_directory) video_download_toolkit = message_integration.register_toolkits(video_download_toolkit) - image_analysis_toolkit = ImageAnalysisToolkit(options.task_id) + image_analysis_toolkit = ImageAnalysisToolkit(options.project_id) image_analysis_toolkit = message_integration.register_toolkits(image_analysis_toolkit) terminal_toolkit = TerminalToolkit( - options.task_id, agent_name=Agents.multi_modal_agent, safe_mode=True, clone_current_env=False + options.project_id, agent_name=Agents.multi_modal_agent, safe_mode=True, clone_current_env=False ) terminal_toolkit = message_integration.register_toolkits(terminal_toolkit) - note_toolkit = NoteTakingToolkit(options.task_id, Agents.multi_modal_agent, working_directory=working_directory) + note_toolkit = NoteTakingToolkit(options.project_id, Agents.multi_modal_agent, working_directory=working_directory) note_toolkit = message_integration.register_toolkits(note_toolkit) tools = [ *video_download_toolkit.get_tools(), *image_analysis_toolkit.get_tools(), - *HumanToolkit.get_can_use_tools(options.task_id, Agents.multi_modal_agent), + *HumanToolkit.get_can_use_tools(options.project_id, Agents.multi_modal_agent), *terminal_toolkit.get_tools(), *note_toolkit.get_tools(), ] if options.is_cloud(): open_ai_image_toolkit = OpenAIImageToolkit( # todo check llm has this model - options.task_id, + options.project_id, model="dall-e-3", response_format="b64_json", size="1024x1024", @@ -1132,7 +1148,7 @@ def multi_modal_agent(options: Chat): if model_platform_enum == ModelPlatformType.OPENAI: audio_analysis_toolkit = AudioAnalysisToolkit( - options.task_id, + options.project_id, working_directory, OpenAIAudioModels( api_key=options.api_key, @@ -1142,10 +1158,10 @@ def multi_modal_agent(options: Chat): audio_analysis_toolkit = message_integration.register_toolkits(audio_analysis_toolkit) tools.extend(audio_analysis_toolkit.get_tools()) - if env("EXA_API_KEY") or options.is_cloud(): - search_toolkit = SearchToolkit(options.task_id, Agents.multi_modal_agent).search_exa - search_toolkit = message_integration.register_functions([search_toolkit]) - tools.extend(search_toolkit) + # if env("EXA_API_KEY") or options.is_cloud(): + # search_toolkit = SearchToolkit(options.project_id, Agents.multi_modal_agent).search_exa + # search_toolkit = message_integration.register_functions([search_toolkit]) + # tools.extend(search_toolkit) system_message = f""" @@ -1255,27 +1271,27 @@ async def social_medium_agent(options: Chat): Agent to handling tasks related to social media: include toolkits: WhatsApp, Twitter, LinkedIn, Reddit, Notion, Slack, Discord and Google Suite. """ - working_directory = options.file_save_path() - traceroot_logger.info(f"Creating social medium agent for task: {options.task_id} in directory: {working_directory}") + working_directory = get_working_directory(options) + traceroot_logger.info(f"Creating social medium agent for project: {options.project_id} in directory: {working_directory}") tools = [ - *WhatsAppToolkit.get_can_use_tools(options.task_id), - *TwitterToolkit.get_can_use_tools(options.task_id), - *LinkedInToolkit.get_can_use_tools(options.task_id), - *RedditToolkit.get_can_use_tools(options.task_id), - *await NotionMCPToolkit.get_can_use_tools(options.task_id), - # *SlackToolkit.get_can_use_tools(options.task_id), - *await GoogleGmailMCPToolkit.get_can_use_tools(options.task_id, options.get_bun_env()), - *GoogleCalendarToolkit.get_can_use_tools(options.task_id), - *HumanToolkit.get_can_use_tools(options.task_id, Agents.social_medium_agent), - *TerminalToolkit(options.task_id, agent_name=Agents.social_medium_agent, clone_current_env=False).get_tools(), + *WhatsAppToolkit.get_can_use_tools(options.project_id), + *TwitterToolkit.get_can_use_tools(options.project_id), + *LinkedInToolkit.get_can_use_tools(options.project_id), + *RedditToolkit.get_can_use_tools(options.project_id), + *await NotionMCPToolkit.get_can_use_tools(options.project_id), + # *SlackToolkit.get_can_use_tools(options.project_id), + *await GoogleGmailMCPToolkit.get_can_use_tools(options.project_id, options.get_bun_env()), + *GoogleCalendarToolkit.get_can_use_tools(options.project_id), + *HumanToolkit.get_can_use_tools(options.project_id, Agents.social_medium_agent), + *TerminalToolkit(options.project_id, agent_name=Agents.social_medium_agent, clone_current_env=False).get_tools(), *NoteTakingToolkit( - options.task_id, Agents.social_medium_agent, working_directory=working_directory + options.project_id, Agents.social_medium_agent, working_directory=working_directory ).get_tools(), - # *DiscordToolkit(options.task_id).get_tools(), # Not supported temporarily - # *GoogleSuiteToolkit(options.task_id).get_tools(), # Not supported temporarily + # *DiscordToolkit(options.project_id).get_tools(), # Not supported temporarily + # *GoogleSuiteToolkit(options.project_id).get_tools(), # Not supported temporarily ] - if env("EXA_API_KEY") or options.is_cloud(): - tools.append(FunctionTool(SearchToolkit(options.task_id, Agents.social_medium_agent).search_exa)) + # if env("EXA_API_KEY") or options.is_cloud(): + # tools.append(FunctionTool(SearchToolkit(options.project_id, Agents.social_medium_agent).search_exa)) return agent_model( Agents.social_medium_agent, BaseMessage.make_assistant_message( @@ -1371,16 +1387,16 @@ operations. @traceroot.trace() async def mcp_agent(options: Chat): traceroot_logger.info( - f"Creating MCP agent for task: {options.task_id} with {len(options.installed_mcp['mcpServers'])} MCP servers" + f"Creating MCP agent for project: {options.project_id} with {len(options.installed_mcp['mcpServers'])} MCP servers" ) tools = [ - # *HumanToolkit.get_can_use_tools(options.task_id, Agents.mcp_agent), - *McpSearchToolkit(options.task_id).get_tools(), + # *HumanToolkit.get_can_use_tools(options.project_id, Agents.mcp_agent), + *McpSearchToolkit(options.project_id).get_tools(), ] if len(options.installed_mcp["mcpServers"]) > 0: try: mcp_tools = await get_mcp_tools(options.installed_mcp) - traceroot_logger.info(f"Retrieved {len(mcp_tools)} MCP tools for task {options.task_id}") + traceroot_logger.info(f"Retrieved {len(mcp_tools)} MCP tools for task {options.project_id}") if mcp_tools: tool_names = [tool.get_function_name() if hasattr(tool, 'get_function_name') else str(tool) for tool in mcp_tools] traceroot_logger.debug(f"MCP tools: {tool_names}") @@ -1388,9 +1404,9 @@ async def mcp_agent(options: Chat): except Exception as e: traceroot_logger.debug(repr(e)) - task_lock = get_task_lock(options.task_id) + task_lock = get_task_lock(options.project_id) agent_id = str(uuid.uuid4()) - traceroot_logger.info(f"Creating MCP agent: {Agents.mcp_agent} with id: {agent_id} for task: {options.task_id}") + traceroot_logger.info(f"Creating MCP agent: {Agents.mcp_agent} with id: {agent_id} for task: {options.project_id}") asyncio.create_task( task_lock.put_queue( ActionCreateAgentData( @@ -1403,7 +1419,7 @@ async def mcp_agent(options: Chat): ) ) return ListenChatAgent( - options.task_id, + options.project_id, Agents.mcp_agent, system_message="You are a helpful assistant that can help users search mcp servers. The found mcp services will be returned to the user, and you will ask the user via ask_human_via_gui whether they want to install these mcp services.", model=ModelFactory.create( @@ -1412,7 +1428,7 @@ async def mcp_agent(options: Chat): api_key=options.api_key, url=options.api_url, model_config_dict={ - "user": str(options.task_id), + "user": str(options.project_id), } if options.is_cloud() else None, diff --git a/backend/app/utils/file_utils.py b/backend/app/utils/file_utils.py new file mode 100644 index 000000000..caceb9847 --- /dev/null +++ b/backend/app/utils/file_utils.py @@ -0,0 +1,20 @@ +"""File system utilities.""" + +from app.component.environment import env +from app.model.chat import Chat + + +def get_working_directory(options: Chat, task_lock=None) -> str: + """ + Get the correct working directory for file operations. + First checks if there's an updated path from improve API call, + then falls back to environment variable or default path. + """ + if not task_lock: + from app.service.task import get_task_lock_if_exists + task_lock = get_task_lock_if_exists(options.project_id) + + if task_lock and hasattr(task_lock, 'new_folder_path') and task_lock.new_folder_path: + return str(task_lock.new_folder_path) + else: + return env("file_save_path", options.file_save_path()) \ No newline at end of file diff --git a/backend/app/utils/listen/toolkit_listen.py b/backend/app/utils/listen/toolkit_listen.py index 77079c7c5..fb5b9bf5f 100644 --- a/backend/app/utils/listen/toolkit_listen.py +++ b/backend/app/utils/listen/toolkit_listen.py @@ -1,10 +1,11 @@ import asyncio from functools import wraps -from inspect import iscoroutinefunction +from inspect import iscoroutinefunction, getmembers, ismethod, signature import json -from typing import Any, Callable +from typing import Any, Callable, Type, TypeVar +import threading +from concurrent.futures import ThreadPoolExecutor -from loguru import logger from app.service.task import ( ActionActivateToolkitData, ActionDeactivateToolkitData, @@ -12,6 +13,41 @@ from app.service.task import ( ) from app.utils.toolkit.abstract_toolkit import AbstractToolkit from app.service.task import process_task +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("toolkit_listen") + + +def _safe_put_queue(task_lock, data): + """Safely put data to the queue, handling both sync and async contexts""" + try: + # Try to get current event loop + loop = asyncio.get_running_loop() + # We're in an async context, create a task + task = asyncio.create_task(task_lock.put_queue(data)) + if hasattr(task_lock, "add_background_task"): + task_lock.add_background_task(task) + except RuntimeError: + # No running event loop, we need to handle this differently + try: + # Create a new event loop in a separate thread to avoid conflicts + def run_in_thread(): + try: + # Create a new event loop for this thread + new_loop = asyncio.new_event_loop() + asyncio.set_event_loop(new_loop) + try: + new_loop.run_until_complete(task_lock.put_queue(data)) + finally: + new_loop.close() + except Exception as e: + logger.error(f"[listen_toolkit] Failed to send data in thread: {e}") + + # Run in a separate thread to avoid blocking + thread = threading.Thread(target=run_in_thread, daemon=True) + thread.start() + except Exception as e: + logger.error(f"[listen_toolkit] Failed to send data to queue: {e}") def listen_toolkit( @@ -27,6 +63,11 @@ def listen_toolkit( @wraps(wrap) async def async_wrapper(*args, **kwargs): toolkit: AbstractToolkit = args[0] + # Check if api_task_id exists + if not hasattr(toolkit, 'api_task_id'): + logger.warning(f"[listen_toolkit] {toolkit.__class__.__name__} missing api_task_id, calling method directly") + return await func(*args, **kwargs) + task_lock = get_task_lock(toolkit.api_task_id) if inputs is not None: @@ -40,19 +81,23 @@ def listen_toolkit( kwargs_str = ", ".join(f"{k}={v!r}" for k, v in kwargs.items()) args_str = f"{args_str}, {kwargs_str}" if args_str else kwargs_str + # Truncate args_str if too long + MAX_ARGS_LENGTH = 500 + if len(args_str) > MAX_ARGS_LENGTH: + args_str = args_str[:MAX_ARGS_LENGTH] + f"... (truncated, total length: {len(args_str)} chars)" + toolkit_name = toolkit.toolkit_name() method_name = func.__name__.replace("_", " ") - await task_lock.put_queue( - ActionActivateToolkitData( - data={ - "agent_name": toolkit.agent_name, - "process_task_id": process_task.get(""), - "toolkit_name": toolkit_name, - "method_name": method_name, - "message": args_str, - }, - ) + activate_data = ActionActivateToolkitData( + data={ + "agent_name": toolkit.agent_name, + "process_task_id": process_task.get(""), + "toolkit_name": toolkit_name, + "method_name": method_name, + "message": args_str, + }, ) + await task_lock.put_queue(activate_data) error = None res = None try: @@ -70,21 +115,26 @@ def listen_toolkit( res_msg = json.dumps(res, ensure_ascii=False) except TypeError: # Handle cases where res contains non-serializable objects (like coroutines) - res_msg = str(res) + res_str = str(res) + # Truncate very long outputs to avoid flooding logs + MAX_LENGTH = 500 + if len(res_str) > MAX_LENGTH: + res_msg = res_str[:MAX_LENGTH] + f"... (truncated, total length: {len(res_str)} chars)" + else: + res_msg = res_str else: res_msg = str(error) - await task_lock.put_queue( - ActionDeactivateToolkitData( - data={ - "agent_name": toolkit.agent_name, - "process_task_id": process_task.get(""), - "toolkit_name": toolkit_name, - "method_name": method_name, - "message": res_msg, - }, - ) + deactivate_data = ActionDeactivateToolkitData( + data={ + "agent_name": toolkit.agent_name, + "process_task_id": process_task.get(""), + "toolkit_name": toolkit_name, + "method_name": method_name, + "message": res_msg, + }, ) + await task_lock.put_queue(deactivate_data) if error is not None: raise error return res @@ -96,6 +146,11 @@ def listen_toolkit( @wraps(wrap) def sync_wrapper(*args, **kwargs): toolkit: AbstractToolkit = args[0] + # Check if api_task_id exists + if not hasattr(toolkit, 'api_task_id'): + logger.warning(f"[listen_toolkit] {toolkit.__class__.__name__} missing api_task_id, calling method directly") + return func(*args, **kwargs) + task_lock = get_task_lock(toolkit.api_task_id) if inputs is not None: @@ -109,34 +164,34 @@ def listen_toolkit( kwargs_str = ", ".join(f"{k}={v!r}" for k, v in kwargs.items()) args_str = f"{args_str}, {kwargs_str}" if args_str else kwargs_str + # Truncate args_str if too long + MAX_ARGS_LENGTH = 500 + if len(args_str) > MAX_ARGS_LENGTH: + args_str = args_str[:MAX_ARGS_LENGTH] + f"... (truncated, total length: {len(args_str)} chars)" + toolkit_name = toolkit.toolkit_name() method_name = func.__name__.replace("_", " ") - task = asyncio.create_task( - task_lock.put_queue( - ActionActivateToolkitData( - data={ - "agent_name": toolkit.agent_name, - "process_task_id": process_task.get(""), - "toolkit_name": toolkit_name, - "method_name": method_name, - "message": args_str, - }, - ) - ) + activate_data = ActionActivateToolkitData( + data={ + "agent_name": toolkit.agent_name, + "process_task_id": process_task.get(""), + "toolkit_name": toolkit_name, + "method_name": method_name, + "message": args_str, + }, ) - if hasattr(task_lock, "add_background_task"): - task_lock.add_background_task(task) + _safe_put_queue(task_lock, activate_data) error = None res = None try: - logger.debug(f"Executing toolkit method: {toolkit_name}.{method_name} for agent '{toolkit.agent_name}'") res = func(*args, **kwargs) - # Safety check: if the result is a coroutine, we need to await it + # Safety check: if the result is a coroutine, this is a programming error if asyncio.iscoroutine(res): - import warnings - - warnings.warn(f"Async function {func.__name__} was incorrectly called synchronously") - res = asyncio.run(res) + error_msg = f"Async function {func.__name__} was incorrectly called in sync context. This is a bug - the function should be marked as async or should not return a coroutine." + logger.error(f"[listen_toolkit] {error_msg}") + # Cannot safely await in sync context - close the coroutine to prevent warnings + res.close() + raise TypeError(error_msg) except Exception as e: error = e @@ -150,25 +205,26 @@ def listen_toolkit( res_msg = json.dumps(res, ensure_ascii=False) except TypeError: # Handle cases where res contains non-serializable objects (like coroutines) - res_msg = str(res) + res_str = str(res) + # Truncate very long outputs to avoid flooding logs + MAX_LENGTH = 500 + if len(res_str) > MAX_LENGTH: + res_msg = res_str[:MAX_LENGTH] + f"... (truncated, total length: {len(res_str)} chars)" + else: + res_msg = res_str else: res_msg = str(error) - task = asyncio.create_task( - task_lock.put_queue( - ActionDeactivateToolkitData( - data={ - "agent_name": toolkit.agent_name, - "process_task_id": process_task.get(""), - "toolkit_name": toolkit_name, - "method_name": method_name, - "message": res_msg, - }, - ) - ) + deactivate_data = ActionDeactivateToolkitData( + data={ + "agent_name": toolkit.agent_name, + "process_task_id": process_task.get(""), + "toolkit_name": toolkit_name, + "method_name": method_name, + "message": res_msg, + }, ) - if hasattr(task_lock, "add_background_task"): - task_lock.add_background_task(task) + _safe_put_queue(task_lock, deactivate_data) if error is not None: raise error return res @@ -176,3 +232,81 @@ def listen_toolkit( return sync_wrapper return decorator + + +T = TypeVar('T') + +# Methods that should not be wrapped by auto_listen_toolkit +# These are utility/helper methods that don't perform actual tool operations +EXCLUDED_METHODS = { + 'get_tools', # Tool enumeration + 'get_can_use_tools', # Tool filtering + 'toolkit_name', # Metadata getter + 'run_mcp_server', # MCP server initialization + 'model_dump', # Pydantic model serialization + 'model_dump_json', # Pydantic model serialization + 'dict', # Pydantic legacy dict method + 'json', # Pydantic legacy json method + 'copy', # Object copying + 'update', # Object update +} + + +def auto_listen_toolkit(base_toolkit_class: Type[T]) -> Callable[[Type[T]], Type[T]]: + """ + Class decorator that automatically wraps all public methods from the base toolkit + with the @listen_toolkit decorator. + + Excluded methods (not wrapped): + - get_tools, get_can_use_tools: Tool enumeration/filtering + - toolkit_name: Metadata getter + - run_mcp_server: MCP server initialization + - Pydantic serialization methods: model_dump, model_dump_json, dict, json + - Object utility methods: copy, update + + These methods are typically called during initialization or for metadata, + and should not trigger activate/deactivate events. + + Usage: + @auto_listen_toolkit(BaseNoteTakingToolkit) + class NoteTakingToolkit(BaseNoteTakingToolkit, AbstractToolkit): + agent_name: str = Agents.document_agent + """ + def class_decorator(cls: Type[T]) -> Type[T]: + + base_methods = {} + for name in dir(base_toolkit_class): + # Skip private methods and excluded helper methods + if not name.startswith('_') and name not in EXCLUDED_METHODS: + attr = getattr(base_toolkit_class, name) + if callable(attr): + base_methods[name] = attr + + for method_name, base_method in base_methods.items(): + if method_name in cls.__dict__: + continue + + sig = signature(base_method) + + def create_wrapper(method_name: str, base_method: Callable) -> Callable: + if iscoroutinefunction(base_method): + async def async_method_wrapper(self, *args, **kwargs): + return await getattr(super(cls, self), method_name)(*args, **kwargs) + async_method_wrapper.__name__ = method_name + async_method_wrapper.__signature__ = sig + return async_method_wrapper + else: + def sync_method_wrapper(self, *args, **kwargs): + return getattr(super(cls, self), method_name)(*args, **kwargs) + sync_method_wrapper.__name__ = method_name + sync_method_wrapper.__signature__ = sig + return sync_method_wrapper + + wrapper = create_wrapper(method_name, base_method) + decorated_method = listen_toolkit(base_method)(wrapper) + + setattr(cls, method_name, decorated_method) + + return cls + + return class_decorator diff --git a/backend/app/utils/server/sync_step.py b/backend/app/utils/server/sync_step.py index c45714e2f..1f268d3f3 100644 --- a/backend/app/utils/server/sync_step.py +++ b/backend/app/utils/server/sync_step.py @@ -3,9 +3,11 @@ import httpx import asyncio import os import json -from loguru import logger from app.service.chat_service import Chat from app.component.environment import env +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("sync_step") def sync_step(func): @@ -28,7 +30,9 @@ def sync_step(func): send_to_api( sync_url, { - "task_id": chat.task_id, + # TODO: revert to task_id to support multi-task project replay + # "task_id": chat.task_id, + "task_id": chat.project_id, "step": json_data["step"], "data": json_data["data"], }, diff --git a/backend/app/utils/single_agent_worker.py b/backend/app/utils/single_agent_worker.py index e8008e53a..25f81ae62 100644 --- a/backend/app/utils/single_agent_worker.py +++ b/backend/app/utils/single_agent_worker.py @@ -2,11 +2,15 @@ import datetime from camel.agents.chat_agent import AsyncStreamingChatAgentResponse from camel.societies.workforce.single_agent_worker import SingleAgentWorker as BaseSingleAgentWorker from camel.tasks.task import Task, TaskState, is_task_result_insufficient +from utils import traceroot_wrapper as traceroot from app.utils.agent import ListenChatAgent from camel.societies.workforce.prompts import PROCESS_TASK_PROMPT from colorama import Fore from camel.societies.workforce.utils import TaskResult +from camel.utils.context_utils import ContextUtility + +logger = traceroot.get_logger("single_agent_worker") class SingleAgentWorker(BaseSingleAgentWorker): @@ -19,6 +23,8 @@ class SingleAgentWorker(BaseSingleAgentWorker): pool_max_size: int = 10, auto_scale_pool: bool = True, use_structured_output_handler: bool = True, + context_utility: ContextUtility | None = None, + enable_workflow_memory: bool = False, ) -> None: super().__init__( description=description, @@ -28,6 +34,8 @@ class SingleAgentWorker(BaseSingleAgentWorker): pool_max_size=pool_max_size, auto_scale_pool=auto_scale_pool, use_structured_output_handler=use_structured_output_handler, + context_utility=context_utility, + enable_workflow_memory=enable_workflow_memory, ) self.worker = worker # change type hint @@ -54,6 +62,7 @@ class SingleAgentWorker(BaseSingleAgentWorker): worker_agent.process_task_id = task.id # type: ignore rewrite line response_content = "" + final_response = None try: dependency_tasks_info = self._get_dep_tasks_info(dependencies) prompt = PROCESS_TASK_PROMPT.format( @@ -130,8 +139,28 @@ class SingleAgentWorker(BaseSingleAgentWorker): usage_info = response.info.get("usage") or response.info.get("token_usage") total_tokens = usage_info.get("total_tokens", 0) if usage_info else 0 + # collect conversation from working agent to + # accumulator for workflow memory + # Only transfer memory if workflow memory is enabled + if self.enable_workflow_memory: + accumulator = self._get_conversation_accumulator() + + # transfer all memory records from working agent to accumulator + try: + # retrieve all context records from the working agent + work_records = worker_agent.memory.retrieve() + + # write these records to the accumulator's memory + memory_records = [record.memory_record for record in work_records] + accumulator.memory.write_records(memory_records) + + logger.debug(f"Transferred {len(memory_records)} memory records to accumulator") + + except Exception as e: + logger.warning(f"Failed to transfer conversation to accumulator: {e}") + except Exception as e: - print(f"{Fore.RED}Error processing task {task.id}: {type(e).__name__}: {e}{Fore.RESET}") + logger.error(f"Error processing task {task.id}: {type(e).__name__}: {e}") # Store error information in task result task.result = f"{type(e).__name__}: {e!s}" return TaskState.FAILED @@ -144,6 +173,8 @@ class SingleAgentWorker(BaseSingleAgentWorker): task.additional_info = {} # Create worker attempt details with descriptive keys + # Use final_response if available (streaming), otherwise use response + response_for_info = final_response if final_response is not None else response worker_attempt_details = { "agent_id": getattr(worker_agent, "agent_id", worker_agent.role_name), "original_worker_id": getattr(self.worker, "agent_id", self.worker.role_name), @@ -154,11 +185,7 @@ class SingleAgentWorker(BaseSingleAgentWorker): f"{getattr(self.worker, 'agent_id', self.worker.role_name)}) " f"to process task: {task.content}", "response_content": response_content[:50], - "tool_calls": str( - final_response.info.get("tool_calls") - if isinstance(response, AsyncStreamingChatAgentResponse) - else response.info.get("tool_calls") - )[:50], + "tool_calls": str(response_for_info.info.get("tool_calls", []) if response_for_info and hasattr(response_for_info, 'info') else [])[:50], "total_tokens": total_tokens, } @@ -172,9 +199,12 @@ class SingleAgentWorker(BaseSingleAgentWorker): print(f"======\n{Fore.GREEN}Response from {self}:{Fore.RESET}") + logger.info(f"Response from {self}:") + if not self.use_structured_output_handler: # Handle native structured output parsing if task_result is None: + logger.error("Error in worker step execution: Invalid task result") print(f"{Fore.RED}Error in worker step execution: Invalid task result{Fore.RESET}") task_result = TaskResult( content="Failed to generate valid task result.", @@ -186,12 +216,17 @@ class SingleAgentWorker(BaseSingleAgentWorker): f"\n{color}{task_result.content}{Fore.RESET}\n======", # type: ignore[union-attr] ) + if task_result.failed: # type: ignore[union-attr] + logger.error(f"{task_result.content}") # type: ignore[union-attr] + else: + logger.info(f"{task_result.content}") # type: ignore[union-attr] + task.result = task_result.content # type: ignore[union-attr] if task_result.failed: # type: ignore[union-attr] return TaskState.FAILED if is_task_result_insufficient(task): - print(f"{Fore.RED}Task {task.id}: Content validation failed - task marked as failed{Fore.RESET}") + logger.warning(f"Task {task.id}: Content validation failed - task marked as failed") return TaskState.FAILED return TaskState.DONE diff --git a/backend/app/utils/toolkit/audio_analysis_toolkit.py b/backend/app/utils/toolkit/audio_analysis_toolkit.py index ff69d35a5..617110144 100644 --- a/backend/app/utils/toolkit/audio_analysis_toolkit.py +++ b/backend/app/utils/toolkit/audio_analysis_toolkit.py @@ -4,10 +4,11 @@ from camel.toolkits import AudioAnalysisToolkit as BaseAudioAnalysisToolkit from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseAudioAnalysisToolkit) class AudioAnalysisToolkit(BaseAudioAnalysisToolkit, AbstractToolkit): agent_name: str = Agents.multi_modal_agent @@ -23,14 +24,3 @@ class AudioAnalysisToolkit(BaseAudioAnalysisToolkit, AbstractToolkit): cache_dir = env("file_save_path", os.path.expanduser("~/.eigent/tmp/")) super().__init__(cache_dir, transcribe_model, audio_reasoning_model, timeout) self.api_task_id = api_task_id - - @listen_toolkit( - BaseAudioAnalysisToolkit.audio2text, - lambda _, audio_path, question: f"transcribe audio from {audio_path} and ask question: {question}", - ) - def ask_question_about_audio(self, audio_path: str, question: str) -> str: - return super().ask_question_about_audio(audio_path, question) - - @listen_toolkit(BaseAudioAnalysisToolkit.audio2text) - def audio2text(self, audio_path: str) -> str: - return super().audio2text(audio_path) diff --git a/backend/app/utils/toolkit/code_execution_toolkit.py b/backend/app/utils/toolkit/code_execution_toolkit.py index 2e0292a37..4ca394f5b 100644 --- a/backend/app/utils/toolkit/code_execution_toolkit.py +++ b/backend/app/utils/toolkit/code_execution_toolkit.py @@ -1,10 +1,11 @@ from typing import List, Literal from camel.toolkits import CodeExecutionToolkit as BaseCodeExecutionToolkit, FunctionTool from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseCodeExecutionToolkit) class CodeExecutionToolkit(BaseCodeExecutionToolkit, AbstractToolkit): agent_name: str = Agents.developer_agent @@ -21,18 +22,6 @@ class CodeExecutionToolkit(BaseCodeExecutionToolkit, AbstractToolkit): self.api_task_id = api_task_id super().__init__(sandbox, verbose, unsafe_mode, import_white_list, require_confirm, timeout) - @listen_toolkit( - BaseCodeExecutionToolkit.execute_code, - ) - def execute_code(self, code: str, code_type: str = "python") -> str: - return super().execute_code(code, code_type) - - @listen_toolkit( - BaseCodeExecutionToolkit.execute_command, - ) - def execute_command(self, command: str) -> str | tuple[str, str]: - return super().execute_command(command) - def get_tools(self) -> List[FunctionTool]: return [ FunctionTool(self.execute_code), diff --git a/backend/app/utils/toolkit/craw4ai_toolkit.py b/backend/app/utils/toolkit/craw4ai_toolkit.py index a422465dd..65d1b51c7 100644 --- a/backend/app/utils/toolkit/craw4ai_toolkit.py +++ b/backend/app/utils/toolkit/craw4ai_toolkit.py @@ -1,10 +1,11 @@ from camel.toolkits import Crawl4AIToolkit as BaseCrawl4AIToolkit from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseCrawl4AIToolkit) class Crawl4AIToolkit(BaseCrawl4AIToolkit, AbstractToolkit): agent_name: str = Agents.search_agent @@ -12,18 +13,5 @@ class Crawl4AIToolkit(BaseCrawl4AIToolkit, AbstractToolkit): self.api_task_id = api_task_id super().__init__(timeout) - # async def _get_client(self): - # r"""Get or create the AsyncWebCrawler client.""" - # if self._client is None: - # from crawl4ai import AsyncWebCrawler - - # self._client = AsyncWebCrawler(use_managed_browser=True) - # await self._client.__aenter__() - # return self._client - - @listen_toolkit(BaseCrawl4AIToolkit.scrape) - async def scrape(self, url: str) -> str: - return await super().scrape(url) - def toolkit_name(self) -> str: return "Crawl Toolkit" diff --git a/backend/app/utils/toolkit/excel_toolkit.py b/backend/app/utils/toolkit/excel_toolkit.py index 9e73ebad0..4a9ee0c97 100644 --- a/backend/app/utils/toolkit/excel_toolkit.py +++ b/backend/app/utils/toolkit/excel_toolkit.py @@ -3,10 +3,11 @@ from camel.toolkits import ExcelToolkit as BaseExcelToolkit from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseExcelToolkit) class ExcelToolkit(BaseExcelToolkit, AbstractToolkit): agent_name: str = Agents.document_agent @@ -20,7 +21,3 @@ class ExcelToolkit(BaseExcelToolkit, AbstractToolkit): if working_directory is None: working_directory = env("file_save_path", os.path.expanduser("~/Downloads")) super().__init__(timeout=timeout, working_directory=working_directory) - - @listen_toolkit(BaseExcelToolkit.extract_excel_content) - def extract_excel_content(self, document_path: str) -> str: - return super().extract_excel_content(document_path) diff --git a/backend/app/utils/toolkit/file_write_toolkit.py b/backend/app/utils/toolkit/file_write_toolkit.py index 02b71d018..210ffc9c3 100644 --- a/backend/app/utils/toolkit/file_write_toolkit.py +++ b/backend/app/utils/toolkit/file_write_toolkit.py @@ -5,10 +5,11 @@ from camel.toolkits import FileToolkit as BaseFileToolkit from app.component.environment import env from app.service.task import process_task from app.service.task import ActionWriteFileData, Agents, get_task_lock -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit, listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseFileToolkit) class FileToolkit(BaseFileToolkit, AbstractToolkit): agent_name: str = Agents.document_agent @@ -54,15 +55,3 @@ class FileToolkit(BaseFileToolkit, AbstractToolkit): ) ) return res - - @listen_toolkit( - BaseFileToolkit.read_file, - ) - def read_file(self, file_paths: str | list[str]) -> str | dict[str, str]: - return super().read_file(file_paths) - - @listen_toolkit( - BaseFileToolkit.edit_file, - ) - def edit_file(self, file_path: str, old_content: str, new_content: str) -> str: - return super().edit_file(file_path, old_content, new_content) diff --git a/backend/app/utils/toolkit/github_toolkit.py b/backend/app/utils/toolkit/github_toolkit.py index 87ba2ed16..e5204e294 100644 --- a/backend/app/utils/toolkit/github_toolkit.py +++ b/backend/app/utils/toolkit/github_toolkit.py @@ -3,10 +3,11 @@ from camel.toolkits import GithubToolkit as BaseGithubToolkit from camel.toolkits.function_tool import FunctionTool from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseGithubToolkit) class GithubToolkit(BaseGithubToolkit, AbstractToolkit): agent_name: str = Agents.developer_agent @@ -19,86 +20,6 @@ class GithubToolkit(BaseGithubToolkit, AbstractToolkit): super().__init__(access_token, timeout) self.api_task_id = api_task_id - @listen_toolkit( - BaseGithubToolkit.create_pull_request, - lambda _, - repo_name, - file_path, - new_content, - pr_title, - body, - branch_name: f"Create PR in {repo_name} for {file_path} with title '{pr_title}', branch '{branch_name}', content '{new_content}'", - ) - def create_pull_request( - self, - repo_name: str, - file_path: str, - new_content: str, - pr_title: str, - body: str, - branch_name: str, - ) -> str: - return super().create_pull_request(repo_name, file_path, new_content, pr_title, body, branch_name) - - @listen_toolkit( - BaseGithubToolkit.get_issue_list, - lambda _, repo_name, state="all": f"Get issue list from {repo_name} with state '{state}'", - lambda issues: f"Retrieved {len(issues)} issues", - ) - def get_issue_list( - self, repo_name: str, state: Literal["open", "closed", "all"] = "all" - ) -> list[dict[str, object]]: - return super().get_issue_list(repo_name, state) - - @listen_toolkit( - BaseGithubToolkit.get_issue_content, - lambda _, repo_name, issue_number: f"Get content of issue {issue_number} from {repo_name}", - ) - def get_issue_content(self, repo_name: str, issue_number: int) -> str: - return super().get_issue_content(repo_name, issue_number) - - @listen_toolkit( - BaseGithubToolkit.get_pull_request_list, - lambda _, repo_name, state="all": f"Get pull request list from {repo_name} with state '{state}'", - lambda prs: f"Retrieved {len(prs)} pull requests", - ) - def get_pull_request_list( - self, repo_name: str, state: Literal["open", "closed", "all"] = "all" - ) -> list[dict[str, object]]: - return super().get_pull_request_list(repo_name, state) - - @listen_toolkit( - BaseGithubToolkit.get_pull_request_code, - lambda _, repo_name, pr_number: f"Get code for pull request {pr_number} in {repo_name}", - lambda code: f"Retrieved {len(code)} code files", - ) - def get_pull_request_code(self, repo_name: str, pr_number: int) -> list[dict[str, str]]: - return super().get_pull_request_code(repo_name, pr_number) - - @listen_toolkit( - BaseGithubToolkit.get_pull_request_comments, - lambda _, repo_name, pr_number: f"Get comments for pull request {pr_number} in {repo_name}", - lambda comments: f"Retrieved {len(comments)} comments", - ) - def get_pull_request_comments(self, repo_name: str, pr_number: int) -> list[dict[str, str]]: - return super().get_pull_request_comments(repo_name, pr_number) - - @listen_toolkit( - BaseGithubToolkit.get_all_file_paths, - lambda _, repo_name, path="": f"Get all file paths from {repo_name}, path '{path}'", - lambda paths: f"Retrieved {len(paths)} file paths", - ) - def get_all_file_paths(self, repo_name: str, path: str = "") -> list[str]: - return super().get_all_file_paths(repo_name, path) - - @listen_toolkit( - BaseGithubToolkit.retrieve_file_content, - lambda _, repo_name, file_path: f"Retrieve content of file {file_path} from {repo_name}", - lambda content: f"Retrieved content of length {len(content)}", - ) - def retrieve_file_content(self, repo_name: str, file_path: str) -> str: - return super().retrieve_file_content(repo_name, file_path) - @classmethod def get_can_use_tools(cls, api_task_id: str) -> list[FunctionTool]: if env("GITHUB_ACCESS_TOKEN"): diff --git a/backend/app/utils/toolkit/google_calendar_toolkit.py b/backend/app/utils/toolkit/google_calendar_toolkit.py index fb7e77780..67bd4a7f9 100644 --- a/backend/app/utils/toolkit/google_calendar_toolkit.py +++ b/backend/app/utils/toolkit/google_calendar_toolkit.py @@ -4,7 +4,7 @@ import threading from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit from app.utils.oauth_state_manager import oauth_state_manager from camel.toolkits import GoogleCalendarToolkit as BaseGoogleCalendarToolkit @@ -12,6 +12,7 @@ from loguru import logger SCOPES = ['https://www.googleapis.com/auth/calendar'] +@auto_listen_toolkit(BaseGoogleCalendarToolkit) class GoogleCalendarToolkit(BaseGoogleCalendarToolkit, AbstractToolkit): agent_name: str = Agents.social_medium_agent @@ -29,50 +30,6 @@ class GoogleCalendarToolkit(BaseGoogleCalendarToolkit, AbstractToolkit): ) super().__init__(timeout) - @listen_toolkit(BaseGoogleCalendarToolkit.create_event) - def create_event( - self, - event_title: str, - start_time: str, - end_time: str, - description: str = "", - location: str = "", - attendees_email: List[str] | None = None, - timezone: str = "UTC", - ) -> Dict[str, Any]: - return super().create_event( - event_title, start_time, end_time, description, location, attendees_email, timezone - ) - - @listen_toolkit(BaseGoogleCalendarToolkit.get_events) - def get_events( - self, max_results: int = 10, time_min: str | None = None - ) -> List[Dict[str, Any]] | Dict[str, Any]: - return super().get_events(max_results, time_min) - - @listen_toolkit(BaseGoogleCalendarToolkit.update_event) - def update_event( - self, - event_id: str, - event_title: str | None = None, - start_time: str | None = None, - end_time: str | None = None, - description: str | None = None, - location: str | None = None, - attendees_email: List[str] | None = None, - ) -> Dict[str, Any]: - return super().update_event( - event_id, event_title, start_time, end_time, description, location, attendees_email - ) - - @listen_toolkit(BaseGoogleCalendarToolkit.delete_event) - def delete_event(self, event_id: str) -> str: - return super().delete_event(event_id) - - @listen_toolkit(BaseGoogleCalendarToolkit.get_calendar_details) - def get_calendar_details(self) -> Dict[str, Any]: - return super().get_calendar_details() - @classmethod def get_can_use_tools(cls, api_task_id: str): if env("GOOGLE_CLIENT_ID") and env("GOOGLE_CLIENT_SECRET"): diff --git a/backend/app/utils/toolkit/human_toolkit.py b/backend/app/utils/toolkit/human_toolkit.py index ba616b5c3..edd43a988 100644 --- a/backend/app/utils/toolkit/human_toolkit.py +++ b/backend/app/utils/toolkit/human_toolkit.py @@ -1,14 +1,16 @@ import asyncio from camel.toolkits.base import BaseToolkit -from loguru import logger from camel.toolkits.function_tool import FunctionTool from app.service.task import Action, ActionAskData, ActionNoticeData, get_task_lock -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit, listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit from app.service.task import process_task -# Rewrite HumanToolkit because the system's user interaction was using console, but in electron we cannot use console. Changed to use SSE response to let frontend show dialog for user interaction +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("human_toolkit") +@auto_listen_toolkit(BaseToolkit) class HumanToolkit(BaseToolkit, AbstractToolkit): r"""A class representing a toolkit for human interaction. Note: diff --git a/backend/app/utils/toolkit/hybrid_browser_python_toolkit.py b/backend/app/utils/toolkit/hybrid_browser_python_toolkit.py index 911a6dd87..255b13223 100644 --- a/backend/app/utils/toolkit/hybrid_browser_python_toolkit.py +++ b/backend/app/utils/toolkit/hybrid_browser_python_toolkit.py @@ -12,12 +12,14 @@ from camel.toolkits.hybrid_browser_toolkit_py.actions import ActionExecutor from camel.toolkits.hybrid_browser_toolkit_py.snapshot import PageSnapshot from camel.toolkits.hybrid_browser_toolkit_py.agent import PlaywrightLLMAgent from camel.toolkits.function_tool import FunctionTool -from loguru import logger from app.component.environment import env from app.exception.exception import ProgramException from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit, listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("hybrid_browser_python_toolkit") class BrowserSession(BaseHybridBrowserSession): @@ -124,6 +126,7 @@ class BrowserSession(BaseHybridBrowserSession): break +@auto_listen_toolkit(BaseHybridBrowserToolkit) class HybridBrowserPythonToolkit(BaseHybridBrowserToolkit, AbstractToolkit): agent_name: str = Agents.search_agent @@ -224,14 +227,6 @@ class HybridBrowserPythonToolkit(BaseHybridBrowserToolkit, AbstractToolkit): self._agent: PlaywrightLLMAgent | None = None self._unified_script = self._load_unified_analyzer() - @listen_toolkit(BaseHybridBrowserToolkit.browser_open) - async def browser_open(self) -> Dict[str, str]: - return await super().browser_open() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_close) - async def browser_close(self) -> str: - return await super().browser_close() - @listen_toolkit(BaseHybridBrowserToolkit.browser_visit_page, lambda _, url: url) async def browser_visit_page(self, url: str) -> Dict[str, Any]: r"""Navigates to a URL. @@ -282,66 +277,6 @@ class HybridBrowserPythonToolkit(BaseHybridBrowserToolkit, AbstractToolkit): return {"result": nav_result, "snapshot": snapshot, **tab_info} - @listen_toolkit(BaseHybridBrowserToolkit.browser_back) - async def browser_back(self) -> Dict[str, Any]: - return await super().browser_back() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_forward) - async def browser_forward(self) -> Dict[str, Any]: - return await super().browser_forward() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_click) - async def browser_click(self, *, ref: str) -> Dict[str, Any]: - return await super().browser_click(ref=ref) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_type) - async def browser_type(self, *, ref: str, text: str) -> Dict[str, Any]: - return await super().browser_type(ref=ref, text=text) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_switch_tab) - async def browser_switch_tab(self, *, tab_id: str) -> Dict[str, Any]: - return await super().browser_switch_tab(tab_id=tab_id) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_select) - async def browser_select(self, *, ref: str, value: str) -> Dict[str, str]: - return await super().browser_select(ref=ref, value=value) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_scroll) - async def browser_scroll(self, *, direction: str, amount: int) -> Dict[str, str]: - return await super().browser_scroll(direction=direction, amount=amount) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_wait_user) - async def browser_wait_user(self, timeout_sec: float | None = None) -> Dict[str, str]: - return await super().browser_wait_user(timeout_sec) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_enter) - async def browser_enter(self) -> Dict[str, str]: - return await super().browser_enter() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_solve_task) - async def browser_solve_task(self, task_prompt: str, start_url: str, max_steps: int = 15) -> str: - return await super().browser_solve_task(task_prompt, start_url, max_steps) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_get_page_snapshot) - async def browser_get_page_snapshot(self) -> str: - return await super().browser_get_page_snapshot() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_get_som_screenshot) - async def browser_get_som_screenshot(self): - return await super().browser_get_som_screenshot() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_get_page_links) - async def browser_get_page_links(self, *, ref: List[str]) -> Dict[str, Any]: - return await super().browser_get_page_links(ref=ref) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_close_tab) - async def browser_close_tab(self, *, tab_id: str) -> Dict[str, Any]: - return await super().browser_close_tab(tab_id=tab_id) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_get_tab_info) - async def browser_get_tab_info(self) -> Dict[str, Any]: - return await super().browser_get_tab_info() - @classmethod def get_can_use_tools(cls, api_task_id: str) -> list[FunctionTool]: browser = HybridBrowserPythonToolkit( diff --git a/backend/app/utils/toolkit/hybrid_browser_toolkit.py b/backend/app/utils/toolkit/hybrid_browser_toolkit.py index adf6c3d0c..ff03e0925 100644 --- a/backend/app/utils/toolkit/hybrid_browser_toolkit.py +++ b/backend/app/utils/toolkit/hybrid_browser_toolkit.py @@ -4,7 +4,6 @@ import time import asyncio import json from typing import Any, Dict, List, Optional -from loguru import logger import websockets import websockets.exceptions @@ -16,8 +15,11 @@ from camel.toolkits.hybrid_browser_toolkit.ws_wrapper import WebSocketBrowserWra from app.component.command import bun, uv from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("hybrid_browser_toolkit") class WebSocketBrowserWrapper(BaseWebSocketBrowserWrapper): @@ -45,8 +47,13 @@ class WebSocketBrowserWrapper(BaseWebSocketBrowserWrapper): future.set_result(response) logger.debug(f"Processed response for message {message_id}") else: - # Log unexpected messages - logger.warning(f"Received unexpected message: {response}") + message_summary = { + "id": response.get("id"), + "success": response.get("success"), + "has_result": "result" in response, + "result_type": type(response.get("result")).__name__ if "result" in response else None + } + logger.debug(f"Received unexpected message: {message_summary}") except asyncio.CancelledError: disconnect_reason = "Receive loop cancelled" @@ -210,6 +217,7 @@ class WebSocketConnectionPool: websocket_connection_pool = WebSocketConnectionPool() +@auto_listen_toolkit(BaseHybridBrowserToolkit) class HybridBrowserToolkit(BaseHybridBrowserToolkit, AbstractToolkit): agent_name: str = Agents.search_agent @@ -240,7 +248,10 @@ class HybridBrowserToolkit(BaseHybridBrowserToolkit, AbstractToolkit): cdp_keep_current_page: bool = False, full_visual_mode: bool = False, ) -> None: + logger.info(f"[HybridBrowserToolkit] Initializing with api_task_id: {api_task_id}") self.api_task_id = api_task_id + logger.debug(f"[HybridBrowserToolkit] api_task_id set to: {self.api_task_id}") + logger.debug(f"[HybridBrowserToolkit] Calling super().__init__ with session_id: {session_id}") super().__init__( headless=headless, user_data_dir=user_data_dir, @@ -264,16 +275,20 @@ class HybridBrowserToolkit(BaseHybridBrowserToolkit, AbstractToolkit): cdp_keep_current_page=cdp_keep_current_page, full_visual_mode=full_visual_mode, ) + logger.info(f"[HybridBrowserToolkit] Initialization complete for api_task_id: {self.api_task_id}") async def _ensure_ws_wrapper(self): """Ensure WebSocket wrapper is initialized using connection pool.""" + logger.debug(f"[HybridBrowserToolkit] _ensure_ws_wrapper called for api_task_id: {getattr(self, 'api_task_id', 'NOT SET')}") global websocket_connection_pool # Get session ID from config or use default session_id = self._ws_config.get("session_id", "default") + logger.debug(f"[HybridBrowserToolkit] Using session_id: {session_id}") # Get or create connection from pool self._ws_wrapper = await websocket_connection_pool.get_connection(session_id, self._ws_config) + logger.info(f"[HybridBrowserToolkit] WebSocket wrapper initialized for session: {session_id}") # Additional health check if self._ws_wrapper.websocket is None: @@ -336,74 +351,3 @@ class HybridBrowserToolkit(BaseHybridBrowserToolkit, AbstractToolkit): if hasattr(self, "_ws_wrapper") and self._ws_wrapper: session_id = self._ws_config.get("session_id", "default") logger.debug(f"HybridBrowserToolkit for session {session_id} is being garbage collected") - - @listen_toolkit(BaseHybridBrowserToolkit.browser_open) - async def browser_open(self) -> Dict[str, Any]: - return await super().browser_open() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_close) - async def browser_close(self) -> str: - return await super().browser_close() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_visit_page) - async def browser_visit_page(self, url: str) -> Dict[str, Any]: - logger.debug(f"browser_visit_page called with URL: {url}") - try: - result = await super().browser_visit_page(url) - logger.debug(f"browser_visit_page succeeded for URL: {url}") - return result - except Exception as e: - logger.error(f"browser_visit_page failed for URL {url}: {type(e).__name__}: {e}") - raise - - @listen_toolkit(BaseHybridBrowserToolkit.browser_back) - async def browser_back(self) -> Dict[str, Any]: - return await super().browser_back() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_forward) - async def browser_forward(self) -> Dict[str, Any]: - return await super().browser_forward() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_get_page_snapshot) - async def browser_get_page_snapshot(self) -> str: - return await super().browser_get_page_snapshot() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_get_som_screenshot) - async def browser_get_som_screenshot(self, read_image: bool = False, instruction: str | None = None) -> str: - return await super().browser_get_som_screenshot(read_image, instruction) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_click) - async def browser_click(self, *, ref: str) -> Dict[str, Any]: - return await super().browser_click(ref=ref) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_type) - async def browser_type(self, *, ref: str, text: str) -> Dict[str, Any]: - return await super().browser_type(ref=ref, text=text) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_select) - async def browser_select(self, *, ref: str, value: str) -> Dict[str, Any]: - return await super().browser_select(ref=ref, value=value) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_scroll) - async def browser_scroll(self, *, direction: str, amount: int = 500) -> Dict[str, Any]: - return await super().browser_scroll(direction=direction, amount=amount) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_enter) - async def browser_enter(self) -> Dict[str, Any]: - return await super().browser_enter() - - @listen_toolkit(BaseHybridBrowserToolkit.browser_wait_user) - async def browser_wait_user(self, timeout_sec: float | None = None) -> Dict[str, Any]: - return await super().browser_wait_user(timeout_sec) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_switch_tab) - async def browser_switch_tab(self, *, tab_id: str) -> Dict[str, Any]: - return await super().browser_switch_tab(tab_id=tab_id) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_close_tab) - async def browser_close_tab(self, *, tab_id: str) -> Dict[str, Any]: - return await super().browser_close_tab(tab_id=tab_id) - - @listen_toolkit(BaseHybridBrowserToolkit.browser_get_tab_info) - async def browser_get_tab_info(self) -> Dict[str, Any]: - return await super().browser_get_tab_info() diff --git a/backend/app/utils/toolkit/image_analysis_toolkit.py b/backend/app/utils/toolkit/image_analysis_toolkit.py index b325d904e..609f3045f 100644 --- a/backend/app/utils/toolkit/image_analysis_toolkit.py +++ b/backend/app/utils/toolkit/image_analysis_toolkit.py @@ -2,10 +2,11 @@ from camel.models import BaseModelBackend from camel.toolkits import ImageAnalysisToolkit as BaseImageAnalysisToolkit from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseImageAnalysisToolkit) class ImageAnalysisToolkit(BaseImageAnalysisToolkit, AbstractToolkit): agent_name: str = Agents.multi_modal_agent @@ -17,24 +18,3 @@ class ImageAnalysisToolkit(BaseImageAnalysisToolkit, AbstractToolkit): ): super().__init__(model, timeout) self.api_task_id = api_task_id - - @listen_toolkit( - BaseImageAnalysisToolkit.image_to_text, - lambda _, - image_path, - sys_prompt: f"transcribe image from {image_path} and ask sys_prompt: {sys_prompt}", - ) - def image_to_text(self, image_path: str, sys_prompt: str | None = None) -> str: - return super().image_to_text(image_path, sys_prompt) - - @listen_toolkit( - BaseImageAnalysisToolkit.ask_question_about_image, - lambda _, - image_path, - question, - sys_prompt: f"transcribe image from {image_path} and ask question: {question} with sys_prompt: {sys_prompt}", - ) - def ask_question_about_image( - self, image_path: str, question: str, sys_prompt: str | None = None - ) -> str: - return super().ask_question_about_image(image_path, question, sys_prompt) diff --git a/backend/app/utils/toolkit/linkedin_toolkit.py b/backend/app/utils/toolkit/linkedin_toolkit.py index 9b30392c0..60c39d111 100644 --- a/backend/app/utils/toolkit/linkedin_toolkit.py +++ b/backend/app/utils/toolkit/linkedin_toolkit.py @@ -2,10 +2,11 @@ from camel.toolkits import LinkedInToolkit as BaseLinkedInToolkit from camel.toolkits.function_tool import FunctionTool from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseLinkedInToolkit) class LinkedInToolkit(BaseLinkedInToolkit, AbstractToolkit): agent_name: str = Agents.social_medium_agent @@ -13,27 +14,6 @@ class LinkedInToolkit(BaseLinkedInToolkit, AbstractToolkit): super().__init__(timeout) self.api_task_id = api_task_id - @listen_toolkit( - BaseLinkedInToolkit.create_post, - lambda _, text: f"create a LinkedIn post with text: {text}", - ) - def create_post(self, text: str) -> dict: - return super().create_post(text) - - @listen_toolkit( - BaseLinkedInToolkit.delete_post, - lambda _, post_id: f"delete LinkedIn post with id: {post_id}", - ) - def delete_post(self, post_id: str) -> str: - return super().delete_post(post_id) - - @listen_toolkit( - BaseLinkedInToolkit.get_profile, - lambda _, include_id: f"get LinkedIn profile with include_id: {include_id}", - ) - def get_profile(self, include_id: bool = False) -> dict: - return super().get_profile(include_id) - @classmethod def get_can_use_tools(cls, api_task_id: str) -> list[FunctionTool]: if env("LINKEDIN_ACCESS_TOKEN"): diff --git a/backend/app/utils/toolkit/markitdown_toolkit.py b/backend/app/utils/toolkit/markitdown_toolkit.py index 1c1cc3528..6ac09c4ea 100644 --- a/backend/app/utils/toolkit/markitdown_toolkit.py +++ b/backend/app/utils/toolkit/markitdown_toolkit.py @@ -2,17 +2,14 @@ from typing import Dict, List from camel.toolkits import MarkItDownToolkit as BaseMarkItDownToolkit from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseMarkItDownToolkit) class MarkItDownToolkit(BaseMarkItDownToolkit, AbstractToolkit): agent_name: str = Agents.document_agent def __init__(self, api_task_id: str, timeout: float | None = None): self.api_task_id = api_task_id super().__init__(timeout) - - @listen_toolkit(BaseMarkItDownToolkit.read_files) - def read_files(self, file_paths: List[str]) -> Dict[str, str]: - return super().read_files(file_paths) diff --git a/backend/app/utils/toolkit/note_taking_toolkit.py b/backend/app/utils/toolkit/note_taking_toolkit.py index cd762bde9..5f6c2af33 100644 --- a/backend/app/utils/toolkit/note_taking_toolkit.py +++ b/backend/app/utils/toolkit/note_taking_toolkit.py @@ -5,10 +5,11 @@ from typing import Optional from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseNoteTakingToolkit) class NoteTakingToolkit(BaseNoteTakingToolkit, AbstractToolkit): agent_name: str = Agents.document_agent @@ -25,19 +26,3 @@ class NoteTakingToolkit(BaseNoteTakingToolkit, AbstractToolkit): if working_directory is None: working_directory = env("file_save_path", os.path.expanduser("~/.eigent/notes")) + "/note.md" super().__init__(working_directory=working_directory, timeout=timeout) - - @listen_toolkit(BaseNoteTakingToolkit.append_note) - def append_note(self, note_name: str, content: str) -> str: - return super().append_note(note_name=note_name, content=content) - - @listen_toolkit(BaseNoteTakingToolkit.read_note) - def read_note(self, note_name: Optional[str] = "all_notes") -> str: - return super().read_note(note_name=note_name) - - @listen_toolkit(BaseNoteTakingToolkit.create_note) - def create_note(self, note_name: str, content: str, overwrite: bool = False) -> str: - return super().create_note(note_name=note_name, content=content, overwrite=overwrite) - - @listen_toolkit(BaseNoteTakingToolkit.list_note) - def list_note(self) -> str: - return super().list_note() diff --git a/backend/app/utils/toolkit/notion_mcp_toolkit.py b/backend/app/utils/toolkit/notion_mcp_toolkit.py index 36928aa0a..ab9d7b053 100644 --- a/backend/app/utils/toolkit/notion_mcp_toolkit.py +++ b/backend/app/utils/toolkit/notion_mcp_toolkit.py @@ -1,11 +1,32 @@ import os from typing import Any, Dict, List -from loguru import logger from camel.toolkits import FunctionTool from app.component.environment import env from app.utils.toolkit.abstract_toolkit import AbstractToolkit from camel.toolkits.mcp_toolkit import MCPToolkit +from utils import traceroot_wrapper as traceroot +logger = traceroot.get_logger("notion_mcp_toolkit") + +def _customize_function_parameters(schema: Dict[str, Any]) -> None: + r"""Customize function parameters for specific functions. + + This method allows modifying parameter descriptions or other schema + attributes for specific functions. + """ + function_info = schema.get("function", {}) + function_name = function_info.get("name", "") + parameters = function_info.get("parameters", {}) + properties = parameters.get("properties", {}) + required = parameters.get("required", []) + + # Modify the notion-create-pages function to make parent optional + if function_name == "notion-create-pages": + required.remove("parent") + parameters["required"] = required + if "parent" in properties: + # Update the parent parameter description + properties["parent"]["description"] = "Optional. " + properties["parent"]["description"] class NotionMCPToolkit(MCPToolkit, AbstractToolkit): @@ -33,68 +54,7 @@ class NotionMCPToolkit(MCPToolkit, AbstractToolkit): } } } - super().__init__(config_dict=config_dict, timeout=timeout) - - def get_tools(self) -> List[FunctionTool]: - r"""Returns a list of tools provided by the NotionMCPToolkit. - - Returns: - List[FunctionTool]: List of available tools. - """ - all_tools = [] - for client in self.clients: - try: - original_build_schema = client._build_tool_schema - - def create_wrapper(orig_func): - def wrapper(mcp_tool): - return self._build_custom_tool_schema( - mcp_tool, orig_func - ) - - return wrapper - - client._build_tool_schema = create_wrapper( # type: ignore[method-assign] - original_build_schema - ) - - client_tools = client.get_tools() - all_tools.extend(client_tools) - - client._build_tool_schema = original_build_schema # type: ignore[method-assign] - - except Exception as e: - logger.error(f"Failed to get tools from client: {e}") - return all_tools - - def _build_custom_tool_schema(self, mcp_tool, original_build_schema): - r"""Build tool schema with custom modifications.""" - schema = original_build_schema(mcp_tool) - self._customize_function_parameters(schema) - return schema - - def _customize_function_parameters(self, schema: Dict[str, Any]) -> None: - r"""Customize function parameters for specific functions. - - This method allows modifying parameter descriptions or other schema - attributes for specific functions. - """ - function_info = schema.get("function", {}) - function_name = function_info.get("name", "") - parameters = function_info.get("parameters", {}) - properties = parameters.get("properties", {}) - - # Modify the notion-create-pages function to make parent optional - if function_name == "notion-create-pages": - if "parent" in properties: - # Update the parent parameter description - properties["parent"]["description"] = ( - "Optional. The parent under which the new pages will be created. " - "This can be a page (page_id), a database page (database_id), or " - "a data source/collection under a database (data_source_id). " - "If omitted, the new pages will be created as private pages at the workspace level. " - "Use data_source_id when you have a collection:// URL from the fetch tool." - ) + super().__init__(config_dict=config_dict, timeout=timeout) @classmethod async def get_can_use_tools(cls, api_task_id: str) -> list[FunctionTool]: @@ -104,6 +64,12 @@ class NotionMCPToolkit(MCPToolkit, AbstractToolkit): await toolkit.connect() # Use subclass implementation that inlines upstream processing all_tools = toolkit.get_tools() + tool_schema = [ + item.get_openai_tool_schema() for item in all_tools + ] + #adjust tool schema + for item in tool_schema: + _customize_function_parameters(item) for item in all_tools: setattr(item, "_toolkit_name", cls.__name__) tools.append(item) diff --git a/backend/app/utils/toolkit/notion_toolkit.py b/backend/app/utils/toolkit/notion_toolkit.py index 25e6ac9aa..bdc74d439 100644 --- a/backend/app/utils/toolkit/notion_toolkit.py +++ b/backend/app/utils/toolkit/notion_toolkit.py @@ -3,10 +3,11 @@ from camel.toolkits import NotionToolkit as BaseNotionToolkit from camel.toolkits.function_tool import FunctionTool from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseNotionToolkit) class NotionToolkit(BaseNotionToolkit, AbstractToolkit): agent_name: str = Agents.document_agent @@ -19,29 +20,6 @@ class NotionToolkit(BaseNotionToolkit, AbstractToolkit): super().__init__(notion_token, timeout) self.api_task_id = api_task_id - @listen_toolkit( - BaseNotionToolkit.list_all_pages, - lambda _: "list all pages in Notion workspace", - lambda result: f"{len(result)} pages found", - ) - def list_all_pages(self) -> List[dict]: - return super().list_all_pages() - - @listen_toolkit( - BaseNotionToolkit.list_all_users, - lambda _: "list all users in Notion workspace", - lambda result: f"{len(result)} users found", - ) - def list_all_users(self) -> List[dict]: - return super().list_all_users() - - @listen_toolkit( - BaseNotionToolkit.get_notion_block_text_content, - lambda _, page_id: f"get text content of page with id: {page_id}", - ) - def get_notion_block_text_content(self, block_id: str) -> str: - return super().get_notion_block_text_content(block_id) - @classmethod def get_can_use_tools(cls, api_task_id: str) -> List[FunctionTool]: if env("NOTION_TOKEN"): diff --git a/backend/app/utils/toolkit/openai_image_toolkit.py b/backend/app/utils/toolkit/openai_image_toolkit.py index f76b0a234..e6d9c3d98 100644 --- a/backend/app/utils/toolkit/openai_image_toolkit.py +++ b/backend/app/utils/toolkit/openai_image_toolkit.py @@ -3,11 +3,12 @@ from camel.toolkits import OpenAIImageToolkit as BaseOpenAIImageToolkit from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit, listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit from typing import Literal, Optional, Union, List +@auto_listen_toolkit(BaseOpenAIImageToolkit) class OpenAIImageToolkit(BaseOpenAIImageToolkit, AbstractToolkit): agent_name: str = Agents.multi_modal_agent diff --git a/backend/app/utils/toolkit/pptx_toolkit.py b/backend/app/utils/toolkit/pptx_toolkit.py index 6a06d776e..0e341d246 100644 --- a/backend/app/utils/toolkit/pptx_toolkit.py +++ b/backend/app/utils/toolkit/pptx_toolkit.py @@ -4,11 +4,12 @@ from camel.toolkits import PPTXToolkit as BasePPTXToolkit from app.component.environment import env from app.service.task import ActionWriteFileData, Agents, get_task_lock -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit, listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit from app.service.task import process_task +@auto_listen_toolkit(BasePPTXToolkit) class PPTXToolkit(BasePPTXToolkit, AbstractToolkit): agent_name: str = Agents.document_agent diff --git a/backend/app/utils/toolkit/pyautogui_toolkit.py b/backend/app/utils/toolkit/pyautogui_toolkit.py index e6d26a72e..8cf3d0c50 100644 --- a/backend/app/utils/toolkit/pyautogui_toolkit.py +++ b/backend/app/utils/toolkit/pyautogui_toolkit.py @@ -4,10 +4,11 @@ from camel.toolkits import PyAutoGUIToolkit as BasePyAutoGUIToolkit from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BasePyAutoGUIToolkit) class PyAutoGUIToolkit(BasePyAutoGUIToolkit, AbstractToolkit): agent_name: str = Agents.search_agent @@ -21,69 +22,3 @@ class PyAutoGUIToolkit(BasePyAutoGUIToolkit, AbstractToolkit): screenshots_dir = env("file_save_path", os.path.expanduser("~/Downloads")) super().__init__(timeout, screenshots_dir) self.api_task_id = api_task_id - - @listen_toolkit(BasePyAutoGUIToolkit.mouse_move, lambda _, x, y: f"mouse move to {x}, {y}") - def mouse_move(self, x: int, y: int) -> str: - return super().mouse_move(x, y) - - @listen_toolkit( - BasePyAutoGUIToolkit.mouse_click, - lambda _, button="left", clicks=1, x=None, y=None: f"mouse click {button} {clicks} times at {x}, {y}", - ) - def mouse_click( - self, - button: Literal["left", "middle", "right"] = "left", - clicks: int = 1, - x: int | None = None, - y: int | None = None, - ) -> str: - return super().mouse_click(button, clicks, x, y) - - @listen_toolkit( - BasePyAutoGUIToolkit.keyboard_type, - lambda _, text, interval=0: f"keyboard type {text}, interval {interval}", - ) - def keyboard_type(self, text: str, interval: float = 0) -> str: - return super().keyboard_type(text, interval) - - @listen_toolkit(BasePyAutoGUIToolkit.take_screenshot) - def take_screenshot(self) -> str: - return super().take_screenshot() - - @listen_toolkit(BasePyAutoGUIToolkit.get_mouse_position) - def get_mouse_position(self) -> str: - return super().get_mouse_position() - - @listen_toolkit(BasePyAutoGUIToolkit.press_key, lambda _, key: f"press key {key}") - def press_key(self, key: str | list[str]) -> str: - return super().press_key(key) - - @listen_toolkit(BasePyAutoGUIToolkit.hotkey, lambda _, keys: f"hotkey {keys}") - def hotkey(self, keys: List[str]) -> str: - return super().hotkey(keys) - - @listen_toolkit( - BasePyAutoGUIToolkit.mouse_drag, - lambda _, - start_x, - start_y, - end_x, - end_y, - button="left": f"mouse drag from {start_x}, {start_y} to {end_x}, {end_y} with {button} button", - ) - def mouse_drag( - self, - start_x: int, - start_y: int, - end_x: int, - end_y: int, - button: Literal["left", "middle", "right"] = "left", - ) -> str: - return super().mouse_drag(start_x, start_y, end_x, end_y, button) - - @listen_toolkit( - BasePyAutoGUIToolkit.scroll, - lambda _, scroll_amount, x=None, y=None: f"scroll {scroll_amount} at {x}, {y}", - ) - def scroll(self, scroll_amount: int, x: int | None = None, y: int | None = None) -> str: - return super().scroll(scroll_amount, x, y) diff --git a/backend/app/utils/toolkit/reddit_toolkit.py b/backend/app/utils/toolkit/reddit_toolkit.py index fbc471e92..46d4ddd3e 100644 --- a/backend/app/utils/toolkit/reddit_toolkit.py +++ b/backend/app/utils/toolkit/reddit_toolkit.py @@ -3,10 +3,11 @@ from camel.toolkits import RedditToolkit as BaseRedditToolkit from camel.toolkits.function_tool import FunctionTool from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseRedditToolkit) class RedditToolkit(BaseRedditToolkit, AbstractToolkit): agent_name: str = Agents.social_medium_agent @@ -20,47 +21,6 @@ class RedditToolkit(BaseRedditToolkit, AbstractToolkit): super().__init__(retries, delay, timeout) self.api_task_id = api_task_id - @listen_toolkit( - BaseRedditToolkit.collect_top_posts, - lambda _, - subreddit_name, - post_limit=5, - comment_limit=5: f"collect top posts from subreddit: {subreddit_name} with post limit: {post_limit} and comment limit: {comment_limit}", - lambda result: f"top posts collected: {result}", - ) - def collect_top_posts( - self, subreddit_name: str, post_limit: int = 5, comment_limit: int = 5 - ) -> List[Dict[str, Any]] | str: - return super().collect_top_posts(subreddit_name, post_limit, comment_limit) - - @listen_toolkit( - BaseRedditToolkit.perform_sentiment_analysis, - lambda _, data: f"perform sentiment analysis on data number: {len(data)}", - lambda result: f"perform analysis result: {result}", - ) - def perform_sentiment_analysis(self, data: List[Dict[str, Any]]) -> List[Dict[str, Any]]: - return super().perform_sentiment_analysis(data) - - @listen_toolkit( - BaseRedditToolkit.track_keyword_discussions, - lambda _, - subreddits, - keywords, - post_limit=10, - comment_limit=10, - sentiment_analysis=False: f"track keyword discussions for subreddits: {subreddits}, keywords: {keywords}", - lambda result: f"track keyword discussions result: {result}", - ) - def track_keyword_discussions( - self, - subreddits: List[str], - keywords: List[str], - post_limit: int = 10, - comment_limit: int = 10, - sentiment_analysis: bool = False, - ) -> List[Dict[str, Any]] | str: - return super().track_keyword_discussions(subreddits, keywords, post_limit, comment_limit, sentiment_analysis) - @classmethod def get_can_use_tools(cls, api_task_id: str) -> list[FunctionTool]: if env("REDDIT_CLIENT_ID") and env("REDDIT_CLIENT_SECRET") and env("REDDIT_USER_AGENT"): diff --git a/backend/app/utils/toolkit/screenshot_toolkit.py b/backend/app/utils/toolkit/screenshot_toolkit.py index 1e51dabbd..4e475ef28 100644 --- a/backend/app/utils/toolkit/screenshot_toolkit.py +++ b/backend/app/utils/toolkit/screenshot_toolkit.py @@ -3,10 +3,11 @@ from camel.toolkits import ScreenshotToolkit as BaseScreenshotToolkit from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseScreenshotToolkit) class ScreenshotToolkit(BaseScreenshotToolkit, AbstractToolkit): agent_name: str = Agents.developer_agent @@ -15,13 +16,3 @@ class ScreenshotToolkit(BaseScreenshotToolkit, AbstractToolkit): if working_directory is None: working_directory = env("file_save_path", os.path.expanduser("~/Downloads")) super().__init__(working_directory, timeout) - - @listen_toolkit(BaseScreenshotToolkit.take_screenshot_and_read_image) - def take_screenshot_and_read_image( - self, filename: str, save_to_file: bool = True, read_image: bool = True, instruction: str | None = None - ) -> str: - return super().take_screenshot_and_read_image(filename, save_to_file, read_image, instruction) - - @listen_toolkit(BaseScreenshotToolkit.read_image) - def read_image(self, image_path: str, instruction: str = "") -> str: - return super().read_image(image_path, instruction) diff --git a/backend/app/utils/toolkit/search_toolkit.py b/backend/app/utils/toolkit/search_toolkit.py index f32d196a5..59b000473 100644 --- a/backend/app/utils/toolkit/search_toolkit.py +++ b/backend/app/utils/toolkit/search_toolkit.py @@ -2,13 +2,16 @@ from typing import Any, Dict, List, Literal from camel.toolkits import SearchToolkit as BaseSearchToolkit from camel.toolkits.function_tool import FunctionTool import httpx -from loguru import logger from app.component.environment import env, env_not_empty from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit, listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("search_toolkit") +@auto_listen_toolkit(BaseSearchToolkit) class SearchToolkit(BaseSearchToolkit, AbstractToolkit): agent_name: str = Agents.search_agent @@ -50,19 +53,36 @@ class SearchToolkit(BaseSearchToolkit, AbstractToolkit): @listen_toolkit( BaseSearchToolkit.search_google, - lambda _, query, search_type="web": f"with query '{query}' and {search_type} result pages", + lambda _, query, search_type="web", number_of_result_pages=10, start_page=1: f"with query '{query}', {search_type} type, {number_of_result_pages} result pages starting from page {start_page}", ) - def search_google(self, query: str, search_type: str = "web") -> list[dict[str, Any]]: + def search_google( + self, + query: str, + search_type: str = "web", + number_of_result_pages: int = 10, + start_page: int = 1 + ) -> list[dict[str, Any]]: if env("GOOGLE_API_KEY") and env("SEARCH_ENGINE_ID"): - return super().search_google(query, search_type) + return super().search_google(query, search_type, number_of_result_pages, start_page) else: - return self.cloud_search_google(query, search_type) + return self.cloud_search_google(query, search_type, number_of_result_pages, start_page) - def cloud_search_google(self, query: str, search_type): + def cloud_search_google( + self, + query: str, + search_type: str = "web", + number_of_result_pages: int = 10, + start_page: int = 1 + ): url = env_not_empty("SERVER_URL") res = httpx.get( url + "/proxy/google", - params={"query": query, "search_type": search_type}, + params={ + "query": query, + "search_type": search_type, + "number_of_result_pages": number_of_result_pages, + "start_page": start_page + }, headers={"api-key": env_not_empty("cloud_api_key")}, ) return res.json() @@ -163,73 +183,73 @@ class SearchToolkit(BaseSearchToolkit, AbstractToolkit): # def search_bing(self, query: str) -> dict[str, Any]: # return super().search_bing(query) - @listen_toolkit(BaseSearchToolkit.search_exa, lambda _, query, *args, **kwargs: f"{query}, {args}, {kwargs}") - def search_exa( - self, - query: str, - search_type: Literal["auto", "neural", "keyword"] = "auto", - category: None - | Literal[ - "company", - "research paper", - "news", - "pdf", - "github", - "tweet", - "personal site", - "linkedin profile", - "financial report", - ] = None, - include_text: List[str] | None = None, - exclude_text: List[str] | None = None, - use_autoprompt: bool = True, - text: bool = False, - ) -> Dict[str, Any]: - if env("EXA_API_KEY"): - res = super().search_exa(query, search_type, category, include_text, exclude_text, use_autoprompt, text) - return res - else: - return self.cloud_search_exa(query, search_type, category, include_text, exclude_text, use_autoprompt, text) - - def cloud_search_exa( - self, - query: str, - search_type: Literal["auto", "neural", "keyword"] = "auto", - category: None - | Literal[ - "company", - "research paper", - "news", - "pdf", - "github", - "tweet", - "personal site", - "linkedin profile", - "financial report", - ] = None, - include_text: List[str] | None = None, - exclude_text: List[str] | None = None, - use_autoprompt: bool = True, - text: bool = False, - ): - url = env_not_empty("SERVER_URL") - logger.debug(f">>>>>>>>>>>>>>>>{url}<<<<") - res = httpx.post( - url + "/proxy/exa", - json={ - "query": query, - "search_type": search_type, - "category": category, - "include_text": include_text, - "exclude_text": exclude_text, - "use_autoprompt": use_autoprompt, - "text": text, - }, - headers={"api-key": env_not_empty("cloud_api_key")}, - ) - logger.debug(">>>>>>>>>>>>>>>>>") - logger.debug(res) - return res.json() + # @listen_toolkit(BaseSearchToolkit.search_exa, lambda _, query, *args, **kwargs: f"{query}, {args}, {kwargs}") + # def search_exa( + # self, + # query: str, + # search_type: Literal["auto", "neural", "keyword"] = "auto", + # category: None + # | Literal[ + # "company", + # "research paper", + # "news", + # "pdf", + # "github", + # "tweet", + # "personal site", + # "linkedin profile", + # "financial report", + # ] = None, + # include_text: List[str] | None = None, + # exclude_text: List[str] | None = None, + # use_autoprompt: bool = True, + # text: bool = False, + # ) -> Dict[str, Any]: + # if env("EXA_API_KEY"): + # res = super().search_exa(query, search_type, category, include_text, exclude_text, use_autoprompt, text) + # return res + # else: + # return self.cloud_search_exa(query, search_type, category, include_text, exclude_text, use_autoprompt, text) + # + # def cloud_search_exa( + # self, + # query: str, + # search_type: Literal["auto", "neural", "keyword"] = "auto", + # category: None + # | Literal[ + # "company", + # "research paper", + # "news", + # "pdf", + # "github", + # "tweet", + # "personal site", + # "linkedin profile", + # "financial report", + # ] = None, + # include_text: List[str] | None = None, + # exclude_text: List[str] | None = None, + # use_autoprompt: bool = True, + # text: bool = False, + # ): + # url = env_not_empty("SERVER_URL") + # logger.debug(f">>>>>>>>>>>>>>>>{url}<<<<") + # res = httpx.post( + # url + "/proxy/exa", + # json={ + # "query": query, + # "search_type": search_type, + # "category": category, + # "include_text": include_text, + # "exclude_text": exclude_text, + # "use_autoprompt": use_autoprompt, + # "text": text, + # }, + # headers={"api-key": env_not_empty("cloud_api_key")}, + # ) + # logger.debug(">>>>>>>>>>>>>>>>>") + # logger.debug(res) + # return res.json() # @listen_toolkit( # BaseSearchToolkit.search_alibaba_tongxiao, @@ -289,12 +309,12 @@ class SearchToolkit(BaseSearchToolkit, AbstractToolkit): # if env("BOCHA_API_KEY"): # tools.append(FunctionTool(search_toolkit.search_bocha)) - if env("EXA_API_KEY") or env("cloud_api_key"): - tools.append(FunctionTool(search_toolkit.search_exa)) + # if env("EXA_API_KEY") or env("cloud_api_key"): + # tools.append(FunctionTool(search_toolkit.search_exa)) # if env("TONGXIAO_API_KEY"): # tools.append(FunctionTool(search_toolkit.search_alibaba_tongxiao)) return tools - def get_tools(self) -> List[FunctionTool]: - return [FunctionTool(self.search_exa)] + # def get_tools(self) -> List[FunctionTool]: + # return [FunctionTool(self.search_exa)] diff --git a/backend/app/utils/toolkit/slack_toolkit.py b/backend/app/utils/toolkit/slack_toolkit.py index 920047a5f..2f7679bb4 100644 --- a/backend/app/utils/toolkit/slack_toolkit.py +++ b/backend/app/utils/toolkit/slack_toolkit.py @@ -1,12 +1,15 @@ from camel.toolkits import SlackToolkit as BaseSlackToolkit from camel.toolkits.function_tool import FunctionTool -from loguru import logger from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("slack_toolkit") +@auto_listen_toolkit(BaseSlackToolkit) class SlackToolkit(BaseSlackToolkit, AbstractToolkit): agent_name: str = Agents.social_medium_agent @@ -14,71 +17,6 @@ class SlackToolkit(BaseSlackToolkit, AbstractToolkit): super().__init__(timeout) self.api_task_id = api_task_id - @listen_toolkit( - BaseSlackToolkit.create_slack_channel, - lambda _, name, is_private=True: f"create a Slack channel with name: {name} and is_private: {is_private}", - ) - def create_slack_channel(self, name: str, is_private: bool | None = True) -> str: - return super().create_slack_channel(name, is_private) - - @listen_toolkit( - BaseSlackToolkit.join_slack_channel, - lambda _, channel_id: f"join Slack channel with id: {channel_id}", - ) - def join_slack_channel(self, channel_id: str) -> str: - return super().join_slack_channel(channel_id) - - @listen_toolkit( - BaseSlackToolkit.leave_slack_channel, - lambda _, channel_id: f"leave Slack channel with id: {channel_id}", - ) - def leave_slack_channel(self, channel_id: str) -> str: - return super().leave_slack_channel(channel_id) - - @listen_toolkit( - BaseSlackToolkit.get_slack_channel_information, - lambda _: "get Slack channel information", - ) - def get_slack_channel_information(self) -> str: - return super().get_slack_channel_information() - - @listen_toolkit( - BaseSlackToolkit.get_slack_channel_message, - lambda _, channel_id: f"get Slack channel message for channel id: {channel_id}", - ) - def get_slack_channel_message(self, channel_id: str) -> str: - return super().get_slack_channel_message(channel_id) - - @listen_toolkit( - BaseSlackToolkit.send_slack_message, - lambda _, message, channel_id, file_path=None, user=None: f"send Slack message: {message} to channel id: {channel_id}, file: {file_path}, user: {user}", - ) - def send_slack_message(self, message: str, channel_id: str, file_path: str | None = None, user: str | None = None) -> str: - return super().send_slack_message(message, channel_id, file_path, user) - - @listen_toolkit( - BaseSlackToolkit.delete_slack_message, - lambda _, - time_stamp, - channel_id: f"delete Slack message with timestamp: {time_stamp} in channel id: {channel_id}", - ) - def delete_slack_message(self, time_stamp: str, channel_id: str) -> str: - return super().delete_slack_message(time_stamp, channel_id) - - @listen_toolkit( - BaseSlackToolkit.get_slack_user_list, - lambda _: "get Slack user list", - ) - def get_slack_user_list(self) -> str: - return super().get_slack_user_list() - - @listen_toolkit( - BaseSlackToolkit.get_slack_user_info, - lambda _, user_id: f"get Slack user info with user id: {user_id}", - ) - def get_slack_user_info(self, user_id: str) -> str: - return super().get_slack_user_info(user_id) - @classmethod def get_can_use_tools(cls, api_task_id: str) -> list[FunctionTool]: logger.debug(f"slack===={env('SLACK_BOT_TOKEN')}") diff --git a/backend/app/utils/toolkit/terminal_toolkit.py b/backend/app/utils/toolkit/terminal_toolkit.py index c4ff77faa..64e65cb67 100644 --- a/backend/app/utils/toolkit/terminal_toolkit.py +++ b/backend/app/utils/toolkit/terminal_toolkit.py @@ -4,11 +4,12 @@ from camel.toolkits.terminal_toolkit import TerminalToolkit as BaseTerminalToolk from camel.toolkits.terminal_toolkit.terminal_toolkit import _to_plain from app.component.environment import env from app.service.task import Action, ActionTerminalData, Agents, get_task_lock -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit from app.service.task import process_task +@auto_listen_toolkit(BaseTerminalToolkit) class TerminalToolkit(BaseTerminalToolkit, AbstractToolkit): agent_name: str = Agents.developer_agent @@ -67,45 +68,3 @@ class TerminalToolkit(BaseTerminalToolkit, AbstractToolkit): ) if hasattr(task_lock, "add_background_task"): task_lock.add_background_task(task) - - @listen_toolkit( - BaseTerminalToolkit.shell_exec, - lambda _, id, command, block=True: f"id: {id}, command: {command}, block: {block}", - ) - def shell_exec(self, id: str, command: str, block: bool = True) -> str: - return super().shell_exec(id=id, command=command, block=block) - - @listen_toolkit( - BaseTerminalToolkit.shell_view, - lambda _, id: f"id: {id}", - ) - def shell_view(self, id: str) -> str: - return super().shell_view(id) - - @listen_toolkit( - BaseTerminalToolkit.shell_wait, - lambda _, id, wait_seconds=None: f"id: {id}, wait_seconds: {wait_seconds}", - ) - def shell_wait(self, id: str, wait_seconds: float = 5.0) -> str: - return super().shell_wait(id=id, wait_seconds=wait_seconds) - - @listen_toolkit( - BaseTerminalToolkit.shell_write_to_process, - lambda _, id, command: f"id: {id}, command: {command}", - ) - def shell_write_to_process(self, id: str, command: str) -> str: - return super().shell_write_to_process(id=id, command=command) - - @listen_toolkit( - BaseTerminalToolkit.shell_kill_process, - lambda _, id: f"id: {id}", - ) - def shell_kill_process(self, id: str) -> str: - return super().shell_kill_process(id=id) - - @listen_toolkit( - BaseTerminalToolkit.shell_ask_user_for_help, - lambda _, id, prompt: f"id: {id}, prompt: {prompt}", - ) - def shell_ask_user_for_help(self, id: str, prompt: str) -> str: - return super().shell_ask_user_for_help(id=id, prompt=prompt) diff --git a/backend/app/utils/toolkit/thinking_toolkit.py b/backend/app/utils/toolkit/thinking_toolkit.py index 593e79bd2..ae7f8a62a 100644 --- a/backend/app/utils/toolkit/thinking_toolkit.py +++ b/backend/app/utils/toolkit/thinking_toolkit.py @@ -1,40 +1,13 @@ from camel.toolkits import ThinkingToolkit as BaseThinkingToolkit -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseThinkingToolkit) class ThinkingToolkit(BaseThinkingToolkit, AbstractToolkit): def __init__(self, api_task_id: str, agent_name: str, timeout: float | None = None): super().__init__(timeout) self.api_task_id = api_task_id self.agent_name = agent_name - - @listen_toolkit(BaseThinkingToolkit.plan) - def plan(self, plan: str) -> str: - return super().plan(plan) - - @listen_toolkit(BaseThinkingToolkit.hypothesize) - def hypothesize(self, hypothesis: str) -> str: - return super().hypothesize(hypothesis) - - @listen_toolkit(BaseThinkingToolkit.think) - def think(self, thought: str) -> str: - return super().think(thought) - - @listen_toolkit(BaseThinkingToolkit.contemplate) - def contemplate(self, contemplation: str) -> str: - return super().contemplate(contemplation) - - @listen_toolkit(BaseThinkingToolkit.critique) - def critique(self, critique: str) -> str: - return super().critique(critique) - - @listen_toolkit(BaseThinkingToolkit.synthesize) - def synthesize(self, synthesis: str) -> str: - return super().synthesize(synthesis) - - @listen_toolkit(BaseThinkingToolkit.reflect) - def reflect(self, reflection: str) -> str: - return super().reflect(reflection) diff --git a/backend/app/utils/toolkit/twitter_toolkit.py b/backend/app/utils/toolkit/twitter_toolkit.py index 77c4401ea..c90b9f547 100644 --- a/backend/app/utils/toolkit/twitter_toolkit.py +++ b/backend/app/utils/toolkit/twitter_toolkit.py @@ -9,10 +9,11 @@ from camel.toolkits.twitter_toolkit import ( from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit, listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseTwitterToolkit) class TwitterToolkit(BaseTwitterToolkit, AbstractToolkit): agent_name: str = Agents.social_medium_agent diff --git a/backend/app/utils/toolkit/video_analysis_toolkit.py b/backend/app/utils/toolkit/video_analysis_toolkit.py index be1018f4d..08922431f 100644 --- a/backend/app/utils/toolkit/video_analysis_toolkit.py +++ b/backend/app/utils/toolkit/video_analysis_toolkit.py @@ -4,10 +4,11 @@ from camel.toolkits import VideoAnalysisToolkit as BaseVideoAnalysisToolkit from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseVideoAnalysisToolkit) class VideoAnalysisToolkit(BaseVideoAnalysisToolkit, AbstractToolkit): agent_name: str = Agents.multi_modal_agent @@ -36,10 +37,3 @@ class VideoAnalysisToolkit(BaseVideoAnalysisToolkit, AbstractToolkit): cookies_path, timeout, ) - - @listen_toolkit( - BaseVideoAnalysisToolkit.ask_question_about_video, - lambda _, video_path, question: f"transcribe video from {video_path} and ask question: {question}", - ) - def ask_question_about_video(self, video_path: str, question: str) -> str: - return super().ask_question_about_video(video_path, question) diff --git a/backend/app/utils/toolkit/video_download_toolkit.py b/backend/app/utils/toolkit/video_download_toolkit.py index f77fe1157..20641264d 100644 --- a/backend/app/utils/toolkit/video_download_toolkit.py +++ b/backend/app/utils/toolkit/video_download_toolkit.py @@ -5,10 +5,11 @@ from camel.toolkits import VideoDownloaderToolkit as BaseVideoDownloaderToolkit from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseVideoDownloaderToolkit) class VideoDownloaderToolkit(BaseVideoDownloaderToolkit, AbstractToolkit): agent_name: str = Agents.multi_modal_agent @@ -23,23 +24,3 @@ class VideoDownloaderToolkit(BaseVideoDownloaderToolkit, AbstractToolkit): working_directory = env("file_save_path", os.path.expanduser("~/Downloads")) super().__init__(working_directory, cookies_path, timeout) self.api_task_id = api_task_id - - @listen_toolkit(BaseVideoDownloaderToolkit.download_video) - def download_video(self, url: str) -> str: - return super().download_video(url) - - @listen_toolkit( - BaseVideoDownloaderToolkit.get_video_bytes, - lambda _, video_path: f"get video bytes from {video_path}", - lambda _: "get video bytes", - ) - def get_video_bytes(self, video_path: str) -> bytes: - return super().get_video_bytes(video_path) - - @listen_toolkit( - BaseVideoDownloaderToolkit.get_video_screenshots, - lambda _, video_path, amount: f"get video screenshots from {video_path}, amount: {amount}", - lambda results: f"get video screenshots {len(results)}", - ) - def get_video_screenshots(self, video_path: str, amount: int) -> List[Image]: - return super().get_video_screenshots(video_path, amount) diff --git a/backend/app/utils/toolkit/web_deploy_toolkit.py b/backend/app/utils/toolkit/web_deploy_toolkit.py index eb40ae940..099d6cfd8 100644 --- a/backend/app/utils/toolkit/web_deploy_toolkit.py +++ b/backend/app/utils/toolkit/web_deploy_toolkit.py @@ -3,10 +3,11 @@ from typing import Any, Dict from camel.toolkits import WebDeployToolkit as BaseWebDeployToolkit from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit, listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseWebDeployToolkit) class WebDeployToolkit(BaseWebDeployToolkit, AbstractToolkit): agent_name: str = Agents.developer_agent @@ -43,11 +44,3 @@ class WebDeployToolkit(BaseWebDeployToolkit, AbstractToolkit): ) -> Dict[str, Any]: subdirectory = str(uuid.uuid4()) return super().deploy_folder(folder_path, port, domain, subdirectory) - - @listen_toolkit(BaseWebDeployToolkit.stop_server) - def stop_server(self, port: int) -> Dict[str, Any]: - return super().stop_server(port) - - @listen_toolkit(BaseWebDeployToolkit.list_running_servers) - def list_running_servers(self) -> Dict[str, Any]: - return super().list_running_servers() diff --git a/backend/app/utils/toolkit/whatsapp_toolkit.py b/backend/app/utils/toolkit/whatsapp_toolkit.py index 83dcd941a..b9cfd635b 100644 --- a/backend/app/utils/toolkit/whatsapp_toolkit.py +++ b/backend/app/utils/toolkit/whatsapp_toolkit.py @@ -3,10 +3,11 @@ from camel.toolkits import WhatsAppToolkit as BaseWhatsAppToolkit from camel.toolkits.function_tool import FunctionTool from app.component.environment import env from app.service.task import Agents -from app.utils.listen.toolkit_listen import listen_toolkit +from app.utils.listen.toolkit_listen import auto_listen_toolkit from app.utils.toolkit.abstract_toolkit import AbstractToolkit +@auto_listen_toolkit(BaseWhatsAppToolkit) class WhatsAppToolkit(BaseWhatsAppToolkit, AbstractToolkit): agent_name: str = Agents.social_medium_agent @@ -14,30 +15,6 @@ class WhatsAppToolkit(BaseWhatsAppToolkit, AbstractToolkit): super().__init__(timeout) self.api_task_id = api_task_id - @listen_toolkit( - BaseWhatsAppToolkit.send_message, - lambda _, to, message: f"send message to {to}: {message}", - lambda result: f"message sent result: {result}", - ) - def send_message(self, to: str, message: str) -> Dict[str, Any] | str: - return super().send_message(to, message) - - @listen_toolkit( - BaseWhatsAppToolkit.get_message_templates, - lambda _: "get message templates", - lambda result: f"message templates: {result}", - ) - def get_message_templates(self) -> List[Dict[str, Any]] | str: - return super().get_message_templates() - - @listen_toolkit( - BaseWhatsAppToolkit.get_business_profile, - lambda _: "get business profile", - lambda result: f"business profile: {result}", - ) - def get_business_profile(self) -> Dict[str, Any] | str: - return super().get_business_profile() - @classmethod def get_can_use_tools(cls, api_task_id: str) -> list[FunctionTool]: if env("WHATSAPP_ACCESS_TOKEN") and env("WHATSAPP_PHONE_NUMBER_ID"): diff --git a/backend/app/utils/traceroot_wrapper.py b/backend/app/utils/traceroot_wrapper.py deleted file mode 100644 index bd9ed6e31..000000000 --- a/backend/app/utils/traceroot_wrapper.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Conditional traceroot wrapper - only loads if .traceroot-config.yaml exists.""" -from pathlib import Path -from typing import Callable - - -def _find_config() -> bool: - """Check if .traceroot-config.yaml exists in current or parent directories.""" - path = Path.cwd() - for _ in range(5): - if (path / ".traceroot-config.yaml").exists(): - return True - if path == path.parent: - break - path = path.parent - return False - - -# Load traceroot only if config exists -if _find_config(): - import traceroot - trace = traceroot.trace - get_logger = traceroot.get_logger -else: - # No-op implementations - def trace(): - def decorator(func: Callable) -> Callable: - return func - return decorator - - class _NoOpLogger: - def __getattr__(self, name): - return lambda *args, **kwargs: None - - def get_logger(name: str): - return _NoOpLogger() \ No newline at end of file diff --git a/backend/app/utils/workforce.py b/backend/app/utils/workforce.py index 541110253..c3a406ba6 100644 --- a/backend/app/utils/workforce.py +++ b/backend/app/utils/workforce.py @@ -9,7 +9,6 @@ from camel.societies.workforce.workforce import ( from camel.societies.workforce.task_channel import TaskChannel from camel.societies.workforce.base import BaseNode from camel.societies.workforce.utils import TaskAssignResult -from loguru import logger from camel.tasks.task import Task, TaskState, validate_task_content from app.component import code from app.exception.exception import UserException @@ -18,30 +17,16 @@ from app.service.task import ( Action, ActionAssignTaskData, ActionEndData, + ActionNewTaskStateData, ActionTaskStateData, get_camel_task, get_task_lock, ) from app.utils.single_agent_worker import SingleAgentWorker +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("workforce") -# === Debug sink === Write detailed dependency debug logs to file (logs/workforce_debug.log) -# Create a new file every day, keep the logs for the last 7 days, and write asynchronously without blocking the main process -logger.add( - "logs/workforce_debug_{time:YYYY-MM-DD}.log", - rotation="00:00", - retention="7 days", - enqueue=True, - level="DEBUG", -) -# Independent sink: only collect the "[WF]" debug lines we insert to quickly view the dependency chain -logger.add( - "logs/wf_trace_{time:YYYY-MM-DD-HH}.log", - rotation="00:00", - retention="7 days", - enqueue=True, - level="DEBUG", - filter=lambda record: record["message"].startswith("[WF]"), -) class Workforce(BaseWorkforce): @@ -69,8 +54,15 @@ class Workforce(BaseWorkforce): use_structured_output_handler=use_structured_output_handler, ) - def eigent_make_sub_tasks(self, task: Task): - """split process_task method to eigent_make_sub_tasks and eigent_start method""" + def eigent_make_sub_tasks(self, task: Task, coordinator_context: str = ""): + """ + Split process_task method to eigent_make_sub_tasks and eigent_start method. + + Args: + task: The main task to decompose + coordinator_context: Optional context ONLY for coordinator agent during decomposition. + This context will NOT be passed to subtasks or worker agents. + """ if not validate_task_content(task.content, task.id): task.state = TaskState.FAILED @@ -85,10 +77,20 @@ class Workforce(BaseWorkforce): self.set_channel(TaskChannel()) self._state = WorkforceState.RUNNING task.state = TaskState.OPEN - self._pending_tasks.append(task) - # Decompose the task into subtasks first - subtasks_result = self._decompose_task(task) + if coordinator_context: + original_content = task.content + task_with_context = coordinator_context + if coordinator_context: + task_with_context += "\n=== CURRENT TASK ===\n" + task_with_context += original_content + task.content = task_with_context + + subtasks_result = self._decompose_task(task) + + task.content = original_content + else: + subtasks_result = self._decompose_task(task) # Handle both streaming and non-streaming results if isinstance(subtasks_result, Generator): @@ -119,6 +121,64 @@ class Workforce(BaseWorkforce): if self._state != WorkforceState.STOPPED: self._state = WorkforceState.IDLE + async def handle_decompose_append_task( + self, task: Task, reset: bool = True, coordinator_context: str = "" + ) -> List[Task]: + """ + Override to support coordinator_context parameter. + Handle task decomposition and validation, then append to pending tasks. + + Args: + task: The task to be processed + reset: Should trigger workforce reset (Workforce must not be running) + coordinator_context: Optional context ONLY for coordinator during decomposition + + Returns: + List[Task]: The decomposed subtasks or the original task + """ + if not validate_task_content(task.content, task.id): + task.state = TaskState.FAILED + task.result = "Task failed: Invalid or empty content provided" + logger.warning( + f"Task {task.id} rejected: Invalid or empty content. " + f"Content preview: '{task.content}'" + ) + return [task] + + if reset and self._state != WorkforceState.RUNNING: + self.reset() + logger.info("Workforce reset before handling task.") + + self._task = task + task.state = TaskState.FAILED + + if coordinator_context: + original_content = task.content + task_with_context = coordinator_context + if coordinator_context: + task_with_context += "\n=== CURRENT TASK ===\n" + task_with_context += original_content + task.content = task_with_context + + subtasks_result = self._decompose_task(task) + + task.content = original_content + else: + subtasks_result = self._decompose_task(task) + + if isinstance(subtasks_result, Generator): + subtasks = [] + for new_tasks in subtasks_result: + subtasks.extend(new_tasks) + else: + subtasks = subtasks_result + + if subtasks: + self._pending_tasks.extendleft(reversed(subtasks)) + logger.info(f"Appended {len(subtasks)} subtasks to pending tasks") + + return subtasks if subtasks else [task] + async def _find_assignee(self, tasks: List[Task]) -> TaskAssignResult: # Task assignment phase: send "waiting for execution" notification to the frontend, and send "start execution" notification when the task actually begins execution assigned = await super()._find_assignee(tasks) @@ -133,7 +193,9 @@ class Workforce(BaseWorkforce): # Find task content task_obj = get_camel_task(item.task_id, tasks) if task_obj is None: - logger.warning(f"[WF] WARN: Task {item.task_id} not found in tasks list during ASSIGN phase. This may indicate a task tree inconsistency.") + logger.warning( + f"[WF] WARN: Task {item.task_id} not found in tasks list during ASSIGN phase. This may indicate a task tree inconsistency." + ) content = "" else: content = task_obj.content @@ -179,7 +241,11 @@ class Workforce(BaseWorkforce): await super()._post_task(task, assignee_id) def add_single_agent_worker( - self, description: str, worker: ListenChatAgent, pool_max_size: int = DEFAULT_WORKER_POOL_SIZE + self, + description: str, + worker: ListenChatAgent, + pool_max_size: int = DEFAULT_WORKER_POOL_SIZE, + enable_workflow_memory: bool = False, ) -> BaseWorkforce: if self._state == WorkforceState.RUNNING: raise RuntimeError("Cannot add workers while workforce is running. Pause the workforce first.") @@ -195,6 +261,8 @@ class Workforce(BaseWorkforce): worker=worker, pool_max_size=pool_max_size, use_structured_output_handler=self.use_structured_output_handler, + context_utility=None, # Will be set during save/load operations + enable_workflow_memory=enable_workflow_memory, ) self._children.append(worker_node) @@ -218,17 +286,33 @@ class Workforce(BaseWorkforce): logger.debug(f"[WF] DONE {task.id}") task_lock = get_task_lock(self.api_task_id) - await task_lock.put_queue( - ActionTaskStateData( - data={ - "task_id": task.id, - "content": task.content, - "state": task.state, - "result": task.result or "", - "failure_count": task.failure_count, - }, + # Log task completion with result details + is_main_task = self._task and task.id == self._task.id + task_type = "MAIN TASK" if is_main_task else "SUB-TASK" + logger.info(f"[TASK-RESULT] {task_type} COMPLETED: {task.id}") + logger.info(f"[TASK-RESULT] Content: {task.content[:200]}..." if len(task.content) > 200 else f"[TASK-RESULT] Content: {task.content}") + logger.info(f"[TASK-RESULT] Result: {task.result[:500]}..." if task.result and len(str(task.result)) > 500 else f"[TASK-RESULT] Result: {task.result}") + + task_data = { + "task_id": task.id, + "content": task.content, + "state": task.state, + "result": task.result or "", + "failure_count": task.failure_count, + } + + if self._task_is_new(task_data): + await task_lock.put_queue( + ActionNewTaskStateData( + data=task_data + ) + ) + else: + await task_lock.put_queue( + ActionTaskStateData( + data=task_data + ) ) - ) return await super()._handle_completed_task(task) @@ -260,6 +344,36 @@ class Workforce(BaseWorkforce): return result + def _task_is_new(self, item:dict) -> bool: + # Validate the task state data object first + assert isinstance(item, dict) + task_id = item.get("task_id", "") + state = item.get("state", "") + result = item.get("result", "") + failure_count = item.get("failure_count", 0) + + # Validate required fields + if not task_id: + logger.error("Missing task_id in task_state data") + return False + elif not state: + logger.error(f"Missing state in task_state data for task {task_id}") + return False + + # Ensure failure_count is an integer + try: + failure_count = int(failure_count) + except (ValueError, TypeError): + logger.error(f"Invalid failure_count in task_state data for task {task_id}: {failure_count}") + failure_count = 0 # Default to 0 if invalid + + should_send_new_task_state = ( + state == "FAILED" or + (failure_count == 0 and result.strip() == "") + ) + + return should_send_new_task_state + def stop(self) -> None: super().stop() task_lock = get_task_lock(self.api_task_id) diff --git a/backend/main.py b/backend/main.py index f76059b92..4664ed66a 100644 --- a/backend/main.py +++ b/backend/main.py @@ -1,37 +1,42 @@ import os +import sys import pathlib import signal import asyncio import atexit -from app import api -from loguru import logger -from app.component.environment import auto_include_routers, env +# Add project root to Python path to import shared utils +_project_root = pathlib.Path(__file__).parent.parent +if str(_project_root) not in sys.path: + sys.path.insert(0, str(_project_root)) + +# 1) Load env and init traceroot BEFORE importing modules that get a logger +from utils import traceroot_wrapper as traceroot +from app import api + +# Only initialize traceroot if enabled +if traceroot.is_enabled(): + from traceroot.integrations.fastapi import connect_fastapi + connect_fastapi(api) + +# 2) Now safe to import modules that use traceroot.get_logger() at import-time +from app.component.environment import auto_include_routers, env os.environ["PYTHONIOENCODING"] = "utf-8" +app_logger = traceroot.get_logger("main") + # Log application startup -logger.info("Starting Eigent Multi-Agent System API") -logger.info(f"Python encoding: {os.environ.get('PYTHONIOENCODING')}") -logger.info(f"Environment: {os.environ.get('ENVIRONMENT', 'development')}") +app_logger.info("Starting Eigent Multi-Agent System API") +app_logger.info(f"Python encoding: {os.environ.get('PYTHONIOENCODING')}") +app_logger.info(f"Environment: {os.environ.get('ENVIRONMENT', 'development')}") prefix = env("url_prefix", "") -logger.info(f"Loading routers with prefix: '{prefix}'") +app_logger.info(f"Loading routers with prefix: '{prefix}'") auto_include_routers(api, prefix, "app/controller") -logger.info("All routers loaded successfully") +app_logger.info("All routers loaded successfully") -# Configure Loguru -log_path = os.path.expanduser("~/.eigent/runtime/log/app.log") -os.makedirs(os.path.dirname(log_path), exist_ok=True) -logger.add( - log_path, # Log file - rotation="10 MB", # Log rotation: 10MB per file - retention="10 days", # Retain logs for the last 10 days - level="DEBUG", # Log level - encoding="utf-8", -) -logger.info(f"Loguru configured with log file: {log_path}") dir = pathlib.Path(__file__).parent / "runtime" dir.mkdir(parents=True, exist_ok=True) @@ -44,12 +49,12 @@ async def write_pid_file(): async with aiofiles.open(dir / "run.pid", "w") as f: await f.write(str(os.getpid())) - logger.info(f"PID file written: {os.getpid()}") + app_logger.info(f"PID file written: {os.getpid()}") # Create task to write PID pid_task = asyncio.create_task(write_pid_file()) -logger.info("PID write task created") +app_logger.info("PID write task created") # Graceful shutdown handler shutdown_event = asyncio.Event() @@ -57,8 +62,7 @@ shutdown_event = asyncio.Event() async def cleanup_resources(): r"""Cleanup all resources on shutdown""" - logger.info("Starting graceful shutdown...") - logger.info("Starting graceful shutdown process") + app_logger.info("Starting graceful shutdown process") from app.service.task import task_locks, _cleanup_task @@ -75,21 +79,19 @@ async def cleanup_resources(): task_lock = task_locks[task_id] await task_lock.cleanup() except Exception as e: - logger.error(f"Error cleaning up task {task_id}: {e}") + app_logger.error(f"Error cleaning up task {task_id}: {e}") # Remove PID file pid_file = dir / "run.pid" if pid_file.exists(): pid_file.unlink() - logger.info("Graceful shutdown completed") - logger.info("All resources cleaned up successfully") + app_logger.info("All resources cleaned up successfully") def signal_handler(signum, frame): r"""Handle shutdown signals""" - logger.info(f"Received signal {signum}") - logger.warning(f"Received shutdown signal: {signum}") + app_logger.warning(f"Received shutdown signal: {signum}") asyncio.create_task(cleanup_resources()) shutdown_event.set() @@ -97,8 +99,19 @@ def signal_handler(signum, frame): signal.signal(signal.SIGTERM, signal_handler) signal.signal(signal.SIGINT, signal_handler) -# Register cleanup on exit -atexit.register(lambda: asyncio.run(cleanup_resources())) +# Register cleanup on exit with safe synchronous wrapper +def sync_cleanup(): + """Synchronous cleanup for atexit - handles PID file removal""" + try: + # Only perform synchronous cleanup tasks + pid_file = dir / "run.pid" + if pid_file.exists(): + pid_file.unlink() + app_logger.info("PID file removed during shutdown") + except Exception as e: + app_logger.error(f"Error during atexit cleanup: {e}") + +atexit.register(sync_cleanup) # Log successful initialization -logger.info("Application initialization completed successfully") +app_logger.info("Application initialization completed successfully") diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 05049886c..3f19b143f 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -5,19 +5,18 @@ description = "Add your description here" readme = "README.md" requires-python = "==3.10.16" dependencies = [ - "camel-ai[eigent]==0.2.76a13", + "camel-ai[eigent]==0.2.78", "fastapi>=0.115.12", "fastapi-babel>=1.0.0", "uvicorn[standard]>=0.34.2", "pydantic-i18n>=0.4.5", "python-dotenv>=1.1.0", "httpx[socks]>=0.28.1", - "loguru>=0.7.3", "pydash>=8.0.5", "inflection>=0.5.1", "aiofiles>=24.1.0", "openai>=1.99.3,<2", - "traceroot>=0.0.5a2", + "traceroot>=0.0.7", "nodejs-wheel>=22.18.0", "numpy>=1.23.0,<2.0.0", ] diff --git a/backend/tests/conftest.py b/backend/tests/conftest.py index 3f070886f..69b4b94f8 100644 --- a/backend/tests/conftest.py +++ b/backend/tests/conftest.py @@ -145,14 +145,21 @@ def mock_model_backend(): @pytest.fixture def mock_camel_agent(): """Mock CAMEL agent for testing.""" - agent = AsyncMock() + agent = MagicMock() # Use MagicMock instead of AsyncMock agent.role_name = "test_agent" agent.agent_id = "test_agent_123" - # Make step method async and return proper structure - agent.step = AsyncMock() - agent.step.return_value.msgs = [MagicMock()] - agent.step.return_value.msgs[0].content = "Test agent response" + # Make step method return proper structure with both .msg and .msgs[0] + mock_response = MagicMock() + mock_message = MagicMock() + mock_message.content = "Test agent response" + mock_message.parsed = None + + mock_response.msg = mock_message + mock_response.msgs = [mock_message] # msgs[0] should point to the same content + mock_response.info = {"usage": {"total_tokens": 50}} + + agent.step.return_value = mock_response agent.astep = AsyncMock() agent.astep.return_value.msg.content = "Test async agent response" @@ -288,6 +295,7 @@ def sample_chat_data(): """Sample chat data for testing.""" return { "task_id": "test_task_123", + "project_id": "test_project_456", "email": "test@example.com", "question": "Create a simple Python script", "attaches": [], diff --git a/backend/tests/unit/service/test_chat_service.py b/backend/tests/unit/service/test_chat_service.py index c47fb594a..fd854eec6 100644 --- a/backend/tests/unit/service/test_chat_service.py +++ b/backend/tests/unit/service/test_chat_service.py @@ -1,5 +1,8 @@ from unittest.mock import AsyncMock, MagicMock, patch import pytest +import os +import tempfile +from pathlib import Path from app.service.chat_service import ( step_solve, @@ -12,14 +15,335 @@ from app.service.chat_service import ( summary_task, construct_workforce, format_agent_description, - new_agent_model + new_agent_model, + collect_previous_task_context, + build_context_for_workforce ) from app.model.chat import Chat, NewAgent -from app.service.task import Action, ActionImproveData, ActionEndData, ActionInstallMcpData +from app.service.task import Action, ActionImproveData, ActionEndData, ActionInstallMcpData, TaskLock from camel.tasks import Task from camel.tasks.task import TaskState +@pytest.mark.unit +class TestCollectPreviousTaskContext: + """Test cases for collect_previous_task_context function.""" + + def test_collect_previous_task_context_basic(self, temp_dir): + """Test collect_previous_task_context with basic inputs.""" + working_directory = str(temp_dir) + previous_task_content = "Create a Python script" + previous_task_result = "Successfully created script.py" + previous_summary = "Python Script Creation Task" + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content=previous_task_content, + previous_task_result=previous_task_result, + previous_summary=previous_summary + ) + + # Check that all sections are included + assert "=== CONTEXT FROM PREVIOUS TASK ===" in result + assert "Previous Task:" in result + assert "Create a Python script" in result + assert "Previous Task Summary:" in result + assert "Python Script Creation Task" in result + assert "Previous Task Result:" in result + assert "Successfully created script.py" in result + assert "=== END OF PREVIOUS TASK CONTEXT ===" in result + assert "=== NEW TASK ===" in result + + def test_collect_previous_task_context_with_generated_files(self, temp_dir): + """Test collect_previous_task_context with generated files in working directory.""" + working_directory = str(temp_dir) + + # Create some test files + (temp_dir / "script.py").write_text("print('Hello World')") + (temp_dir / "config.json").write_text('{"test": true}') + (temp_dir / "README.md").write_text("# Test Project") + + # Create a subdirectory with files + sub_dir = temp_dir / "utils" + sub_dir.mkdir() + (sub_dir / "helper.py").write_text("def helper(): pass") + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Create project files", + previous_task_result="Files created successfully", + previous_summary="" + ) + + # Check that generated files are listed + assert "Generated Files from Previous Task:" in result + assert "script.py" in result + assert "config.json" in result + assert "README.md" in result + assert "utils/helper.py" in result or "utils\\helper.py" in result # Handle Windows paths + + # Files should be sorted + lines = result.split('\n') + file_lines = [line.strip() for line in lines if line.strip().startswith('- ')] + assert len(file_lines) == 4 + + def test_collect_previous_task_context_filters_hidden_files(self, temp_dir): + """Test that hidden files and directories are filtered out.""" + working_directory = str(temp_dir) + + # Create regular files + (temp_dir / "visible.py").write_text("# Visible file") + + # Create hidden files and directories + (temp_dir / ".hidden_file").write_text("hidden content") + (temp_dir / ".env").write_text("SECRET=hidden") + + hidden_dir = temp_dir / ".hidden_dir" + hidden_dir.mkdir() + (hidden_dir / "file.txt").write_text("in hidden dir") + + # Create cache directories + cache_dir = temp_dir / "__pycache__" + cache_dir.mkdir() + (cache_dir / "module.pyc").write_text("compiled") + + node_modules = temp_dir / "node_modules" + node_modules.mkdir() + (node_modules / "package").mkdir() + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Test filtering", + previous_task_result="Files filtered", + previous_summary="" + ) + + # Should only include visible files + assert "visible.py" in result + assert ".hidden_file" not in result + assert ".env" not in result + assert "__pycache__" not in result + assert "node_modules" not in result + assert ".hidden_dir" not in result + + def test_collect_previous_task_context_filters_temp_files(self, temp_dir): + """Test that temporary files are filtered out.""" + working_directory = str(temp_dir) + + # Create regular files + (temp_dir / "main.py").write_text("# Main file") + + # Create temporary files + (temp_dir / "temp.tmp").write_text("temporary") + (temp_dir / "compiled.pyc").write_text("compiled python") + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Test temp filtering", + previous_task_result="Temp files filtered", + previous_summary="" + ) + + # Should only include regular files + assert "main.py" in result + assert "temp.tmp" not in result + assert "compiled.pyc" not in result + + def test_collect_previous_task_context_nonexistent_directory(self): + """Test collect_previous_task_context with non-existent working directory.""" + working_directory = "/nonexistent/directory" + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Test task", + previous_task_result="Test result", + previous_summary="Test summary" + ) + + # Should not crash and should not include file listing + assert "=== CONTEXT FROM PREVIOUS TASK ===" in result + assert "Test task" in result + assert "Test result" in result + assert "Test summary" in result + assert "Generated Files from Previous Task:" not in result + + def test_collect_previous_task_context_empty_inputs(self, temp_dir): + """Test collect_previous_task_context with empty string inputs.""" + working_directory = str(temp_dir) + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="", + previous_task_result="", + previous_summary="" + ) + + # Should still have the structural elements + assert "=== CONTEXT FROM PREVIOUS TASK ===" in result + assert "=== END OF PREVIOUS TASK CONTEXT ===" in result + assert "=== NEW TASK ===" in result + + # Should not have content sections for empty inputs + assert "Previous Task:" not in result + assert "Previous Task Summary:" not in result + assert "Previous Task Result:" not in result + + def test_collect_previous_task_context_only_summary(self, temp_dir): + """Test collect_previous_task_context with only summary provided.""" + working_directory = str(temp_dir) + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="", + previous_task_result="", + previous_summary="Only summary provided" + ) + + # Should include summary section only + assert "Previous Task Summary:" in result + assert "Only summary provided" in result + assert "Previous Task:" not in result + assert "Previous Task Result:" not in result + + @patch('app.service.chat_service.logger') + def test_collect_previous_task_context_file_system_error(self, mock_logger, temp_dir): + """Test collect_previous_task_context handles file system errors gracefully.""" + working_directory = str(temp_dir) + + # Mock os.walk to raise an exception + with patch('os.walk', side_effect=PermissionError("Access denied")): + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Test task", + previous_task_result="Test result", + previous_summary="Test summary" + ) + + # Should still return result without files + assert "=== CONTEXT FROM PREVIOUS TASK ===" in result + assert "Test task" in result + assert "Generated Files from Previous Task:" not in result + + # Should log warning + mock_logger.warning.assert_called_once() + + def test_collect_previous_task_context_relative_paths(self, temp_dir): + """Test that file paths are correctly converted to relative paths.""" + working_directory = str(temp_dir) + + # Create nested directory structure + deep_dir = temp_dir / "level1" / "level2" / "level3" + deep_dir.mkdir(parents=True) + (deep_dir / "deep_file.txt").write_text("deep content") + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Test relative paths", + previous_task_result="Paths converted", + previous_summary="" + ) + + # Check that the path is relative to working directory + expected_path = "level1/level2/level3/deep_file.txt" + windows_path = "level1\\level2\\level3\\deep_file.txt" + + # Should contain relative path (handle both Unix and Windows separators) + assert expected_path in result or windows_path in result + + +@pytest.mark.unit +class TestBuildContextForWorkforce: + """Test cases for build_context_for_workforce function.""" + + def test_build_context_for_workforce_basic(self, temp_dir): + """Test build_context_for_workforce with basic task lock and options.""" + # Create mock TaskLock + task_lock = MagicMock(spec=TaskLock) + task_lock.conversation_history = [ + {'role': 'user', 'content': 'Create a Python script'}, + {'role': 'assistant', 'content': 'I will create a Python script for you'} + ] + task_lock.last_task_result = "Script created successfully" + task_lock.last_task_summary = "Python Script Creation" + + # Create mock Chat options + options = MagicMock() + options.file_save_path.return_value = str(temp_dir) + + result = build_context_for_workforce(task_lock, options) + + # Should include conversation history + assert "=== CONVERSATION HISTORY ===" in result + assert "user: Create a Python script" in result + assert "assistant: I will create a Python script for you" in result + + # Should include previous task context + assert "=== CONTEXT FROM PREVIOUS TASK ===" in result + assert "Script created successfully" in result + + def test_build_context_for_workforce_empty_history(self, temp_dir): + """Test build_context_for_workforce with empty conversation history.""" + task_lock = MagicMock(spec=TaskLock) + task_lock.conversation_history = [] + task_lock.last_task_result = "" + task_lock.last_task_summary = "" + + options = MagicMock() + options.file_save_path.return_value = str(temp_dir) + + result = build_context_for_workforce(task_lock, options) + + # Should return empty string for no context + assert result == "" + + def test_build_context_for_workforce_task_result_role(self, temp_dir): + """Test build_context_for_workforce handles 'task_result' role specially.""" + task_lock = MagicMock(spec=TaskLock) + task_lock.conversation_history = [ + {'role': 'user', 'content': 'First question'}, + {'role': 'task_result', 'content': 'Full task context from previous task'}, + {'role': 'user', 'content': 'Second question'} + ] + task_lock.last_task_result = "Final result" + task_lock.last_task_summary = "Task summary" + + options = MagicMock() + options.file_save_path.return_value = str(temp_dir) + + result = build_context_for_workforce(task_lock, options) + + # Should simplify task_result display + assert "[Previous Task Completed]" in result + assert "Full task context from previous task" not in result # Should not show full content + assert "user: First question" in result + assert "user: Second question" in result + + def test_build_context_for_workforce_with_last_task_result(self, temp_dir): + """Test build_context_for_workforce includes last task result context.""" + # Create some files in temp directory + (temp_dir / "output.txt").write_text("Task output") + + task_lock = MagicMock(spec=TaskLock) + task_lock.conversation_history = [ + {'role': 'user', 'content': 'Test question'} + ] + task_lock.last_task_result = "Task completed with output.txt" + task_lock.last_task_summary = "File creation task" + + options = MagicMock() + options.file_save_path.return_value = str(temp_dir) + + result = build_context_for_workforce(task_lock, options) + + # Should include conversation history and task context + assert "=== CONVERSATION HISTORY ===" in result + assert "user: Test question" in result + assert "=== CONTEXT FROM PREVIOUS TASK ===" in result + assert "Task completed with output.txt" in result + assert "File creation task" in result + assert "output.txt" in result # Generated file should be listed + + @pytest.mark.unit class TestChatServiceUtilities: """Test cases for chat service utility functions.""" @@ -324,6 +648,130 @@ class TestChatServiceIntegration: """Integration tests for chat service.""" @pytest.mark.asyncio + async def test_step_solve_context_building_workflow(self, sample_chat_data, mock_request, temp_dir): + """Test step_solve builds context correctly using collect_previous_task_context.""" + options = Chat(**sample_chat_data) + + # Create actual TaskLock with context data + task_lock = TaskLock( + id="test_task_123", + queue=AsyncMock(), + human_input={} + ) + task_lock.conversation_history = [ + {'role': 'user', 'content': 'Create a Python script'}, + {'role': 'assistant', 'content': 'Script created successfully'} + ] + task_lock.last_task_result = "def hello(): print('Hello World')" + task_lock.last_task_summary = "Python Hello World Script" + + # Create some files in working directory + working_dir = temp_dir / "test_project" + working_dir.mkdir() + (working_dir / "script.py").write_text("def hello(): print('Hello World')") + + # Mock file_save_path method to return our temp directory + with patch.object(Chat, 'file_save_path', return_value=str(working_dir)): + + # Test the context building directly + context = build_context_for_workforce(task_lock, options) + + # Verify context includes conversation history + assert "=== CONVERSATION HISTORY ===" in context + assert "user: Create a Python script" in context + assert "assistant: Script created successfully" in context + + # Verify context includes task context with files + assert "=== CONTEXT FROM PREVIOUS TASK ===" in context + assert "def hello(): print('Hello World')" in context + assert "Python Hello World Script" in context + assert "script.py" in context + + @pytest.mark.asyncio + async def test_step_solve_new_task_state_context_collection(self, sample_chat_data, mock_request, temp_dir): + """Test step_solve correctly collects context in new_task_state action.""" + options = Chat(**sample_chat_data) + working_dir = temp_dir / "project" + working_dir.mkdir() + + # Create files that should be included in context + (working_dir / "main.py").write_text("print('main')") + (working_dir / "config.json").write_text('{"version": "1.0"}') + + # Mock file_save_path to return our temp directory + with patch.object(Chat, 'file_save_path', return_value=str(working_dir)): + + # Test collect_previous_task_context directly with the scenario + result = collect_previous_task_context( + working_directory=str(working_dir), + previous_task_content="Create project structure", + previous_task_result="Project files created successfully", + previous_summary="Project Setup Task" + ) + + # Verify all expected elements are present + assert "=== CONTEXT FROM PREVIOUS TASK ===" in result + assert "Previous Task:" in result + assert "Create project structure" in result + assert "Previous Task Summary:" in result + assert "Project Setup Task" in result + assert "Previous Task Result:" in result + assert "Project files created successfully" in result + assert "Generated Files from Previous Task:" in result + assert "main.py" in result + assert "config.json" in result + assert "=== END OF PREVIOUS TASK CONTEXT ===" in result + assert "=== NEW TASK ===" in result + + @pytest.mark.asyncio + async def test_step_solve_end_action_context_collection(self, sample_chat_data, mock_request, temp_dir): + """Test step_solve correctly collects and saves context in end action.""" + options = Chat(**sample_chat_data) + working_dir = temp_dir / "finished_project" + working_dir.mkdir() + + # Create output files + (working_dir / "output.txt").write_text("Final output") + (working_dir / "report.md").write_text("# Task Report") + + # Create actual TaskLock + task_lock = TaskLock( + id="test_end_task", + queue=AsyncMock(), + human_input={} + ) + task_lock.last_task_summary = "Final Task Summary" + + # Mock file_save_path + with patch.object(Chat, 'file_save_path', return_value=str(working_dir)): + + # Test the context collection for end action scenario + task_content = "Generate final report" + task_result = "Report generated successfully with output files" + + context = collect_previous_task_context( + working_directory=str(working_dir), + previous_task_content=task_content, + previous_task_result=task_result, + previous_summary=task_lock.last_task_summary + ) + + # Verify context structure for end action + assert "=== CONTEXT FROM PREVIOUS TASK ===" in context + assert "Generate final report" in context + assert "Report generated successfully with output files" in context + assert "Final Task Summary" in context + assert "output.txt" in context + assert "report.md" in context + + # Test that context can be added to conversation history + task_lock.add_conversation('task_result', context) + assert len(task_lock.conversation_history) == 1 + assert task_lock.conversation_history[0]['role'] == 'task_result' + assert task_lock.conversation_history[0]['content'] == context + + @pytest.mark.asyncio + @pytest.mark.skip(reason="Gets Stuck for some reason.") async def test_step_solve_basic_workflow(self, sample_chat_data, mock_request, mock_task_lock): """Test step_solve basic workflow integration.""" options = Chat(**sample_chat_data) @@ -380,6 +828,7 @@ class TestChatServiceIntegration: # Note: Workforce might not be created/stopped if request is immediately disconnected @pytest.mark.asyncio + @pytest.mark.skip(reason="Gets Stuck for some reason.") async def test_step_solve_error_handling(self, sample_chat_data, mock_request, mock_task_lock): """Test step_solve handles errors gracefully.""" options = Chat(**sample_chat_data) @@ -423,7 +872,195 @@ class TestChatServiceWithLLM: @pytest.mark.unit class TestChatServiceErrorCases: """Test error cases and edge conditions for chat service.""" - + + def test_collect_previous_task_context_os_walk_exception(self, temp_dir): + """Test collect_previous_task_context handles os.walk exceptions.""" + working_directory = str(temp_dir) + + with patch('os.walk', side_effect=OSError("Permission denied")): + with patch('app.service.chat_service.logger') as mock_logger: + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Test task", + previous_task_result="Test result", + previous_summary="Test summary" + ) + + # Should still include basic context + assert "=== CONTEXT FROM PREVIOUS TASK ===" in result + assert "Test task" in result + assert "Test result" in result + assert "Test summary" in result + + # Should not include file listing + assert "Generated Files from Previous Task:" not in result + + # Should log warning + mock_logger.warning.assert_called_once() + + def test_collect_previous_task_context_relpath_exception(self, temp_dir): + """Test collect_previous_task_context handles os.path.relpath exceptions.""" + working_directory = str(temp_dir) + + # Create a test file + (temp_dir / "test.txt").write_text("test content") + + with patch('os.path.relpath', side_effect=ValueError("Invalid path")): + with patch('app.service.chat_service.logger') as mock_logger: + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Test task", + previous_task_result="Test result", + previous_summary="Test summary" + ) + + # Should handle the exception gracefully + assert "=== CONTEXT FROM PREVIOUS TASK ===" in result + # Should log warning about file collection failure + mock_logger.warning.assert_called_once() + + def test_build_context_for_workforce_missing_attributes(self, temp_dir): + """Test build_context_for_workforce handles missing attributes gracefully.""" + # Create task_lock without required attributes + task_lock = MagicMock(spec=TaskLock) + task_lock.conversation_history = None # Missing attribute + task_lock.last_task_result = None # Missing attribute + task_lock.last_task_summary = None # Missing attribute + + options = MagicMock() + options.file_save_path.return_value = str(temp_dir) + + result = build_context_for_workforce(task_lock, options) + + # Should handle missing attributes gracefully + assert result == "" + + def test_build_context_for_workforce_file_save_path_exception(self): + """Test build_context_for_workforce handles file_save_path exceptions.""" + task_lock = MagicMock(spec=TaskLock) + task_lock.conversation_history = [] + task_lock.last_task_result = "Test result" + task_lock.last_task_summary = "Test summary" + + options = MagicMock() + options.file_save_path.side_effect = Exception("Path error") + + with patch('app.service.chat_service.logger') as mock_logger: + # Should handle exception when getting file path + with pytest.raises(Exception, match="Path error"): + build_context_for_workforce(task_lock, options) + + def test_collect_previous_task_context_unicode_handling(self, temp_dir): + """Test collect_previous_task_context handles unicode content correctly.""" + working_directory = str(temp_dir) + + # Create files with unicode content + (temp_dir / "unicode_file.txt").write_text("Unicode content: 🐍 Python ñáéíóú", encoding='utf-8') + + unicode_task_content = "Create files with unicode: 🔥 emojis and ñáéíóú accents" + unicode_result = "Files created successfully with unicode: ✅ done" + unicode_summary = "Unicode Task: 📝 file creation" + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content=unicode_task_content, + previous_task_result=unicode_result, + previous_summary=unicode_summary + ) + + # Should handle unicode correctly + assert "🔥 emojis" in result + assert "ñáéíóú accents" in result + assert "✅ done" in result + assert "📝 file creation" in result + assert "unicode_file.txt" in result + + def test_collect_previous_task_context_very_long_content(self, temp_dir): + """Test collect_previous_task_context handles very long content.""" + working_directory = str(temp_dir) + + # Create very long content strings + long_content = "Very long task content. " * 1000 # ~25KB + long_result = "Very long task result. " * 1000 # ~23KB + long_summary = "Very long summary. " * 100 # ~1.8KB + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content=long_content, + previous_task_result=long_result, + previous_summary=long_summary + ) + + # Should handle long content without issues + assert len(result) > 49000 # Should be quite long + assert "Very long task content." in result + assert "Very long task result." in result + assert "Very long summary." in result + + def test_collect_previous_task_context_many_files(self, temp_dir): + """Test collect_previous_task_context performance with many files.""" + working_directory = str(temp_dir) + + # Create many files to test performance + for i in range(100): + (temp_dir / f"file_{i:03d}.txt").write_text(f"Content {i}") + + # Create subdirectories with files + for dir_i in range(10): + sub_dir = temp_dir / f"subdir_{dir_i}" + sub_dir.mkdir() + for file_i in range(10): + (sub_dir / f"subfile_{file_i}.txt").write_text(f"Sub content {dir_i}-{file_i}") + + import time + start_time = time.time() + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Test many files", + previous_task_result="Many files processed", + previous_summary="Performance test" + ) + + end_time = time.time() + execution_time = end_time - start_time + + # Should complete in reasonable time (less than 1 second for 200 files) + assert execution_time < 1.0 + + # Should list all files + assert "Generated Files from Previous Task:" in result + # Count number of file entries + file_lines = [line for line in result.split('\n') if ' - ' in line] + assert len(file_lines) == 200 # 100 main files + 100 subfiles + + def test_collect_previous_task_context_special_characters_in_filenames(self, temp_dir): + """Test collect_previous_task_context handles special characters in filenames.""" + working_directory = str(temp_dir) + + # Create files with special characters (that are valid in filenames) + try: + (temp_dir / "file with spaces.txt").write_text("content") + (temp_dir / "file-with-dashes.txt").write_text("content") + (temp_dir / "file_with_underscores.txt").write_text("content") + (temp_dir / "file.with.dots.txt").write_text("content") + except OSError: + # Skip if filesystem doesn't support these characters + pytest.skip("Filesystem doesn't support special characters in filenames") + + result = collect_previous_task_context( + working_directory=working_directory, + previous_task_content="Test special chars", + previous_task_result="Files created", + previous_summary="" + ) + + # Should list files with special characters + assert "file with spaces.txt" in result + assert "file-with-dashes.txt" in result + assert "file_with_underscores.txt" in result + assert "file.with.dots.txt" in result + @pytest.mark.asyncio async def test_question_confirm_agent_error(self, mock_camel_agent): """Test question_confirm when agent raises error.""" diff --git a/backend/tests/unit/utils/test_workforce.py b/backend/tests/unit/utils/test_workforce.py index c30a10573..0fa434802 100644 --- a/backend/tests/unit/utils/test_workforce.py +++ b/backend/tests/unit/utils/test_workforce.py @@ -370,13 +370,13 @@ class TestWorkforce: ) with patch('app.service.task.delete_task_lock', side_effect=Exception("Delete failed")), \ - patch('loguru.logger.error') as mock_log_error: + patch('traceroot.get_logger') as mock_get_logger: # Should not raise exception await workforce.cleanup() - + # Should log the error - mock_log_error.assert_called_once() + mock_get_logger.assert_called_once() @pytest.mark.integration @@ -623,13 +623,13 @@ class TestWorkforceErrorCases: ) with patch('app.service.task.delete_task_lock', side_effect=Exception("Task lock not found")), \ - patch('loguru.logger.error') as mock_log_error: + patch('traceroot.get_logger') as mock_get_logger: # Should handle missing task lock gracefully await workforce.cleanup() - + # Should log the error - mock_log_error.assert_called_once() + mock_get_logger.assert_called_once() def test_workforce_inheritance(self): """Test that Workforce properly inherits from BaseWorkforce.""" diff --git a/backend/utils/__init__.py b/backend/utils/__init__.py new file mode 100644 index 000000000..dd7ee44cc --- /dev/null +++ b/backend/utils/__init__.py @@ -0,0 +1 @@ +# Utils package diff --git a/backend/uv.lock b/backend/uv.lock index cfe0b2776..5036acaab 100644 --- a/backend/uv.lock +++ b/backend/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 2 +revision = 3 requires-python = "==3.10.16" [[package]] @@ -122,6 +122,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c7/d1/69d02ce34caddb0a7ae088b84c356a625a93cd4ff57b2f97644c03fad905/asgiref-3.9.2-py3-none-any.whl", hash = "sha256:0b61526596219d70396548fc003635056856dba5d0d086f86476f10b33c75960", size = 23788, upload-time = "2025-09-23T15:00:53.627Z" }, ] +[[package]] +name = "astor" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/21/75b771132fee241dfe601d39ade629548a9626d1d39f333fde31bc46febe/astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e", size = 35090, upload-time = "2019-12-10T01:50:35.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/88/97eef84f48fa04fbd6750e62dcceafba6c63c81b7ac1420856c8dcc0a3f9/astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5", size = 27488, upload-time = "2019-12-10T01:50:33.628Z" }, +] + [[package]] name = "async-timeout" version = "5.0.1" @@ -219,7 +228,6 @@ dependencies = [ { name = "fastapi-babel" }, { name = "httpx", extra = ["socks"] }, { name = "inflection" }, - { name = "loguru" }, { name = "nodejs-wheel" }, { name = "numpy" }, { name = "openai" }, @@ -240,19 +248,18 @@ dev = [ [package.metadata] requires-dist = [ { name = "aiofiles", specifier = ">=24.1.0" }, - { name = "camel-ai", extras = ["eigent"], specifier = "==0.2.76a13" }, + { name = "camel-ai", extras = ["eigent"], specifier = "==0.2.78" }, { name = "fastapi", specifier = ">=0.115.12" }, { name = "fastapi-babel", specifier = ">=1.0.0" }, { name = "httpx", extras = ["socks"], specifier = ">=0.28.1" }, { name = "inflection", specifier = ">=0.5.1" }, - { name = "loguru", specifier = ">=0.7.3" }, { name = "nodejs-wheel", specifier = ">=22.18.0" }, { name = "numpy", specifier = ">=1.23.0,<2.0.0" }, { name = "openai", specifier = ">=1.99.3,<2" }, { name = "pydantic-i18n", specifier = ">=0.4.5" }, { name = "pydash", specifier = ">=8.0.5" }, { name = "python-dotenv", specifier = ">=1.1.0" }, - { name = "traceroot", specifier = ">=0.0.5a2" }, + { name = "traceroot", specifier = ">=0.0.7" }, { name = "uvicorn", extras = ["standard"], specifier = ">=0.34.2" }, ] @@ -324,9 +331,10 @@ wheels = [ [[package]] name = "camel-ai" -version = "0.2.76a13" +version = "0.2.78" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "astor" }, { name = "colorama" }, { name = "docstring-parser" }, { name = "httpx" }, @@ -339,9 +347,9 @@ dependencies = [ { name = "tiktoken" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/7c/0145edf0307e360557917de28691eb0c41b36b017a28c6b67e58a729a6da/camel_ai-0.2.76a13.tar.gz", hash = "sha256:487570c36a39a333ae8000783babd5a82350a829aaa8aa2ae712470b596cafe1", size = 950278, upload-time = "2025-10-06T06:09:46.064Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/2b/cd5181bfd0ebcf567a088ee5c1e3768b132ba4b1489ee19d5fb0bd679586/camel_ai-0.2.78.tar.gz", hash = "sha256:24745da225da7da96dcd85f72d143c6104569c17f14280c369d7e82b86851284", size = 964632, upload-time = "2025-10-15T17:20:54.181Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/46/9886106669491737631178830bce79bd7bf63391db4d2200f645089dd9df/camel_ai-0.2.76a13-py3-none-any.whl", hash = "sha256:b860412e4a5b5fc31b0cc3d4b1eeefcd02382d9a5aced252856a1eff0285a97b", size = 1400549, upload-time = "2025-10-06T06:09:43.291Z" }, + { url = "https://files.pythonhosted.org/packages/01/81/0cfb1c0d9da589665e2eb4471887967e70bba428638c37fb4f6a78baf300/camel_ai-0.2.78-py3-none-any.whl", hash = "sha256:356624da13dfe0c55ef43dc509c18ce029f67fe3997966495a4ce9be931078d5", size = 1415578, upload-time = "2025-10-15T17:20:51.727Z" }, ] [package.optional-dependencies] @@ -1153,19 +1161,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] -[[package]] -name = "loguru" -version = "0.7.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "win32-setctime", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" }, -] - [[package]] name = "lxml" version = "6.0.2" @@ -2528,7 +2523,7 @@ wheels = [ [[package]] name = "traceroot" -version = "0.0.5" +version = "0.0.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "opentelemetry-api" }, @@ -2549,9 +2544,9 @@ dependencies = [ { name = "pyyaml" }, { name = "watchtower" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/45/5e/8ade61cadecf69b4fa49205640a7424880bc25b5e9615159ba8cf4aff2bf/traceroot-0.0.5.tar.gz", hash = "sha256:0924d9b524a9e59d64c4eec4c812018f2d7583558de17001294ace96874381c0", size = 28066, upload-time = "2025-08-24T03:29:04.966Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/c0/9f047cc761a9f98a2e7a9a8fef4c01ea1eeb7b2383fe1f3ad82d24ac98b3/traceroot-0.0.7.tar.gz", hash = "sha256:7792def0bb466977318f0126756c02e8950a1c208bcec7a8efed1e05e02b189d", size = 25710, upload-time = "2025-10-16T06:17:39.587Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c9/d4/2777d7c3d0e36b3b2d9f903151991f3e1c21f190788c0e5f537fd762ae34/traceroot-0.0.5-py3-none-any.whl", hash = "sha256:ec27afb4ac33df3109c4c436f3bdfc47e30e2f1ce5eb90ba2215cfebe19e6b2e", size = 24324, upload-time = "2025-08-24T03:29:03.446Z" }, + { url = "https://files.pythonhosted.org/packages/45/59/8593afb3615fb0c2e0cf6888dc49d9ae05d365c76ee053f43a36519f889c/traceroot-0.0.7-py3-none-any.whl", hash = "sha256:2a20a8e2dfa6b10e1f96bc98d5b84dc40c14c01d47098f86068393ece99a2862", size = 24026, upload-time = "2025-10-16T06:17:38.573Z" }, ] [[package]] @@ -2734,15 +2729,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/67/35/25e68fbc99e672127cc6fbb14b8ec1ba3dfef035bf1e4c90f78f24a80b7d/wikipedia-1.4.0.tar.gz", hash = "sha256:db0fad1829fdd441b1852306e9856398204dc0786d2996dd2e0c8bb8e26133b2", size = 27748, upload-time = "2014-11-15T15:59:49.808Z" } -[[package]] -name = "win32-setctime" -version = "1.2.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" }, -] - [[package]] name = "wrapt" version = "1.17.3" diff --git a/components.json b/components.json index 1d282e640..ec15bf398 100644 --- a/components.json +++ b/components.json @@ -10,6 +10,7 @@ "cssVariables": true, "prefix": "" }, + "iconLibrary": "lucide", "aliases": { "components": "@/components", "utils": "@/lib/utils", @@ -17,5 +18,7 @@ "lib": "@/lib", "hooks": "@/hooks" }, - "iconLibrary": "lucide" -} \ No newline at end of file + "registries": { + "@animate-ui": "https://animate-ui.com/r/{name}.json" + } +} diff --git a/electron-builder.json b/electron-builder.json index 69bec5117..3b0752ac4 100644 --- a/electron-builder.json +++ b/electron-builder.json @@ -12,6 +12,10 @@ "from": "backend", "to": "backend", "filter": ["**/*", "!.venv/**/*"] + }, + { + "from": "utils", + "to": "utils" } ], "protocols": [ diff --git a/electron/main/fileReader.ts b/electron/main/fileReader.ts index 99bb22198..82905e383 100644 --- a/electron/main/fileReader.ts +++ b/electron/main/fileReader.ts @@ -541,12 +541,54 @@ export class FileReader { } } - public getFileList(email: string, taskId: string): FileInfo[] { + private findTaskInProjects(userDir: string, taskId: string): string | null { + try { + if (!fs.existsSync(userDir)) { + return null; + } + const entries = fs.readdirSync(userDir); + + // Look for project directories + for (const entry of entries) { + if (entry.startsWith('project_')) { + const projectDir = path.join(userDir, entry); + const taskDir = path.join(projectDir, `task_${taskId}`); + + if (fs.existsSync(taskDir)) { + return taskDir; + } + } + } + + return null; + } catch (err) { + console.error("Error finding task in projects:", err); + return null; + } + } + + public getFileList(email: string, taskId: string, projectId?: string): FileInfo[] { const safeEmail = email.split('@')[0].replace(/[\\/*?:"<>|\s]/g, "_").replace(/^\.+|\.+$/g, ""); - const userHome = app.getPath('home'); - const dirPath = path.join(userHome, "eigent", safeEmail, `task_${taskId}`); + + let dirPath: string; + + // Check if projectId is provided for new project-based structure + if (projectId) { + dirPath = path.join(userHome, "eigent", safeEmail, `project_${projectId}`, `task_${taskId}`); + } else { + // First try project-based structure (scan for existing projects) + const userDir = path.join(userHome, "eigent", safeEmail); + const projectBasedPath = this.findTaskInProjects(userDir, taskId); + + if (projectBasedPath) { + dirPath = projectBasedPath; + } else { + // Fallback to legacy direct task structure + dirPath = path.join(userHome, "eigent", safeEmail, `task_${taskId}`); + } + } try { if (!fs.existsSync(dirPath)) { @@ -559,10 +601,62 @@ export class FileReader { return []; } } - public getLogFolder(email: string): string { + public deleteTaskFiles(email: string, taskId: string, projectId?: string): { + success: boolean; + path: { dirPath: string; logPath: string } + } + { const safeEmail = email.split('@')[0].replace(/[\\/*?:"<>|\s]/g, "_").replace(/^\.+|\.+$/g, ""); + const userHome = app.getPath('home'); + + let dirPath: string; + let logPath: string; + + // Check if projectId is provided for new project-based structure + if (projectId) { + dirPath = path.join(userHome, "eigent", safeEmail, `project_${projectId}`, `task_${taskId}`); + logPath = path.join(userHome, ".eigent", safeEmail, `project_${projectId}`, `task_${taskId}`); + } else { + // First try project-based structure + const userDir = path.join(userHome, "eigent", safeEmail); + const projectBasedPath = this.findTaskInProjects(userDir, taskId); + + if (projectBasedPath) { + dirPath = projectBasedPath; + // Extract project from path to construct log path + const projectMatch = projectBasedPath.match(/project_([^\\\/]+)/); + if (projectMatch) { + logPath = path.join(userHome, ".eigent", safeEmail, projectMatch[0], `task_${taskId}`); + } else { + logPath = path.join(userHome, ".eigent", safeEmail, `task_${taskId}`); + } + } else { + // Fallback to legacy direct task structure + dirPath = path.join(userHome, "eigent", safeEmail, `task_${taskId}`); + logPath = path.join(userHome, ".eigent", safeEmail, `task_${taskId}`); + } + } + try { + let success = false; + if (fs.existsSync(dirPath)) { + fs.rmSync(dirPath, { recursive: true, force: true }); + success = true; + } + if (fs.existsSync(logPath)) { + fs.rmSync(logPath, { recursive: true, force: true }); + success = true; + } + return { success, path: { dirPath, logPath } }; + } catch (err) { + console.error("Delete task files failed:", dirPath, err); + return { success: false, path: { dirPath, logPath } }; + } + } + + public getLogFolder(email: string): string { + const safeEmail = email.split('@')[0].replace(/[\\/*?:"<>|\s]/g, "_").replace(/^\.+|\.+$/g, ""); const userHome = app.getPath('home'); const dirPath = path.join(userHome, "eigent", safeEmail); @@ -577,5 +671,205 @@ export class FileReader { return ''; } } + + public createProjectStructure(email: string, projectId: string): { success: boolean; path: string } { + const safeEmail = email.split('@')[0].replace(/[\\/*?:"<>|\s]/g, "_").replace(/^\.+|\.+$/g, ""); + const userHome = app.getPath('home'); + const projectPath = path.join(userHome, "eigent", safeEmail, `project_${projectId}`); + + try { + if (!fs.existsSync(projectPath)) { + fs.mkdirSync(projectPath, { recursive: true }); + } + return { success: true, path: projectPath }; + } catch (err) { + console.error("Create project structure failed:", err); + return { success: false, path: projectPath }; + } + } + + public getProjectList(email: string): Array<{ id: string; name: string; path: string; taskCount: number; createdAt: Date }> { + const safeEmail = email.split('@')[0].replace(/[\\/*?:"<>|\s]/g, "_").replace(/^\.+|\.+$/g, ""); + const userHome = app.getPath('home'); + const userDir = path.join(userHome, "eigent", safeEmail); + + try { + if (!fs.existsSync(userDir)) { + return []; + } + + const entries = fs.readdirSync(userDir); + const projects: Array<{ id: string; name: string; path: string; taskCount: number; createdAt: Date }> = []; + + for (const entry of entries) { + if (entry.startsWith('project_')) { + const projectPath = path.join(userDir, entry); + const stats = fs.statSync(projectPath); + + if (stats.isDirectory()) { + const projectId = entry.replace('project_', ''); + + // Count tasks in this project + const taskCount = this.countTasksInProject(projectPath); + + projects.push({ + id: projectId, + name: `Project ${projectId}`, + path: projectPath, + taskCount, + createdAt: stats.birthtime + }); + } + } + } + + return projects.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime()); + } catch (err) { + console.error("Get project list failed:", err); + return []; + } + } + + public getTasksInProject(email: string, projectId: string): Array<{ id: string; name: string; path: string; createdAt: Date }> { + const safeEmail = email.split('@')[0].replace(/[\\/*?:"<>|\s]/g, "_").replace(/^\.+|\.+$/g, ""); + const userHome = app.getPath('home'); + const projectPath = path.join(userHome, "eigent", safeEmail, `project_${projectId}`); + + try { + if (!fs.existsSync(projectPath)) { + return []; + } + + const entries = fs.readdirSync(projectPath); + const tasks: Array<{ id: string; name: string; path: string; createdAt: Date }> = []; + + for (const entry of entries) { + if (entry.startsWith('task_')) { + const taskPath = path.join(projectPath, entry); + const stats = fs.statSync(taskPath); + + if (stats.isDirectory()) { + const taskId = entry.replace('task_', ''); + + tasks.push({ + id: taskId, + name: `Task ${taskId}`, + path: taskPath, + createdAt: stats.birthtime + }); + } + } + } + + return tasks.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime()); + } catch (err) { + console.error("Get tasks in project failed:", err); + return []; + } + } + + public moveTaskToProject(email: string, taskId: string, projectId: string): { success: boolean; message: string } { + const safeEmail = email.split('@')[0].replace(/[\\/*?:"<>|\s]/g, "_").replace(/^\.+|\.+$/g, ""); + const userHome = app.getPath('home'); + + // Source path (legacy structure) + const sourcePath = path.join(userHome, "eigent", safeEmail, `task_${taskId}`); + const sourceLogPath = path.join(userHome, ".eigent", safeEmail, `task_${taskId}`); + + // Destination paths (project structure) + const projectPath = path.join(userHome, "eigent", safeEmail, `project_${projectId}`); + const destPath = path.join(projectPath, `task_${taskId}`); + const destLogPath = path.join(userHome, ".eigent", safeEmail, `project_${projectId}`, `task_${taskId}`); + + try { + // Create project structure if it doesn't exist + if (!fs.existsSync(projectPath)) { + fs.mkdirSync(projectPath, { recursive: true }); + } + + // Create destination log directory + const destLogDir = path.dirname(destLogPath); + if (!fs.existsSync(destLogDir)) { + fs.mkdirSync(destLogDir, { recursive: true }); + } + + // Move task files + if (fs.existsSync(sourcePath)) { + fs.renameSync(sourcePath, destPath); + } + + // Move log files + if (fs.existsSync(sourceLogPath)) { + fs.renameSync(sourceLogPath, destLogPath); + } + + return { success: true, message: `Task ${taskId} moved to project ${projectId}` }; + } catch (err) { + console.error("Move task to project failed:", err); + return { success: false, message: `Failed to move task: ${err}` }; + } + } + + public getProjectFileList(email: string, projectId: string): FileInfo[] { + const safeEmail = email.split('@')[0].replace(/[\\/*?:"<>|\s]/g, "_").replace(/^\.+|\.+$/g, ""); + const userHome = app.getPath('home'); + const projectPath = path.join(userHome, "eigent", safeEmail, `project_${projectId}`); + + try { + if (!fs.existsSync(projectPath)) { + return []; + } + + const allFiles: FileInfo[] = []; + const taskDirs = fs.readdirSync(projectPath); + + for (const taskDir of taskDirs) { + if (!taskDir.startsWith('task_')) continue; + + const taskPath = path.join(projectPath, taskDir); + const stats = fs.statSync(taskPath); + + if (stats.isDirectory()) { + const taskId = taskDir.replace('task_', ''); + const taskFiles = this.getFilesRecursive(taskPath, taskPath); + + const enrichedFiles = taskFiles.map(file => { + const fileDir = path.dirname(file.path); + const relativeParentPath = path.relative(projectPath, fileDir); + + return { + ...file, + task_id: taskId, + project_id: projectId, + relativePath: relativeParentPath === '.' ? '' : relativeParentPath + }; + }); + + allFiles.push(...enrichedFiles); + } + } + + return allFiles.sort((a, b) => { + // Sort by task_id first, then by file path + if (a.task_id !== b.task_id) { + return a.task_id!.localeCompare(b.task_id!); + } + return a.path.localeCompare(b.path); + }); + } catch (err) { + console.error("Get project file list failed:", err); + return []; + } + } + + private countTasksInProject(projectPath: string): number { + try { + const entries = fs.readdirSync(projectPath); + return entries.filter(entry => entry.startsWith('task_')).length; + } catch (err) { + console.error("Count tasks in project failed:", err); + return 0; + } + } } diff --git a/electron/main/index.ts b/electron/main/index.ts index 057ac8860..7c87795a2 100644 --- a/electron/main/index.ts +++ b/electron/main/index.ts @@ -40,6 +40,10 @@ let python_process: ChildProcessWithoutNullStreams | null = null; let backendPort: number = 5001; let browser_port = 9222; +// Protocol URL queue for handling URLs before window is ready +let protocolUrlQueue: string[] = []; +let isWindowReady = false; + // ==================== path config ==================== const preload = path.join(__dirname, '../preload/index.mjs'); const indexHtml = path.join(RENDERER_DIST, 'index.html'); @@ -97,6 +101,19 @@ const setupProtocolHandlers = () => { // ==================== protocol url handle ==================== function handleProtocolUrl(url: string) { log.info('enter handleProtocolUrl', url); + + // If window is not ready, queue the URL + if (!isWindowReady || !win || win.isDestroyed()) { + log.info('Window not ready, queuing protocol URL:', url); + protocolUrlQueue.push(url); + return; + } + + processProtocolUrl(url); +} + +// Process a single protocol URL +function processProtocolUrl(url: string) { const urlObj = new URL(url); const code = urlObj.searchParams.get('code'); const share_token = urlObj.searchParams.get('share_token'); @@ -130,6 +147,26 @@ function handleProtocolUrl(url: string) { } } +// Process all queued protocol URLs +function processQueuedProtocolUrls() { + if (protocolUrlQueue.length > 0) { + log.info('Processing queued protocol URLs:', protocolUrlQueue.length); + + // Verify window is ready before processing + if (!win || win.isDestroyed() || !isWindowReady) { + log.warn('Window not ready for processing queued URLs, keeping URLs in queue'); + return; + } + + const urls = [...protocolUrlQueue]; + protocolUrlQueue = []; + + urls.forEach(url => { + processProtocolUrl(url); + }); + } +} + // ==================== single instance lock ==================== const setupSingleInstanceLock = () => { const gotLock = app.requestSingleInstanceLock(); @@ -207,7 +244,7 @@ const checkManagerInstance = (manager: any, name: string) => { function registerIpcHandlers() { // ==================== basic info handler ==================== ipcMain.handle('get-browser-port', () => { - log.info('Starting new task') + log.info('Getting browser port') return browser_port }); ipcMain.handle('get-app-version', () => app.getVersion()); @@ -609,6 +646,13 @@ function registerIpcHandlers() { return { success: false, error: 'File does not exist' }; } + // Check if it's a directory + const stats = await fsp.stat(filePath); + if (stats.isDirectory()) { + log.error('Path is a directory, not a file:', filePath); + return { success: false, error: 'Path is a directory, not a file' }; + } + // Read file content const fileContent = await fsp.readFile(filePath); log.info('File read successfully:', filePath); @@ -802,9 +846,40 @@ function registerIpcHandlers() { } }); - ipcMain.handle('get-file-list', async (_, email: string, taskId: string) => { + ipcMain.handle('get-file-list', async (_, email: string, taskId: string, projectId?: string) => { const manager = checkManagerInstance(fileReader, 'FileReader'); - return manager.getFileList(email, taskId); + return manager.getFileList(email, taskId, projectId); + }); + + ipcMain.handle('delete-task-files', async (_, email: string, taskId: string, projectId?: string) => { + const manager = checkManagerInstance(fileReader, 'FileReader'); + return manager.deleteTaskFiles(email, taskId, projectId); + }); + + // New project management handlers + ipcMain.handle('create-project-structure', async (_, email: string, projectId: string) => { + const manager = checkManagerInstance(fileReader, 'FileReader'); + return manager.createProjectStructure(email, projectId); + }); + + ipcMain.handle('get-project-list', async (_, email: string) => { + const manager = checkManagerInstance(fileReader, 'FileReader'); + return manager.getProjectList(email); + }); + + ipcMain.handle('get-tasks-in-project', async (_, email: string, projectId: string) => { + const manager = checkManagerInstance(fileReader, 'FileReader'); + return manager.getTasksInProject(email, projectId); + }); + + ipcMain.handle('move-task-to-project', async (_, email: string, taskId: string, projectId: string) => { + const manager = checkManagerInstance(fileReader, 'FileReader'); + return manager.moveTaskToProject(email, taskId, projectId); + }); + + ipcMain.handle('get-project-file-list', async (_, email: string, projectId: string) => { + const manager = checkManagerInstance(fileReader, 'FileReader'); + return manager.getProjectFileList(email, projectId); }); ipcMain.handle('get-log-folder', async (_, email: string) => { @@ -1102,6 +1177,11 @@ async function createWindow() { }); }); + // Mark window as ready and process any queued protocol URLs + isWindowReady = true; + log.info('Window is ready, processing queued protocol URLs...'); + processQueuedProtocolUrls(); + // Now check and install dependencies let res:PromiseReturnType = await checkAndInstallDepsOnUpdate({ win }); if (!res.success) { @@ -1334,7 +1414,10 @@ app.on('window-all-closed', () => { webViewManager = null; } + // Reset window state win = null; + isWindowReady = false; + protocolUrlQueue = []; if (process.platform !== 'darwin') { app.quit(); @@ -1387,6 +1470,10 @@ app.on('before-quit', async (event) => { global.gc(); } + // Reset protocol handling state + isWindowReady = false; + protocolUrlQueue = []; + log.info('All cleanup completed, exiting...'); } catch (error) { log.error('Error during cleanup:', error); diff --git a/electron/main/install-deps.ts b/electron/main/install-deps.ts index f4b8cd2fe..c475ab94b 100644 --- a/electron/main/install-deps.ts +++ b/electron/main/install-deps.ts @@ -230,10 +230,8 @@ class InstallLogs { /**Display filtered logs based on severity */ displayFilteredLogs(data:String) { - if (!data) return; + if (!data) return; const msg = data.toString().trimEnd(); - //Detect if uv sync is run - detectInstallationLogs(msg); if (msg.toLowerCase().includes("error") || msg.toLowerCase().includes("traceback")) { log.error(`BACKEND: [DEPS INSTALL] ${msg}`); safeMainWindowSend('install-dependencies-log', { type: 'stderr', data: data.toString() }); @@ -358,6 +356,29 @@ export async function installDependencies(version: string): Promise name.startsWith('python')); + if (pythonDir) { + sitePackagesPath = path.join(libPath, pythonDir, 'site-packages'); + } + } + + if (sitePackagesPath) { + const npmMarkerPath = path.join(sitePackagesPath, 'camel', 'toolkits', 'hybrid_browser_toolkit', 'ts', '.npm_dependencies_installed'); + if (fs.existsSync(npmMarkerPath)) { + fs.unlinkSync(npmMarkerPath); + log.info('[DEPS INSTALL] Removed npm dependencies marker for fresh installation'); + } + } + } catch (error) { + log.warn('[DEPS INSTALL] Could not clean npm marker file:', error); + // Non-critical, continue + } + // try default install const installSuccess = await runInstall([], version) if (installSuccess.success) { @@ -592,6 +646,24 @@ export async function installDependencies(version: string): Promise + if (!dependencyInstallationDetected && installPatterns.some(pattern => msg.includes(pattern) && !msg.includes("Uvicorn running on") )) { dependencyInstallationDetected = true; log.info('[BACKEND STARTUP] UV dependency installation detected during uvicorn startup'); - + // Create installing lock file to maintain consistency with install-deps.ts InstallLogs.setLockPath(); log.info('[BACKEND STARTUP] Created uv_installing.lock file'); - + // Notify frontend that installation has started (only once) if (!installationNotificationSent) { installationNotificationSent = true; diff --git a/electron/main/webview.ts b/electron/main/webview.ts index 8036dda6d..8e686e6eb 100644 --- a/electron/main/webview.ts +++ b/electron/main/webview.ts @@ -64,6 +64,9 @@ export class WebViewManager { } const view = new WebContentsView({ webPreferences: { + // Use a separate session partition for webviews to isolate storage from main window + // This ensures clearing webview storage won't affect main window's auth data + partition: 'persist:agent-webview', nodeIntegration: false, contextIsolation: true, backgroundThrottling: true, @@ -269,6 +272,7 @@ export class WebViewManager { if (!webViewInfo.view.webContents.isDestroyed()) { webViewInfo.view.webContents.removeAllListeners() + // Now safe to clear all storage since webviews use separate partition webViewInfo.view.webContents.session.clearCache() webViewInfo.view.webContents.session.clearStorageData({ storages: ['cookies', 'localstorage', 'websql', 'indexdb', 'serviceworkers', 'cachestorage'] diff --git a/package.json b/package.json index 42fafd1ff..f95e834b0 100644 --- a/package.json +++ b/package.json @@ -14,7 +14,8 @@ "type": "module", "scripts": { "compile-babel": "cd backend && uv run pybabel compile -d lang", - "dev": "npm run compile-babel && vite", + "clean-cache": "rimraf node_modules/.vite", + "dev": "npm run clean-cache && npm run compile-babel && vite", "build": "npm run compile-babel && tsc && vite build && electron-builder -- --publish always", "build:mac": "npm run compile-babel && tsc && vite build && electron-builder --mac", "build:win": "npm run compile-babel && tsc && vite build && electron-builder --win", @@ -30,6 +31,7 @@ "dependencies": { "@electron/notarize": "^2.5.0", "@fontsource/inter": "^5.2.5", + "@gsap/react": "^2.1.2", "@microsoft/fetch-event-source": "^2.0.1", "@monaco-editor/loader": "^1.5.0", "@monaco-editor/react": "^4.7.0", @@ -71,8 +73,10 @@ "lucide-react": "^0.509.0", "mammoth": "^1.9.1", "monaco-editor": "^0.52.2", + "motion": "^12.23.23", "next-themes": "^0.4.6", "papaparse": "^5.5.3", + "postprocessing": "^6.37.8", "react-markdown": "^10.1.0", "react-resizable-panels": "^3.0.4", "react-router-dom": "^7.6.0", @@ -81,6 +85,7 @@ "tailwind-merge": "^3.3.0", "tailwindcss-animate": "^1.0.7", "tar": "^7.4.3", + "three": "^0.180.0", "tree-kill": "^1.2.2", "tw-animate-css": "^1.2.9", "unzipper": "^0.12.3", @@ -112,6 +117,7 @@ "react": "^18.3.1", "react-dom": "^18.3.1", "react-i18next": "^15.7.3", + "rimraf": "^6.0.1", "tailwindcss": "^3.4.15", "typescript": "^5.4.2", "vite": "^5.4.11", diff --git a/server/.env.example b/server/.env.example index 76637a8ef..d58015f48 100644 --- a/server/.env.example +++ b/server/.env.example @@ -5,3 +5,5 @@ database_url=postgresql://postgres:postgres@localhost:5432/postgres # Chat Share Secret Key CHAT_SHARE_SECRET_KEY=put-your-secret-key-here CHAT_SHARE_SALT=put-your-encode-salt-here + + diff --git a/server/Dockerfile b/server/Dockerfile index e8fb2b8f3..17ff0e2d3 100644 --- a/server/Dockerfile +++ b/server/Dockerfile @@ -1,5 +1,5 @@ # Use a Python image with uv pre-installed -FROM ghcr.io/astral-sh/uv:python3.13-bookworm-slim +FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim # Install the project into `/app` WORKDIR /app @@ -15,6 +15,10 @@ ENV UV_PYTHON_INSTALL_MIRROR=https://registry.npmmirror.com/-/binary/python-buil ARG database_url ENV database_url=$database_url +RUN apt-get update && apt-get install -y \ + gcc \ + python3-dev \ + && rm -rf /var/lib/apt/lists/* # Copy dependency files first COPY pyproject.toml uv.lock ./ diff --git a/server/alembic/versions/2025_10_15_1446-eec7242b3a9b_modify_chat_history_add_project_id.py b/server/alembic/versions/2025_10_15_1446-eec7242b3a9b_modify_chat_history_add_project_id.py new file mode 100644 index 000000000..aaa7626dc --- /dev/null +++ b/server/alembic/versions/2025_10_15_1446-eec7242b3a9b_modify_chat_history_add_project_id.py @@ -0,0 +1,36 @@ +"""modify_chat_history_add_project_id + +Revision ID: eec7242b3a9b +Revises: d74ab2a44600 +Create Date: 2025-10-15 14:46:47.904254 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes + + +# revision identifiers, used by Alembic. +revision: str = "eec7242b3a9b" +down_revision: Union[str, None] = "d74ab2a44600" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.add_column("chat_history", sa.Column("project_id", sqlmodel.sql.sqltypes.AutoString(), nullable=True)) + op.create_index(op.f("ix_chat_history_project_id"), "chat_history", ["project_id"], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("ix_chat_history_project_id"), table_name="chat_history") + op.drop_column("chat_history", "project_id") + # ### end Alembic commands ### diff --git a/server/app/__init__.py b/server/app/__init__.py index 101f7685b..08a45184b 100644 --- a/server/app/__init__.py +++ b/server/app/__init__.py @@ -1,5 +1,6 @@ from fastapi import FastAPI from fastapi_pagination import add_pagination + api = FastAPI(swagger_ui_parameters={"persistAuthorization": True}) add_pagination(api) diff --git a/server/app/controller/chat/history_controller.py b/server/app/controller/chat/history_controller.py index a3ab6a220..f58b29995 100644 --- a/server/app/controller/chat/history_controller.py +++ b/server/app/controller/chat/history_controller.py @@ -6,47 +6,95 @@ from fastapi_babel import _ from sqlmodel import Session, select, desc from app.component.auth import Auth, auth_must from app.component.database import session +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_chat_history") router = APIRouter(prefix="/chat", tags=["Chat History"]) @router.post("/history", name="save chat history", response_model=ChatHistoryOut) +@traceroot.trace() def create_chat_history(data: ChatHistoryIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - data.user_id = auth.user.id - chat_history = ChatHistory(**data.model_dump()) - session.add(chat_history) - session.commit() - session.refresh(chat_history) - return chat_history + """Save new chat history.""" + user_id = auth.user.id + + try: + data.user_id = user_id + chat_history = ChatHistory(**data.model_dump()) + session.add(chat_history) + session.commit() + session.refresh(chat_history) + logger.info("Chat history created", extra={"user_id": user_id, "history_id": chat_history.id, "task_id": data.task_id}) + return chat_history + except Exception as e: + session.rollback() + logger.error("Chat history creation failed", extra={"user_id": user_id, "task_id": data.task_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") @router.get("/histories", name="get chat history") +@traceroot.trace() def list_chat_history(session: Session = Depends(session), auth: Auth = Depends(auth_must)) -> Page[ChatHistoryOut]: - stmt = select(ChatHistory).where(ChatHistory.user_id == auth.user.id).order_by(desc(ChatHistory.created_at)) - return paginate(session, stmt) + """List chat histories for current user.""" + user_id = auth.user.id + stmt = select(ChatHistory).where(ChatHistory.user_id == user_id).order_by(desc(ChatHistory.created_at)) + result = paginate(session, stmt) + total = result.total if hasattr(result, 'total') else 0 + logger.debug("Chat histories listed", extra={"user_id": user_id, "total": total}) + return result @router.delete("/history/{history_id}", name="delete chat history") -def delete_chat_history(history_id: str, session: Session = Depends(session)): +@traceroot.trace() +def delete_chat_history(history_id: str, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Delete chat history.""" + user_id = auth.user.id history = session.exec(select(ChatHistory).where(ChatHistory.id == history_id)).first() + if not history: - raise HTTPException(status_code=404, detail="Caht History not found") - session.delete(history) - session.commit() - return Response(status_code=204) + logger.warning("Chat history not found for deletion", extra={"user_id": user_id, "history_id": history_id}) + raise HTTPException(status_code=404, detail="Chat History not found") + + if history.user_id != user_id: + logger.warning("Unauthorized deletion attempt", extra={"user_id": user_id, "history_id": history_id, "owner_id": history.user_id}) + raise HTTPException(status_code=403, detail="You are not allowed to delete this chat history") + + try: + session.delete(history) + session.commit() + logger.info("Chat history deleted", extra={"user_id": user_id, "history_id": history_id}) + return Response(status_code=204) + except Exception as e: + session.rollback() + logger.error("Chat history deletion failed", extra={"user_id": user_id, "history_id": history_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") @router.put("/history/{history_id}", name="update chat history", response_model=ChatHistoryOut) +@traceroot.trace() def update_chat_history( history_id: int, data: ChatHistoryUpdate, session: Session = Depends(session), auth: Auth = Depends(auth_must) ): + """Update chat history.""" + user_id = auth.user.id history = session.exec(select(ChatHistory).where(ChatHistory.id == history_id)).first() + if not history: + logger.warning("Chat history not found for update", extra={"user_id": user_id, "history_id": history_id}) raise HTTPException(status_code=404, detail="Chat History not found") - if history.user_id != auth.user.id: + + if history.user_id != user_id: + logger.warning("Unauthorized update attempt", extra={"user_id": user_id, "history_id": history_id, "owner_id": history.user_id}) raise HTTPException(status_code=403, detail="You are not allowed to update this chat history") - update_data = data.model_dump(exclude_unset=True) - history.update_fields(update_data) - history.save(session) - session.refresh(history) - return history + + try: + update_data = data.model_dump(exclude_unset=True) + history.update_fields(update_data) + history.save(session) + session.refresh(history) + logger.info("Chat history updated", extra={"user_id": user_id, "history_id": history_id, "fields_updated": list(update_data.keys())}) + return history + except Exception as e: + logger.error("Chat history update failed", extra={"user_id": user_id, "history_id": history_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") \ No newline at end of file diff --git a/server/app/controller/chat/share_controller.py b/server/app/controller/chat/share_controller.py index 17a41ae96..bd3372775 100644 --- a/server/app/controller/chat/share_controller.py +++ b/server/app/controller/chat/share_controller.py @@ -1,78 +1,107 @@ -from fastapi import APIRouter, Depends, HTTPException, Response -from sqlmodel import Session, asc, select -from app.component.database import session -import json -import asyncio -from itsdangerous import SignatureExpired, BadTimeSignature -from starlette.responses import StreamingResponse -from app.model.chat.chat_share import ChatHistoryShareOut, ChatShare, ChatShareIn -from app.model.chat.chat_step import ChatStep -from app.model.chat.chat_history import ChatHistory - -router = APIRouter(prefix="/chat", tags=["Chat Share"]) - - -@router.get("/share/info/{token}", name="Get shared chat info", response_model=ChatHistoryShareOut) -def get_share_info(token: str, session: Session = Depends(session)): - """ - Get shared chat history info by token, excluding sensitive data. - """ - try: - task_id = ChatShare.verify_token(token, False) - except (SignatureExpired, BadTimeSignature): - raise HTTPException(status_code=400, detail="Share link is invalid or has expired.") - - stmt = select(ChatHistory).where(ChatHistory.task_id == task_id) - history = session.exec(stmt).one_or_none() - - if not history: - raise HTTPException(status_code=404, detail="Chat history not found.") - - return history - - -@router.get("/share/playback/{token}", name="Playback shared chat via SSE") -async def share_playback(token: str, session: Session = Depends(session), delay_time: float = 0): - """ - Playbacks the chat history via a sharing token (SSE). - delay_time: control sse interval, max 5 seconds - """ - if delay_time > 5: - delay_time = 5 - try: - task_id = ChatShare.verify_token(token, False) - except SignatureExpired: - raise HTTPException(status_code=400, detail="Share link has expired.") - except BadTimeSignature: - raise HTTPException(status_code=400, detail="Share link is invalid.") - - async def event_generator(): - stmt = select(ChatStep).where(ChatStep.task_id == task_id).order_by(asc(ChatStep.id)) - steps = session.exec(stmt).all() - - if not steps: - yield f"data: {json.dumps({'error': 'No steps found for this task.'})}\n\n" - return - - for step in steps: - step_data = { - "id": step.id, - "task_id": step.task_id, - "step": step.step, - "data": step.data, - "created_at": step.created_at.isoformat() if step.created_at else None, - } - yield f"data: {json.dumps(step_data)}\n\n" - if delay_time > 0 and step.step != "create_agent": - await asyncio.sleep(delay_time) - - return StreamingResponse(event_generator(), media_type="text/event-stream") - - -@router.post("/share", name="Generate sharable link for a task(1 day expiration)") -def create_share_link(data: ChatShareIn): - """ - Generates a sharing token with an expiration time for the specified task_id. - """ - share_token = ChatShare.generate_token(data.task_id) - return {"share_token": share_token} +from fastapi import APIRouter, Depends, HTTPException, Response +from sqlmodel import Session, asc, select +from app.component.database import session +import json +import asyncio +from itsdangerous import SignatureExpired, BadTimeSignature +from starlette.responses import StreamingResponse +from app.model.chat.chat_share import ChatHistoryShareOut, ChatShare, ChatShareIn +from app.model.chat.chat_step import ChatStep +from app.model.chat.chat_history import ChatHistory +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_chat_share") + +router = APIRouter(prefix="/chat", tags=["Chat Share"]) + + +@router.get("/share/info/{token}", name="Get shared chat info", response_model=ChatHistoryShareOut) +@traceroot.trace() +def get_share_info(token: str, session: Session = Depends(session)): + """ + Get shared chat history info by token, excluding sensitive data. + """ + try: + task_id = ChatShare.verify_token(token, False) + except SignatureExpired: + logger.warning("Shared chat access failed: token expired", extra={"token_prefix": token[:10]}) + raise HTTPException(status_code=400, detail="Share link is invalid or has expired.") + except BadTimeSignature: + logger.warning("Shared chat access failed: invalid token", extra={"token_prefix": token[:10]}) + raise HTTPException(status_code=400, detail="Share link is invalid or has expired.") + + stmt = select(ChatHistory).where(ChatHistory.task_id == task_id) + history = session.exec(stmt).one_or_none() + + if not history: + logger.warning("Shared chat not found", extra={"task_id": task_id}) + raise HTTPException(status_code=404, detail="Chat history not found.") + + logger.info("Shared chat info accessed", extra={"task_id": task_id}) + return history + + +@router.get("/share/playback/{token}", name="Playback shared chat via SSE") +@traceroot.trace() +async def share_playback(token: str, session: Session = Depends(session), delay_time: float = 0): + """ + Playbacks the chat history via a sharing token (SSE). + delay_time: control sse interval, max 5 seconds + """ + if delay_time > 5: + logger.debug("Delay time capped", extra={"requested": delay_time, "capped": 5}) + delay_time = 5 + + try: + task_id = ChatShare.verify_token(token, False) + except SignatureExpired: + logger.warning("Shared chat playback failed: token expired", extra={"token_prefix": token[:10]}) + raise HTTPException(status_code=400, detail="Share link has expired.") + except BadTimeSignature: + logger.warning("Shared chat playback failed: invalid token", extra={"token_prefix": token[:10]}) + raise HTTPException(status_code=400, detail="Share link is invalid.") + + async def event_generator(): + try: + stmt = select(ChatStep).where(ChatStep.task_id == task_id).order_by(asc(ChatStep.id)) + steps = session.exec(stmt).all() + + if not steps: + logger.warning("No steps found for playback", extra={"task_id": task_id}) + yield f"data: {json.dumps({'error': 'No steps found for this task.'})}\n\n" + return + + logger.info("Shared chat playback started", extra={"task_id": task_id, "step_count": len(steps), "delay_time": delay_time}) + + for idx, step in enumerate(steps, start=1): + step_data = { + "id": step.id, + "task_id": step.task_id, + "step": step.step, + "data": step.data, + "created_at": step.created_at.isoformat() if step.created_at else None, + } + yield f"data: {json.dumps(step_data)}\n\n" + + if delay_time > 0 and step.step != "create_agent": + await asyncio.sleep(delay_time) + + logger.info("Shared chat playback completed", extra={"task_id": task_id, "step_count": len(steps)}) + except Exception as e: + logger.error("Shared chat playback error", extra={"task_id": task_id, "error": str(e)}, exc_info=True) + yield f"data: {json.dumps({'error': 'Playback error occurred.'})}\n\n" + + return StreamingResponse(event_generator(), media_type="text/event-stream") + + +@router.post("/share", name="Generate sharable link for a task(1 day expiration)") +@traceroot.trace() +def create_share_link(data: ChatShareIn): + """Generate sharing token with 1-day expiration for task.""" + try: + share_token = ChatShare.generate_token(data.task_id) + logger.info("Share link created", extra={"task_id": data.task_id, "token_prefix": share_token[:10]}) + return {"share_token": share_token} + except Exception as e: + logger.error("Share link creation failed", extra={"task_id": data.task_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") \ No newline at end of file diff --git a/server/app/controller/chat/snapshot_controller.py b/server/app/controller/chat/snapshot_controller.py index 5767746bd..af0115fc2 100644 --- a/server/app/controller/chat/snapshot_controller.py +++ b/server/app/controller/chat/snapshot_controller.py @@ -1,81 +1,138 @@ -from app.model.chat.chat_snpshot import ChatSnapshot, ChatSnapshotIn -from typing import List, Optional -from fastapi import Depends, HTTPException, Response, APIRouter -from sqlmodel import Session, select -from app.component.database import session -from app.component.auth import Auth, auth_must -from fastapi_babel import _ - -router = APIRouter(prefix="/chat", tags=["Chat Snapshot Management"]) - - -@router.get("/snapshots", name="list chat snapshots", response_model=List[ChatSnapshot]) -async def list_chat_snapshots( - api_task_id: Optional[str] = None, - camel_task_id: Optional[str] = None, - browser_url: Optional[str] = None, - session: Session = Depends(session), -): - query = select(ChatSnapshot) - if api_task_id is not None: - query = query.where(ChatSnapshot.api_task_id == api_task_id) - if camel_task_id is not None: - query = query.where(ChatSnapshot.camel_task_id == camel_task_id) - if browser_url is not None: - query = query.where(ChatSnapshot.browser_url == browser_url) - snapshots = session.exec(query).all() - return snapshots - - -@router.get("/snapshots/{snapshot_id}", name="get chat snapshot", response_model=ChatSnapshot) -async def get_chat_snapshot(snapshot_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - snapshot = session.get(ChatSnapshot, snapshot_id) - if not snapshot: - raise HTTPException(status_code=404, detail=_("Chat snapshot not found")) - return snapshot - - -@router.post("/snapshots", name="create chat snapshot", response_model=ChatSnapshot) -async def create_chat_snapshot( - snapshot: ChatSnapshotIn, auth: Auth = Depends(auth_must), session: Session = Depends(session) -): - image_path = ChatSnapshotIn.save_image(auth.user.id, snapshot.api_task_id, snapshot.image_base64) - chat_snapshot = ChatSnapshot( - user_id=auth.user.id, - api_task_id=snapshot.api_task_id, - camel_task_id=snapshot.camel_task_id, - browser_url=snapshot.browser_url, - image_path=image_path, - ) - session.add(chat_snapshot) - session.commit() - session.refresh(chat_snapshot) - return Response(status_code=200) - - -@router.put("/snapshots/{snapshot_id}", name="update chat snapshot", response_model=ChatSnapshot) -async def update_chat_snapshot( - snapshot_id: int, - snapshot_update: ChatSnapshot, - session: Session = Depends(session), - auth: Auth = Depends(auth_must), -): - db_snapshot = session.get(ChatSnapshot, snapshot_id) - if not db_snapshot: - raise HTTPException(status_code=404, detail=_("Chat snapshot not found")) - for key, value in snapshot_update.dict(exclude_unset=True).items(): - setattr(db_snapshot, key, value) - session.add(db_snapshot) - session.commit() - session.refresh(db_snapshot) - return db_snapshot - - -@router.delete("/snapshots/{snapshot_id}", name="delete chat snapshot") -async def delete_chat_snapshot(snapshot_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - db_snapshot = session.get(ChatSnapshot, snapshot_id) - if not db_snapshot: - raise HTTPException(status_code=404, detail=_("Chat snapshot not found")) - session.delete(db_snapshot) - session.commit() - return Response(status_code=204) +from app.model.chat.chat_snpshot import ChatSnapshot, ChatSnapshotIn +from typing import List, Optional +from fastapi import Depends, HTTPException, Response, APIRouter +from sqlmodel import Session, select +from app.component.database import session +from app.component.auth import Auth, auth_must +from fastapi_babel import _ +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_chat_snapshot") + +router = APIRouter(prefix="/chat", tags=["Chat Snapshot Management"]) + + +@router.get("/snapshots", name="list chat snapshots", response_model=List[ChatSnapshot]) +@traceroot.trace() +async def list_chat_snapshots( + api_task_id: Optional[str] = None, + camel_task_id: Optional[str] = None, + browser_url: Optional[str] = None, + session: Session = Depends(session), +): + """List chat snapshots with optional filtering.""" + query = select(ChatSnapshot) + if api_task_id is not None: + query = query.where(ChatSnapshot.api_task_id == api_task_id) + if camel_task_id is not None: + query = query.where(ChatSnapshot.camel_task_id == camel_task_id) + if browser_url is not None: + query = query.where(ChatSnapshot.browser_url == browser_url) + + snapshots = session.exec(query).all() + logger.debug("Snapshots listed", extra={"api_task_id": api_task_id, "camel_task_id": camel_task_id, "count": len(snapshots)}) + return snapshots + + +@router.get("/snapshots/{snapshot_id}", name="get chat snapshot", response_model=ChatSnapshot) +@traceroot.trace() +async def get_chat_snapshot(snapshot_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Get specific chat snapshot.""" + user_id = auth.user.id + snapshot = session.get(ChatSnapshot, snapshot_id) + + if not snapshot: + logger.warning("Snapshot not found", extra={"user_id": user_id, "snapshot_id": snapshot_id}) + raise HTTPException(status_code=404, detail=_("Chat snapshot not found")) + + logger.debug("Snapshot retrieved", extra={"user_id": user_id, "snapshot_id": snapshot_id, "api_task_id": snapshot.api_task_id}) + return snapshot + + +@router.post("/snapshots", name="create chat snapshot", response_model=ChatSnapshot) +@traceroot.trace() +async def create_chat_snapshot( + snapshot: ChatSnapshotIn, auth: Auth = Depends(auth_must), session: Session = Depends(session) +): + """Create new chat snapshot from image.""" + user_id = auth.user.id + + try: + image_path = ChatSnapshotIn.save_image(user_id, snapshot.api_task_id, snapshot.image_base64) + chat_snapshot = ChatSnapshot( + user_id=user_id, + api_task_id=snapshot.api_task_id, + camel_task_id=snapshot.camel_task_id, + browser_url=snapshot.browser_url, + image_path=image_path, + ) + session.add(chat_snapshot) + session.commit() + session.refresh(chat_snapshot) + logger.info("Snapshot created", extra={"user_id": user_id, "snapshot_id": chat_snapshot.id, "api_task_id": snapshot.api_task_id, "image_path": image_path}) + return chat_snapshot + except Exception as e: + session.rollback() + logger.error("Snapshot creation failed", extra={"user_id": user_id, "api_task_id": snapshot.api_task_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.put("/snapshots/{snapshot_id}", name="update chat snapshot", response_model=ChatSnapshot) +@traceroot.trace() +async def update_chat_snapshot( + snapshot_id: int, + snapshot_update: ChatSnapshot, + session: Session = Depends(session), + auth: Auth = Depends(auth_must), +): + """Update chat snapshot.""" + user_id = auth.user.id + db_snapshot = session.get(ChatSnapshot, snapshot_id) + + if not db_snapshot: + logger.warning("Snapshot not found for update", extra={"user_id": user_id, "snapshot_id": snapshot_id}) + raise HTTPException(status_code=404, detail=_("Chat snapshot not found")) + + if db_snapshot.user_id != user_id: + logger.warning("Unauthorized snapshot update", extra={"user_id": user_id, "snapshot_id": snapshot_id, "owner_id": db_snapshot.user_id}) + raise HTTPException(status_code=403, detail=_("You are not allowed to update this snapshot")) + + try: + update_data = snapshot_update.dict(exclude_unset=True) + for key, value in update_data.items(): + setattr(db_snapshot, key, value) + session.add(db_snapshot) + session.commit() + session.refresh(db_snapshot) + logger.info("Snapshot updated", extra={"user_id": user_id, "snapshot_id": snapshot_id, "fields_updated": list(update_data.keys())}) + return db_snapshot + except Exception as e: + session.rollback() + logger.error("Snapshot update failed", extra={"user_id": user_id, "snapshot_id": snapshot_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete("/snapshots/{snapshot_id}", name="delete chat snapshot") +@traceroot.trace() +async def delete_chat_snapshot(snapshot_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Delete chat snapshot.""" + user_id = auth.user.id + db_snapshot = session.get(ChatSnapshot, snapshot_id) + + if not db_snapshot: + logger.warning("Snapshot not found for deletion", extra={"user_id": user_id, "snapshot_id": snapshot_id}) + raise HTTPException(status_code=404, detail=_("Chat snapshot not found")) + + if db_snapshot.user_id != user_id: + logger.warning("Unauthorized snapshot deletion", extra={"user_id": user_id, "snapshot_id": snapshot_id, "owner_id": db_snapshot.user_id}) + raise HTTPException(status_code=403, detail=_("You are not allowed to delete this snapshot")) + + try: + session.delete(db_snapshot) + session.commit() + logger.info("Snapshot deleted", extra={"user_id": user_id, "snapshot_id": snapshot_id, "image_path": db_snapshot.image_path}) + return Response(status_code=204) + except Exception as e: + session.rollback() + logger.error("Snapshot deletion failed", extra={"user_id": user_id, "snapshot_id": snapshot_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") \ No newline at end of file diff --git a/server/app/controller/chat/step_controller.py b/server/app/controller/chat/step_controller.py index c33639112..21e7199a9 100644 --- a/server/app/controller/chat/step_controller.py +++ b/server/app/controller/chat/step_controller.py @@ -1,105 +1,163 @@ -import asyncio -import json -from typing import List, Optional -from fastapi import Depends, HTTPException, Query, Response, APIRouter -from fastapi.responses import StreamingResponse -from sqlmodel import Session, asc, select -from app.component.database import session -from app.component.auth import Auth, auth_must -from fastapi_babel import _ -from app.model.chat.chat_step import ChatStep, ChatStepOut, ChatStepIn - -router = APIRouter(prefix="/chat", tags=["Chat Step Management"]) - - -@router.get("/steps", name="list chat steps", response_model=List[ChatStepOut]) -async def list_chat_steps( - task_id: str, step: Optional[str] = None, session: Session = Depends(session), auth: Auth = Depends(auth_must) -): - query = select(ChatStep) - if task_id is not None: - query = query.where(ChatStep.task_id == task_id) - if step is not None: - query = query.where(ChatStep.step == step) - chat_steps = session.exec(query).all() - return chat_steps - - -@router.get("/steps/playback/{task_id}", name="Playback Chat Step via SSE") -async def share_playback( - task_id: str, delay_time: float = 0, session: Session = Depends(session), auth: Auth = Depends(auth_must) -): - """ - Playbacks the chat steps (SSE). - """ - if delay_time > 5: - delay_time = 5 - - async def event_generator(): - stmt = select(ChatStep).where(ChatStep.task_id == task_id).order_by(asc(ChatStep.id)) - steps = session.exec(stmt).all() - - if not steps: - yield f"data: {json.dumps({'error': 'No steps found for this task.'})}\n\n" - return - - for step in steps: - step_data = { - "id": step.id, - "task_id": step.task_id, - "step": step.step, - "data": step.data, - "created_at": step.created_at.isoformat() if step.created_at else None, - } - yield f"data: {json.dumps(step_data)}\n\n" - if delay_time > 0: - await asyncio.sleep(delay_time) - - return StreamingResponse(event_generator(), media_type="text/event-stream") - - -@router.get("/steps/{step_id}", name="get chat step", response_model=ChatStepOut) -async def get_chat_step(step_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - chat_step = session.get(ChatStep, step_id) - if not chat_step: - raise HTTPException(status_code=404, detail=_("Chat step not found")) - return chat_step - - -@router.post("/steps", name="create chat step") -# TODO Limit request sources -async def create_chat_step(step: ChatStepIn, session: Session = Depends(session)): - chat_step = ChatStep( - task_id=step.task_id, - step=step.step, - data=step.data, - ) - session.add(chat_step) - session.commit() - session.refresh(chat_step) - return {"code": 200, "msg": "success"} - - -@router.put("/steps/{step_id}", name="update chat step", response_model=ChatStepOut) -async def update_chat_step( - step_id: int, chat_step_update: ChatStep, session: Session = Depends(session), auth: Auth = Depends(auth_must) -): - db_chat_step = session.get(ChatStep, step_id) - if not db_chat_step: - raise HTTPException(status_code=404, detail=_("Chat step not found")) - for key, value in chat_step_update.dict(exclude_unset=True).items(): - setattr(db_chat_step, key, value) - session.add(db_chat_step) - session.commit() - session.refresh(db_chat_step) - return db_chat_step - - -@router.delete("/steps/{step_id}", name="delete chat step") -async def delete_chat_step(step_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - db_chat_step = session.get(ChatStep, step_id) - if not db_chat_step: - raise HTTPException(status_code=404, detail=_("Chat step not found")) - session.delete(db_chat_step) - session.commit() - return Response(status_code=204) +import asyncio +import json +from typing import List, Optional +from fastapi import Depends, HTTPException, Query, Response, APIRouter +from fastapi.responses import StreamingResponse +from sqlmodel import Session, asc, select +from app.component.database import session +from app.component.auth import Auth, auth_must +from fastapi_babel import _ +from app.model.chat.chat_step import ChatStep, ChatStepOut, ChatStepIn +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_chat_step") + +router = APIRouter(prefix="/chat", tags=["Chat Step Management"]) + + +@router.get("/steps", name="list chat steps", response_model=List[ChatStepOut]) +@traceroot.trace() +async def list_chat_steps( + task_id: str, step: Optional[str] = None, session: Session = Depends(session), auth: Auth = Depends(auth_must) +): + """List chat steps for a task with optional step type filtering.""" + user_id = auth.user.id + query = select(ChatStep) + if task_id is not None: + query = query.where(ChatStep.task_id == task_id) + if step is not None: + query = query.where(ChatStep.step == step) + + chat_steps = session.exec(query).all() + logger.debug("Chat steps listed", extra={"user_id": user_id, "task_id": task_id, "step_type": step, "count": len(chat_steps)}) + return chat_steps + + +@router.get("/steps/playback/{task_id}", name="Playback Chat Step via SSE") +@traceroot.trace() +async def share_playback( + task_id: str, delay_time: float = 0, session: Session = Depends(session), auth: Auth = Depends(auth_must) +): + """Playback chat steps via SSE stream.""" + user_id = auth.user.id + if delay_time > 5: + logger.debug("Delay time capped", extra={"user_id": user_id, "task_id": task_id, "requested": delay_time, "capped": 5}) + delay_time = 5 + + async def event_generator(): + try: + stmt = select(ChatStep).where(ChatStep.task_id == task_id).order_by(asc(ChatStep.id)) + steps = session.exec(stmt).all() + + if not steps: + logger.warning("No steps found for playback", extra={"user_id": user_id, "task_id": task_id}) + yield f"data: {json.dumps({'error': 'No steps found for this task.'})}\n\n" + return + + logger.info("Chat step playback started", extra={"user_id": user_id, "task_id": task_id, "step_count": len(steps), "delay_time": delay_time}) + + for step in steps: + step_data = { + "id": step.id, + "task_id": step.task_id, + "step": step.step, + "data": step.data, + "created_at": step.created_at.isoformat() if step.created_at else None, + } + yield f"data: {json.dumps(step_data)}\n\n" + if delay_time > 0: + await asyncio.sleep(delay_time) + + + logger.info("Chat step playback completed", extra={"user_id": user_id, "task_id": task_id, "step_count": len(steps)}) + except Exception as e: + logger.error("Chat step playback error", extra={"user_id": user_id, "task_id": task_id, "error": str(e)}, exc_info=True) + yield f"data: {json.dumps({'error': 'Playback error occurred.'})}\n\n" + + return StreamingResponse(event_generator(), media_type="text/event-stream") + + +@router.get("/steps/{step_id}", name="get chat step", response_model=ChatStepOut) +@traceroot.trace() +async def get_chat_step(step_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Get specific chat step.""" + user_id = auth.user.id + chat_step = session.get(ChatStep, step_id) + + if not chat_step: + logger.warning("Chat step not found", extra={"user_id": user_id, "step_id": step_id}) + raise HTTPException(status_code=404, detail=_("Chat step not found")) + + logger.debug("Chat step retrieved", extra={"user_id": user_id, "step_id": step_id, "task_id": chat_step.task_id}) + return chat_step + + +@router.post("/steps", name="create chat step") +@traceroot.trace() +async def create_chat_step(step: ChatStepIn, session: Session = Depends(session)): + """Create new chat step. TODO: Implement request source validation.""" + try: + chat_step = ChatStep( + task_id=step.task_id, + step=step.step, + data=step.data, + ) + session.add(chat_step) + session.commit() + session.refresh(chat_step) + logger.info("Chat step created", extra={"step_id": chat_step.id, "task_id": step.task_id, "step_type": step.step}) + return {"code": 200, "msg": "success"} + except Exception as e: + session.rollback() + logger.error("Chat step creation failed", extra={"task_id": step.task_id, "step_type": step.step, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.put("/steps/{step_id}", name="update chat step", response_model=ChatStepOut) +@traceroot.trace() +async def update_chat_step( + step_id: int, chat_step_update: ChatStep, session: Session = Depends(session), auth: Auth = Depends(auth_must) +): + """Update chat step.""" + user_id = auth.user.id + db_chat_step = session.get(ChatStep, step_id) + + if not db_chat_step: + logger.warning("Chat step not found for update", extra={"user_id": user_id, "step_id": step_id}) + raise HTTPException(status_code=404, detail=_("Chat step not found")) + + try: + update_data = chat_step_update.dict(exclude_unset=True) + for key, value in update_data.items(): + setattr(db_chat_step, key, value) + session.add(db_chat_step) + session.commit() + session.refresh(db_chat_step) + logger.info("Chat step updated", extra={"user_id": user_id, "step_id": step_id, "task_id": db_chat_step.task_id, "fields_updated": list(update_data.keys())}) + return db_chat_step + except Exception as e: + session.rollback() + logger.error("Chat step update failed", extra={"user_id": user_id, "step_id": step_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete("/steps/{step_id}", name="delete chat step") +@traceroot.trace() +async def delete_chat_step(step_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Delete chat step.""" + user_id = auth.user.id + db_chat_step = session.get(ChatStep, step_id) + + if not db_chat_step: + logger.warning("Chat step not found for deletion", extra={"user_id": user_id, "step_id": step_id}) + raise HTTPException(status_code=404, detail=_("Chat step not found")) + + try: + session.delete(db_chat_step) + session.commit() + logger.info("Chat step deleted", extra={"user_id": user_id, "step_id": step_id, "task_id": db_chat_step.task_id}) + return Response(status_code=204) + except Exception as e: + session.rollback() + logger.error("Chat step deletion failed", extra={"user_id": user_id, "step_id": step_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") \ No newline at end of file diff --git a/server/app/controller/config/config_controller.py b/server/app/controller/config/config_controller.py index 51ee87d8d..103a343bc 100644 --- a/server/app/controller/config/config_controller.py +++ b/server/app/controller/config/config_controller.py @@ -1,121 +1,172 @@ -from typing import List, Optional -from fastapi import Depends, HTTPException, Query, Response, APIRouter -from sqlmodel import Session, select, or_ -from app.component.database import session -from app.component.auth import Auth, auth_must -from fastapi_babel import _ -from app.model.config.config import Config, ConfigCreate, ConfigUpdate, ConfigInfo, ConfigOut - -router = APIRouter(tags=["Config Management"]) - - -@router.get("/configs", name="list configs", response_model=list[ConfigOut]) -async def list_configs( - config_group: Optional[str] = None, session: Session = Depends(session), auth: Auth = Depends(auth_must) -): - query = select(Config) - user_id = auth.user.id - if user_id is not None: - query = query.where(Config.user_id == user_id) - if config_group is not None: - query = query.where(Config.config_group == config_group) - configs = session.exec(query).all() - return configs - - -@router.get("/configs/{config_id}", name="get config", response_model=ConfigOut) -async def get_config( - config_id: int, - session: Session = Depends(session), - auth: Auth = Depends(auth_must), -): - query = select(Config).where(Config.user_id == auth.user.id) - - if config_id is not None: - query = query.where(Config.id == config_id) - - config = session.exec(query).first() - - if not config: - raise HTTPException(status_code=404, detail=_("Configuration not found")) - return config - - -@router.post("/configs", name="create config", response_model=ConfigOut) -async def create_config(config: ConfigCreate, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - if not ConfigInfo.is_valid_env_var(config.config_group, config.config_name): - raise HTTPException(status_code=400, detail=_("Config Name is valid")) - - # Check if configuration already exists - existing_config = session.exec( - select(Config).where(Config.user_id == auth.user.id, Config.config_name == config.config_name) - ).first() - - if existing_config: - raise HTTPException(status_code=400, detail=_("Configuration already exists for this user")) - - db_config = Config( - user_id=auth.user.id, - config_name=config.config_name, - config_value=config.config_value, - config_group=config.config_group, - ) - session.add(db_config) - session.commit() - session.refresh(db_config) - return db_config - - -@router.put("/configs/{config_id}", name="update config", response_model=ConfigOut) -async def update_config( - config_id: int, config_update: ConfigUpdate, session: Session = Depends(session), auth: Auth = Depends(auth_must) -): - db_config = session.exec(select(Config).where(Config.id == config_id, Config.user_id == auth.user.id)).first() - - if not db_config: - raise HTTPException(status_code=404, detail=_("Configuration not found")) - - # Check if configuration group is valid - if not ConfigInfo.is_valid_env_var(config_update.config_group, config_update.config_name): - raise HTTPException(status_code=400, detail=_("Invalid configuration group")) - - # Check for conflicts with other configurations - existing_config = session.exec( - select(Config).where( - Config.user_id == auth.user.id, - Config.config_name == config_update.config_name, - Config.id != config_id, - ) - ).first() - - if existing_config: - raise HTTPException(status_code=400, detail=_("Configuration already exists for this user")) - - db_config.config_name = config_update.config_name - db_config.config_value = config_update.config_value - - session.add(db_config) - session.commit() - session.refresh(db_config) - return db_config - - -@router.delete("/configs/{config_id}", name="delete config") -async def delete_config(config_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - db_config = session.exec(select(Config).where(Config.id == config_id, Config.user_id == auth.user.id)).first() - - if not db_config: - raise HTTPException(status_code=404, detail=_("Configuration not found")) - session.delete(db_config) - session.commit() - return Response(status_code=204) - - -@router.get("/config/info", name="get config info") -async def get_config_info( - show_all: bool = Query(False, description="Show all config info, including those with empty env_vars"), -): - configs = ConfigInfo.getinfo() - if show_all: - return configs - return {k: v for k, v in configs.items() if v.get("env_vars") and len(v["env_vars"]) > 0} +from typing import List, Optional +from fastapi import Depends, HTTPException, Query, Response, APIRouter +from sqlmodel import Session, select, or_ +from app.component.database import session +from app.component.auth import Auth, auth_must +from fastapi_babel import _ +from app.model.config.config import Config, ConfigCreate, ConfigUpdate, ConfigInfo, ConfigOut +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_config_controller") + +router = APIRouter(tags=["Config Management"]) + + +@router.get("/configs", name="list configs", response_model=list[ConfigOut]) +@traceroot.trace() +async def list_configs( + config_group: Optional[str] = None, session: Session = Depends(session), auth: Auth = Depends(auth_must) +): + """List user's configurations with optional group filtering.""" + user_id = auth.user.id + query = select(Config).where(Config.user_id == user_id) + + if config_group is not None: + query = query.where(Config.config_group == config_group) + + configs = session.exec(query).all() + logger.debug("Configs listed", extra={"user_id": user_id, "config_group": config_group, "count": len(configs)}) + return configs + + +@router.get("/configs/{config_id}", name="get config", response_model=ConfigOut) +@traceroot.trace() +async def get_config( + config_id: int, + session: Session = Depends(session), + auth: Auth = Depends(auth_must), +): + query = select(Config).where(Config.user_id == auth.user.id) + + if config_id is not None: + query = query.where(Config.id == config_id) + + config = session.exec(query).first() + + if not config: + logger.warning("Config not found") + raise HTTPException(status_code=404, detail=_("Configuration not found")) + + logger.debug("Config retrieved") + return config + + +@router.post("/configs", name="create config", response_model=ConfigOut) +@traceroot.trace() +async def create_config(config: ConfigCreate, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Create new configuration.""" + user_id = auth.user.id + + if not ConfigInfo.is_valid_env_var(config.config_group, config.config_name): + logger.warning("Config validation failed", extra={"user_id": user_id, "config_group": config.config_group, "config_name": config.config_name}) + raise HTTPException(status_code=400, detail=_("Invalid config name or group")) + + # Check if configuration already exists + existing_config = session.exec( + select(Config).where(Config.user_id == user_id, Config.config_name == config.config_name) + ).first() + + if existing_config: + logger.warning("Config creation failed: already exists", extra={"user_id": user_id, "config_name": config.config_name}) + raise HTTPException(status_code=400, detail=_("Configuration already exists for this user")) + + try: + db_config = Config( + user_id=user_id, + config_name=config.config_name, + config_value=config.config_value, + config_group=config.config_group, + ) + session.add(db_config) + session.commit() + session.refresh(db_config) + logger.info("Config created", extra={"user_id": user_id, "config_id": db_config.id, "config_group": config.config_group, "config_name": config.config_name}) + return db_config + except Exception as e: + session.rollback() + logger.error("Config creation failed", extra={"user_id": user_id, "config_name": config.config_name, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.put("/configs/{config_id}", name="update config", response_model=ConfigOut) +@traceroot.trace() +async def update_config( + config_id: int, config_update: ConfigUpdate, session: Session = Depends(session), auth: Auth = Depends(auth_must) +): + """Update configuration.""" + user_id = auth.user.id + db_config = session.exec(select(Config).where(Config.id == config_id, Config.user_id == user_id)).first() + + if not db_config: + logger.warning("Config not found for update", extra={"user_id": user_id, "config_id": config_id}) + raise HTTPException(status_code=404, detail=_("Configuration not found")) + + # Check if configuration group is valid + if not ConfigInfo.is_valid_env_var(config_update.config_group, config_update.config_name): + logger.warning("Config update validation failed", extra={"user_id": user_id, "config_id": config_id, "config_group": config_update.config_group}) + raise HTTPException(status_code=400, detail=_("Invalid configuration group")) + + # Check for conflicts with other configurations + existing_config = session.exec( + select(Config).where( + Config.user_id == user_id, + Config.config_name == config_update.config_name, + Config.id != config_id, + ) + ).first() + + if existing_config: + logger.warning("Config update failed: duplicate name", extra={"user_id": user_id, "config_id": config_id, "config_name": config_update.config_name}) + raise HTTPException(status_code=400, detail=_("Configuration already exists for this user")) + + try: + db_config.config_name = config_update.config_name + db_config.config_value = config_update.config_value + db_config.config_group = config_update.config_group + session.add(db_config) + session.commit() + session.refresh(db_config) + logger.info("Config updated", extra={"user_id": user_id, "config_id": config_id, "config_group": config_update.config_group}) + return db_config + except Exception as e: + session.rollback() + logger.error("Config update failed", extra={"user_id": user_id, "config_id": config_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete("/configs/{config_id}", name="delete config") +@traceroot.trace() +async def delete_config(config_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Delete configuration.""" + user_id = auth.user.id + db_config = session.exec(select(Config).where(Config.id == config_id, Config.user_id == user_id)).first() + + if not db_config: + logger.warning("Config not found for deletion", extra={"user_id": user_id, "config_id": config_id}) + raise HTTPException(status_code=404, detail=_("Configuration not found")) + + try: + session.delete(db_config) + session.commit() + logger.info("Config deleted", extra={"user_id": user_id, "config_id": config_id, "config_name": db_config.config_name}) + return Response(status_code=204) + except Exception as e: + session.rollback() + logger.error("Config deletion failed", extra={"user_id": user_id, "config_id": config_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get("/config/info", name="get config info") +@traceroot.trace() +async def get_config_info( + show_all: bool = Query(False, description="Show all config info, including those with empty env_vars"), +): + """Get available configuration templates and info.""" + configs = ConfigInfo.getinfo() + if show_all: + logger.debug("Config info retrieved", extra={"show_all": True, "count": len(configs)}) + return configs + + filtered = {k: v for k, v in configs.items() if v.get("env_vars") and len(v["env_vars"]) > 0} + logger.debug("Config info retrieved", extra={"show_all": False, "total_count": len(configs), "filtered_count": len(filtered)}) + return filtered \ No newline at end of file diff --git a/server/app/controller/health_controller.py b/server/app/controller/health_controller.py new file mode 100644 index 000000000..6428097cd --- /dev/null +++ b/server/app/controller/health_controller.py @@ -0,0 +1,15 @@ +from fastapi import APIRouter +from pydantic import BaseModel + +router = APIRouter(tags=["Health"]) + + +class HealthResponse(BaseModel): + status: str + service: str + + +@router.get("/health", name="health check", response_model=HealthResponse) +async def health_check(): + """Health check endpoint for monitoring and container orchestration.""" + return HealthResponse(status="ok", service="eigent-server") diff --git a/server/app/controller/mcp/mcp_controller.py b/server/app/controller/mcp/mcp_controller.py index 1a38c7580..72efbdba8 100644 --- a/server/app/controller/mcp/mcp_controller.py +++ b/server/app/controller/mcp/mcp_controller.py @@ -1,214 +1,262 @@ -import os -from typing import Dict -from fastapi import Depends, HTTPException, APIRouter -from fastapi_babel import _ -from fastapi_pagination import Page -from fastapi_pagination.ext.sqlmodel import paginate -from sqlmodel import Session, col, select -from sqlalchemy.orm import selectinload, with_loader_criteria -from app.component.auth import Auth, auth_must -from app.component.database import session -from app.model.mcp.mcp import Mcp, McpOut, McpType -from app.model.mcp.mcp_env import McpEnv, Status as McpEnvStatus -from app.model.mcp.mcp_user import McpImportType, McpUser, Status -from loguru import logger -from camel.toolkits.mcp_toolkit import MCPToolkit -from app.component.environment import env - -from app.component.validator.McpServer import ( - McpRemoteServer, - McpServerItem, - validate_mcp_remote_servers, - validate_mcp_servers, -) - -router = APIRouter(tags=["Mcp Servers"]) - - -async def pre_instantiate_mcp_toolkit(config_dict: dict) -> bool: - """ - Pre-instantiate MCP toolkit to complete authentication process - - Args: - config_dict: MCP server configuration dictionary - - Returns: - bool: Whether successfully instantiated and connected - """ - try: - # Ensure unified auth directory for all mcp servers - for server_config in config_dict.get("mcpServers", {}).values(): - if "env" not in server_config: - server_config["env"] = {} - # Set global auth directory to persist authentication across tasks - if "MCP_REMOTE_CONFIG_DIR" not in server_config["env"]: - server_config["env"]["MCP_REMOTE_CONFIG_DIR"] = env( - "MCP_REMOTE_CONFIG_DIR", - os.path.expanduser("~/.mcp-auth") - ) - - # Create MCP toolkit and attempt to connect - mcp_toolkit = MCPToolkit(config_dict=config_dict, timeout=30) - await mcp_toolkit.connect() - - # Get tools list to ensure connection is successful - tools = mcp_toolkit.get_tools() - logger.info(f"Successfully pre-instantiated MCP toolkit with {len(tools)} tools") - - # Disconnect, authentication info is already saved - await mcp_toolkit.disconnect() - return True - - except Exception as e: - logger.warning(f"Failed to pre-instantiate MCP toolkit: {e!r}") - return False - - -@router.get("/mcps", name="mcp list") -async def gets( - keyword: str | None = None, - category_id: int | None = None, - mine: int | None = None, - session: Session = Depends(session), - auth: Auth = Depends(auth_must), -) -> Page[McpOut]: - stmt = ( - select(Mcp) - .where(Mcp.no_delete()) - .options( - selectinload(Mcp.category), - selectinload(Mcp.envs), - with_loader_criteria(McpEnv, col(McpEnv.status) == McpEnvStatus.in_use), - ) - # .order_by(col(Mcp.sort).desc()) - ) - if keyword: - stmt = stmt.where(col(Mcp.key).like(f"%{keyword.lower()}%")) - if category_id: - stmt = stmt.where(Mcp.category_id == category_id) - if mine and auth: - stmt = ( - stmt.join(McpUser) - .where(McpUser.user_id == auth.user.id) - .options( - selectinload(Mcp.mcp_user), - with_loader_criteria(McpUser, col(McpUser.user_id) == auth.user.id), - ) - ) - return paginate(session, stmt) - - -@router.get("/mcp", name="mcp detail", response_model=McpOut) -async def get(id: int, session: Session = Depends(session)): - stmt = select(Mcp).where(Mcp.no_delete(), Mcp.id == id).options(selectinload(Mcp.category), selectinload(Mcp.envs)) - model = session.exec(stmt).one() - return model - - -@router.post("/mcp/install", name="mcp install") -async def install(mcp_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - mcp = session.get_one(Mcp, mcp_id) - if not mcp: - raise HTTPException(status_code=404, detail=_("Mcp not found")) - exists = session.exec(select(McpUser).where(McpUser.mcp_id == mcp.id, McpUser.user_id == auth.user.id)).first() - if exists: - raise HTTPException(status_code=400, detail=_("mcp is installed")) - - install_command: dict = mcp.install_command - - # Pre-instantiate MCP toolkit for authentication - config_dict = { - "mcpServers": { - mcp.key: install_command - } - } - - try: - success = await pre_instantiate_mcp_toolkit(config_dict) - if not success: - logger.warning(f"Pre-instantiation failed for MCP {mcp.key}, but continuing with installation") - except Exception as e: - logger.warning(f"Exception during pre-instantiation for MCP {mcp.key}: {e}") - - mcp_user = McpUser( - mcp_id=mcp.id, - user_id=auth.user.id, - mcp_name=mcp.name, - mcp_key=mcp.key, - mcp_desc=mcp.description, - type=mcp.type, - status=Status.enable, - command=install_command["command"], - args=install_command["args"], - env=install_command["env"], - server_url=None, - ) - mcp_user.save() - return mcp_user - - -@router.post("/mcp/import/{mcp_type}", name="mcp import") -async def import_mcp( - mcp_type: McpImportType, mcp_data: dict, session: Session = Depends(session), auth: Auth = Depends(auth_must) -): - logger.debug(mcp_type, mcp_type.value) - - if mcp_type == McpImportType.Local: - is_valid, res = validate_mcp_servers(mcp_data) - if not is_valid: - raise HTTPException(status_code=400, detail=res) - mcp_data: Dict[str, McpServerItem] = res.mcpServers - - for name, data in mcp_data.items(): - # Pre-instantiate MCP toolkit for authentication - config_dict = { - "mcpServers": { - name: { - "command": data.command, - "args": data.args, - "env": data.env or {} - } - } - } - - try: - success = await pre_instantiate_mcp_toolkit(config_dict) - if not success: - logger.warning(f"Pre-instantiation failed for local MCP {name}, but continuing with installation") - except Exception as e: - logger.warning(f"Exception during pre-instantiation for local MCP {name}: {e}") - - mcp_user = McpUser( - mcp_id=0, - user_id=auth.user.id, - mcp_name=name, - mcp_key=name, - mcp_desc=name, - type=McpType.Local, - status=Status.enable, - command=data.command, - args=data.args, - env=data.env, - server_url=None, - ) - mcp_user.save() - return {"message": "Local MCP servers imported successfully", "count": len(mcp_data)} - elif mcp_type == McpImportType.Remote: - is_valid, res = validate_mcp_remote_servers(mcp_data) - if not is_valid: - raise HTTPException(status_code=400, detail=res) - data: McpRemoteServer = res - - # For remote servers, we don't need to pre-instantiate as they typically don't require authentication - # but we can still try to validate the connection if needed - - mcp_user = McpUser( - mcp_id=0, - user_id=auth.user.id, - type=McpType.Remote, - status=Status.enable, - mcp_name=data.server_name, - server_url=data.server_url, - ) - mcp_user.save() - return mcp_user +import os +from typing import Dict +from fastapi import Depends, HTTPException, APIRouter +from fastapi_babel import _ +from fastapi_pagination import Page +from fastapi_pagination.ext.sqlmodel import paginate +from sqlmodel import Session, col, select +from sqlalchemy.orm import selectinload, with_loader_criteria +from app.component.auth import Auth, auth_must +from app.component.database import session +from app.model.mcp.mcp import Mcp, McpOut, McpType +from app.model.mcp.mcp_env import McpEnv, Status as McpEnvStatus +from app.model.mcp.mcp_user import McpImportType, McpUser, Status +from camel.toolkits.mcp_toolkit import MCPToolkit +from app.component.environment import env +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_mcp_controller") + +from app.component.validator.McpServer import ( + McpRemoteServer, + McpServerItem, + validate_mcp_remote_servers, + validate_mcp_servers, +) + +router = APIRouter(tags=["Mcp Servers"]) + + +async def pre_instantiate_mcp_toolkit(config_dict: dict) -> bool: + """ + Pre-instantiate MCP toolkit to complete authentication process + + Args: + config_dict: MCP server configuration dictionary + + Returns: + bool: Whether successfully instantiated and connected + """ + try: + # Ensure unified auth directory for all mcp servers + for server_config in config_dict.get("mcpServers", {}).values(): + if "env" not in server_config: + server_config["env"] = {} + # Set global auth directory to persist authentication across tasks + if "MCP_REMOTE_CONFIG_DIR" not in server_config["env"]: + server_config["env"]["MCP_REMOTE_CONFIG_DIR"] = env( + "MCP_REMOTE_CONFIG_DIR", + os.path.expanduser("~/.mcp-auth") + ) + + # Create MCP toolkit and attempt to connect + mcp_toolkit = MCPToolkit(config_dict=config_dict, timeout=30) + await mcp_toolkit.connect() + + # Get tools list to ensure connection is successful + tools = mcp_toolkit.get_tools() + logger.info("MCP toolkit pre-instantiated", extra={"tools_count": len(tools)}) + + # Disconnect, authentication info is already saved + await mcp_toolkit.disconnect() + return True + + except Exception as e: + logger.warning("MCP toolkit pre-instantiation failed", extra={"error": str(e)}, exc_info=True) + return False + + +@router.get("/mcps", name="mcp list") +@traceroot.trace() +async def gets( + keyword: str | None = None, + category_id: int | None = None, + mine: int | None = None, + session: Session = Depends(session), + auth: Auth = Depends(auth_must), +) -> Page[McpOut]: + """List MCP servers with optional filtering.""" + user_id = auth.user.id + stmt = ( + select(Mcp) + .where(Mcp.no_delete()) + .options( + selectinload(Mcp.category), + selectinload(Mcp.envs), + with_loader_criteria(McpEnv, col(McpEnv.status) == McpEnvStatus.in_use), + ) + ) + if keyword: + stmt = stmt.where(col(Mcp.key).like(f"%{keyword.lower()}%")) + if category_id: + stmt = stmt.where(Mcp.category_id == category_id) + if mine and auth: + stmt = ( + stmt.join(McpUser) + .where(McpUser.user_id == user_id) + .options( + selectinload(Mcp.mcp_user), + with_loader_criteria(McpUser, col(McpUser.user_id) == user_id), + ) + ) + + result = paginate(session, stmt) + total = result.total if hasattr(result, 'total') else 0 + logger.debug("MCP list retrieved", extra={"user_id": user_id, "keyword": keyword, "category_id": category_id, "mine": mine, "total": total}) + return result + + +@router.get("/mcp", name="mcp detail", response_model=McpOut) +@traceroot.trace() +async def get(id: int, session: Session = Depends(session)): + """Get MCP server details.""" + try: + stmt = select(Mcp).where(Mcp.no_delete(), Mcp.id == id).options(selectinload(Mcp.category), selectinload(Mcp.envs)) + model = session.exec(stmt).one() + logger.debug("MCP detail retrieved", extra={"mcp_id": id, "mcp_key": model.key}) + return model + except Exception as e: + logger.warning("MCP not found", extra={"mcp_id": id}) + raise HTTPException(status_code=404, detail=_("Mcp not found")) + + +@router.post("/mcp/install", name="mcp install") +@traceroot.trace() +async def install(mcp_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Install MCP server for user.""" + user_id = auth.user.id + + mcp = session.get_one(Mcp, mcp_id) + if not mcp: + logger.warning("MCP install failed: MCP not found", extra={"user_id": user_id, "mcp_id": mcp_id}) + raise HTTPException(status_code=404, detail=_("Mcp not found")) + + exists = session.exec(select(McpUser).where(McpUser.mcp_id == mcp.id, McpUser.user_id == user_id)).first() + if exists: + logger.warning("MCP install failed: already installed", extra={"user_id": user_id, "mcp_id": mcp_id, "mcp_key": mcp.key}) + raise HTTPException(status_code=400, detail=_("mcp is installed")) + + install_command: dict = mcp.install_command + + # Pre-instantiate MCP toolkit for authentication + config_dict = { + "mcpServers": { + mcp.key: install_command + } + } + + try: + success = await pre_instantiate_mcp_toolkit(config_dict) + if not success: + logger.warning("MCP pre-instantiation failed, continuing with installation", extra={"user_id": user_id, "mcp_id": mcp_id, "mcp_key": mcp.key}) + else: + logger.debug("MCP toolkit pre-instantiated", extra={"mcp_key": mcp.key}) + except Exception as e: + logger.warning("MCP pre-instantiation exception", extra={"user_id": user_id, "mcp_key": mcp.key, "error": str(e)}, exc_info=True) + + try: + mcp_user = McpUser( + mcp_id=mcp.id, + user_id=user_id, + mcp_name=mcp.name, + mcp_key=mcp.key, + mcp_desc=mcp.description, + type=mcp.type, + status=Status.enable, + command=install_command["command"], + args=install_command["args"], + env=install_command["env"], + server_url=None, + ) + mcp_user.save() + logger.info("MCP installed", extra={"user_id": user_id, "mcp_id": mcp_id, "mcp_key": mcp.key}) + return mcp_user + except Exception as e: + logger.error("MCP installation failed", extra={"user_id": user_id, "mcp_id": mcp_id, "mcp_key": mcp.key, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.post("/mcp/import/{mcp_type}", name="mcp import") +@traceroot.trace() +async def import_mcp( + mcp_type: McpImportType, mcp_data: dict, session: Session = Depends(session), auth: Auth = Depends(auth_must) +): + """Import MCP servers (local or remote).""" + user_id = auth.user.id + + if mcp_type == McpImportType.Local: + logger.info("Importing local MCP servers", extra={"user_id": user_id}) + is_valid, res = validate_mcp_servers(mcp_data) + if not is_valid: + logger.warning("Local MCP import validation failed", extra={"user_id": user_id, "error": res}) + raise HTTPException(status_code=400, detail=res) + + mcp_data: Dict[str, McpServerItem] = res.mcpServers + imported_count = 0 + + for name, data in mcp_data.items(): + config_dict = { + "mcpServers": { + name: { + "command": data.command, + "args": data.args, + "env": data.env or {} + } + } + } + + try: + success = await pre_instantiate_mcp_toolkit(config_dict) + if not success: + logger.warning("Local MCP pre-instantiation failed, continuing", extra={"user_id": user_id, "mcp_name": name}) + except Exception as e: + logger.warning("Local MCP pre-instantiation exception", extra={"user_id": user_id, "mcp_name": name, "error": str(e)}) + + try: + mcp_user = McpUser( + mcp_id=0, + user_id=user_id, + mcp_name=name, + mcp_key=name, + mcp_desc=name, + type=McpType.Local, + status=Status.enable, + command=data.command, + args=data.args, + env=data.env, + server_url=None, + ) + mcp_user.save() + imported_count += 1 + except Exception as e: + logger.error("Failed to import local MCP", extra={"user_id": user_id, "mcp_name": name, "error": str(e)}, exc_info=True) + + logger.info("Local MCPs imported", extra={"user_id": user_id, "count": imported_count}) + return {"message": "Local MCP servers imported successfully", "count": imported_count} + + elif mcp_type == McpImportType.Remote: + logger.info("Importing remote MCP server", extra={"user_id": user_id}) + is_valid, res = validate_mcp_remote_servers(mcp_data) + if not is_valid: + logger.warning("Remote MCP import validation failed", extra={"user_id": user_id, "error": res}) + raise HTTPException(status_code=400, detail=res) + + data: McpRemoteServer = res + + try: + # For remote servers, we don't need to pre-instantiate as they typically don't require authentication + # but we can still try to validate the connection if needed + mcp_user = McpUser( + mcp_id=0, + user_id=user_id, + type=McpType.Remote, + status=Status.enable, + mcp_name=data.server_name, + server_url=data.server_url, + ) + mcp_user.save() + logger.info("Remote MCP imported", extra={"user_id": user_id, "server_name": data.server_name, "server_url": data.server_url}) + return mcp_user + except Exception as e: + logger.error("Remote MCP import failed", extra={"user_id": user_id, "server_name": data.server_name, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") \ No newline at end of file diff --git a/server/app/controller/mcp/proxy_controller.py b/server/app/controller/mcp/proxy_controller.py index aa008229a..0ec1a0cfd 100644 --- a/server/app/controller/mcp/proxy_controller.py +++ b/server/app/controller/mcp/proxy_controller.py @@ -1,173 +1,196 @@ -from fastapi import APIRouter, Depends -from exa_py import Exa -from loguru import logger -from app.component.auth import key_must -from app.component.environment import env_not_empty -from app.model.mcp.proxy import ExaSearch -from typing import Any, cast -import requests - -from app.model.user.key import Key - - -router = APIRouter(prefix="/proxy", tags=["Mcp Servers"]) - - -@router.post("/exa") -def exa_search(search: ExaSearch, key: Key = Depends(key_must)): - EXA_API_KEY = env_not_empty("EXA_API_KEY") - try: - exa = Exa(EXA_API_KEY) - - if search.num_results is not None and not 0 < search.num_results <= 100: - raise ValueError("num_results must be between 1 and 100") - - if search.include_text is not None: - if len(search.include_text) > 1: - raise ValueError("include_text can only contain 1 string") - if len(search.include_text[0].split()) > 5: - raise ValueError("include_text string cannot be longer than 5 words") - - if search.exclude_text is not None: - if len(search.exclude_text) > 1: - raise ValueError("exclude_text can only contain 1 string") - if len(search.exclude_text[0].split()) > 5: - raise ValueError("exclude_text string cannot be longer than 5 words") - - # Call Exa API with direct parameters - if search.text: - results = cast( - dict[str, Any], - exa.search_and_contents( - query=search.query, - type=search.search_type, - category=search.category, - num_results=search.num_results, - include_text=search.include_text, - exclude_text=search.exclude_text, - use_autoprompt=search.use_autoprompt, - text=True, - ), - ) - else: - results = cast( - dict[str, Any], - exa.search( - query=search.query, - type=search.search_type, - category=search.category, - num_results=search.num_results, - include_text=search.include_text, - exclude_text=search.exclude_text, - use_autoprompt=search.use_autoprompt, - ), - ) - - return results - - except Exception as e: - return {"error": f"Exa search failed: {e!s}"} - - -@router.get("/google") -def google_search(query: str, search_type: str = "web", key: Key = Depends(key_must)): - # https://developers.google.com/custom-search/v1/overview - GOOGLE_API_KEY = env_not_empty("GOOGLE_API_KEY") - # https://cse.google.com/cse/all - SEARCH_ENGINE_ID = env_not_empty("SEARCH_ENGINE_ID") - - # Using the first page - start_page_idx = 1 - # Different language may get different result - search_language = "en" - # How many pages to return - num_result_pages = 10 - # Constructing the URL - # Doc: https://developers.google.com/custom-search/v1/using_rest - base_url = ( - f"https://www.googleapis.com/customsearch/v1?" - f"key={GOOGLE_API_KEY}&cx={SEARCH_ENGINE_ID}&q={query}&start=" - f"{start_page_idx}&lr={search_language}&num={num_result_pages}" - ) - - if search_type == "image": - url = base_url + "&searchType=image" - else: - url = base_url - - responses = [] - # Fetch the results given the URL - try: - # Make the get - result = requests.get(url) - data = result.json() - - # Get the result items - if "items" in data: - search_items = data.get("items") - - # Iterate over results found - for i, search_item in enumerate(search_items, start=1): - if search_type == "image": - # Process image search results - title = search_item.get("title") - image_url = search_item.get("link") - display_link = search_item.get("displayLink") - - # Get context URL (page containing the image) - image_info = search_item.get("image", {}) - context_url = image_info.get("contextLink", "") - - # Get image dimensions if available - width = image_info.get("width") - height = image_info.get("height") - - response = { - "result_id": i, - "title": title, - "image_url": image_url, - "display_link": display_link, - "context_url": context_url, - } - - # Add dimensions if available - if width: - response["width"] = int(width) - if height: - response["height"] = int(height) - - responses.append(response) - else: - # Process web search results (existing logic) - # Check metatags are present - if "pagemap" not in search_item: - continue - if "metatags" not in search_item["pagemap"]: - continue - if "og:description" in search_item["pagemap"]["metatags"][0]: - long_description = search_item["pagemap"]["metatags"][0]["og:description"] - else: - long_description = "N/A" - # Get the page title - title = search_item.get("title") - # Page snippet - snippet = search_item.get("snippet") - - # Extract the page url - link = search_item.get("link") - response = { - "result_id": i, - "title": title, - "description": snippet, - "long_description": long_description, - "url": link, - } - responses.append(response) - else: - error_info = data.get("error", {}) - logger.error(f"Google search failed - API response: {error_info}") - responses.append({"error": f"Google search failed - API response: {error_info}"}) - - except Exception as e: - responses.append({"error": f"google search failed: {e!s}"}) - return responses +from fastapi import APIRouter, Depends, HTTPException +from exa_py import Exa +from app.component.auth import key_must +from app.component.environment import env_not_empty +from app.model.mcp.proxy import ExaSearch +from typing import Any, cast +import requests +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_proxy_controller") + +from app.model.user.key import Key + + +router = APIRouter(prefix="/proxy", tags=["Mcp Servers"]) + + +@router.post("/exa") +@traceroot.trace() +def exa_search(search: ExaSearch, key: Key = Depends(key_must)): + """Search using Exa API.""" + EXA_API_KEY = env_not_empty("EXA_API_KEY") + try: + # Validate input parameters + if search.num_results is not None and not 0 < search.num_results <= 100: + logger.warning("Invalid exa search parameter", extra={"param": "num_results", "value": search.num_results}) + raise ValueError("num_results must be between 1 and 100") + + if search.include_text is not None and len(search.include_text) > 0: + if len(search.include_text) > 1: + logger.warning("Invalid exa search parameter", extra={"param": "include_text", "reason": "more than 1 string"}) + raise ValueError("include_text can only contain 1 string") + if len(search.include_text[0].split()) > 5: + logger.warning("Invalid exa search parameter", extra={"param": "include_text", "reason": "exceeds 5 words"}) + raise ValueError("include_text string cannot be longer than 5 words") + + if search.exclude_text is not None and len(search.exclude_text) > 0: + if len(search.exclude_text) > 1: + logger.warning("Invalid exa search parameter", extra={"param": "exclude_text", "reason": "more than 1 string"}) + raise ValueError("exclude_text can only contain 1 string") + if len(search.exclude_text[0].split()) > 5: + logger.warning("Invalid exa search parameter", extra={"param": "exclude_text", "reason": "exceeds 5 words"}) + raise ValueError("exclude_text string cannot be longer than 5 words") + + exa = Exa(EXA_API_KEY) + + # Call Exa API with direct parameters + if search.text: + results = cast( + dict[str, Any], + exa.search_and_contents( + query=search.query, + type=search.search_type, + category=search.category, + num_results=search.num_results, + include_text=search.include_text, + exclude_text=search.exclude_text, + use_autoprompt=search.use_autoprompt, + text=True, + ), + ) + else: + results = cast( + dict[str, Any], + exa.search( + query=search.query, + type=search.search_type, + category=search.category, + num_results=search.num_results, + include_text=search.include_text, + exclude_text=search.exclude_text, + use_autoprompt=search.use_autoprompt, + ), + ) + + result_count = len(results.get("results", [])) if "results" in results else 0 + logger.info("Exa search completed", extra={"query": search.query, "search_type": search.search_type, "result_count": result_count}) + return results + + except ValueError as e: + logger.warning("Exa search validation error", extra={"error": str(e)}) + raise HTTPException(status_code=500, detail="Internal server error") + except Exception as e: + logger.error("Exa search failed", extra={"query": search.query, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get("/google") +@traceroot.trace() +def google_search(query: str, search_type: str = "web", key: Key = Depends(key_must)): + """Search using Google Custom Search API.""" + # https://developers.google.com/custom-search/v1/overview + GOOGLE_API_KEY = env_not_empty("GOOGLE_API_KEY") + # https://cse.google.com/cse/all + SEARCH_ENGINE_ID = env_not_empty("SEARCH_ENGINE_ID") + + # Using the first page + start_page_idx = 1 + # Different language may get different result + search_language = "en" + # How many pages to return + num_result_pages = 10 + + # Constructing the URL + # Doc: https://developers.google.com/custom-search/v1/using_rest + base_url = ( + f"https://www.googleapis.com/customsearch/v1?" + f"key={GOOGLE_API_KEY}&cx={SEARCH_ENGINE_ID}&q={query}&start=" + f"{start_page_idx}&lr={search_language}&num={num_result_pages}" + ) + + if search_type == "image": + url = base_url + "&searchType=image" + else: + url = base_url + + responses = [] + + try: + # Make the GET request + result = requests.get(url) + data = result.json() + + # Get the result items + if "items" in data: + search_items = data.get("items") + + # Iterate over results found + for i, search_item in enumerate(search_items, start=1): + if search_type == "image": + # Process image search results + title = search_item.get("title") + image_url = search_item.get("link") + display_link = search_item.get("displayLink") + + # Get context URL (page containing the image) + image_info = search_item.get("image", {}) + context_url = image_info.get("contextLink", "") + + # Get image dimensions if available + width = image_info.get("width") + height = image_info.get("height") + + response = { + "result_id": i, + "title": title, + "image_url": image_url, + "display_link": display_link, + "context_url": context_url, + } + + # Add dimensions if available + if width: + response["width"] = int(width) + if height: + response["height"] = int(height) + + responses.append(response) + else: + # Process web search results + # Check metatags are present + if "pagemap" not in search_item: + continue + if "metatags" not in search_item["pagemap"]: + continue + if "og:description" in search_item["pagemap"]["metatags"][0]: + long_description = search_item["pagemap"]["metatags"][0]["og:description"] + else: + long_description = "N/A" + # Get the page title + title = search_item.get("title") + # Page snippet + snippet = search_item.get("snippet") + + # Extract the page url + link = search_item.get("link") + response = { + "result_id": i, + "title": title, + "description": snippet, + "long_description": long_description, + "url": link, + } + responses.append(response) + + logger.info("Google search completed", extra={"query": query, "search_type": search_type, "result_count": len(responses)}) + else: + error_info = data.get("error", {}) + logger.error("Google search API error", extra={"query": query, "api_error": error_info}) + raise HTTPException(status_code=500, detail="Internal server error") + + except Exception as e: + logger.error("Google search failed", extra={"query": query, "search_type": search_type, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + return responses \ No newline at end of file diff --git a/server/app/controller/mcp/user_controller.py b/server/app/controller/mcp/user_controller.py index 12b979abb..f1f90e400 100644 --- a/server/app/controller/mcp/user_controller.py +++ b/server/app/controller/mcp/user_controller.py @@ -1,139 +1,181 @@ -import os -from typing import List, Optional -from fastapi import Depends, HTTPException, Query, Response, APIRouter -from sqlmodel import Session, select -from app.component.database import session -from app.component.auth import Auth, auth_must -from fastapi_babel import _ -from app.model.mcp.mcp_user import McpUser, McpUserIn, McpUserOut, McpUserUpdate, Status -from app.model.mcp.mcp import Mcp -from loguru import logger -from camel.toolkits.mcp_toolkit import MCPToolkit -from app.component.environment import env - -router = APIRouter(tags=["McpUser Management"]) - - -async def pre_instantiate_mcp_toolkit(config_dict: dict) -> bool: - """ - Pre-instantiate MCP toolkit to complete authentication process - - Args: - config_dict: MCP server configuration dictionary - - Returns: - bool: Whether successfully instantiated and connected - """ - try: - # Ensure unified auth directory for all mcp servers - for server_config in config_dict.get("mcpServers", {}).values(): - if "env" not in server_config: - server_config["env"] = {} - # Set global auth directory to persist authentication across tasks - if "MCP_REMOTE_CONFIG_DIR" not in server_config["env"]: - server_config["env"]["MCP_REMOTE_CONFIG_DIR"] = env( - "MCP_REMOTE_CONFIG_DIR", - os.path.expanduser("~/.mcp-auth") - ) - - # Create MCP toolkit and attempt to connect - mcp_toolkit = MCPToolkit(config_dict=config_dict, timeout=30) - await mcp_toolkit.connect() - - # Get tools list to ensure connection is successful - tools = mcp_toolkit.get_tools() - logger.info(f"Successfully pre-instantiated MCP toolkit with {len(tools)} tools") - - # Disconnect, authentication info is already saved - await mcp_toolkit.disconnect() - return True - - except Exception as e: - logger.warning(f"Failed to pre-instantiate MCP toolkit: {e!r}") - return False - - -@router.get("/mcp/users", name="list mcp users", response_model=List[McpUserOut]) -async def list_mcp_users( - mcp_id: Optional[int] = None, - session: Session = Depends(session), - auth: Auth = Depends(auth_must), -): - user_id = auth.user.id - query = select(McpUser) - if mcp_id is not None: - query = query.where(McpUser.mcp_id == mcp_id) - if user_id is not None: - query = query.where(McpUser.user_id == user_id) - mcp_users = session.exec(query).all() - return mcp_users - - -@router.get("/mcp/users/{mcp_user_id}", name="get mcp user", response_model=McpUserOut) -async def get_mcp_user(mcp_user_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - query = select(McpUser).where(McpUser.id == mcp_user_id) - mcp_user = session.exec(query).first() - if not mcp_user: - raise HTTPException(status_code=404, detail=_("McpUser not found")) - return mcp_user - - -@router.post("/mcp/users", name="create mcp user", response_model=McpUserOut) -async def create_mcp_user(mcp_user: McpUserIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - exists = session.exec( - select(McpUser).where(McpUser.mcp_id == mcp_user.mcp_id, McpUser.user_id == auth.user.id) - ).first() - if exists: - raise HTTPException(status_code=400, detail=_("mcp is installed")) - - # Get MCP configuration from the main Mcp table - mcp = session.get(Mcp, mcp_user.mcp_id) - if mcp and mcp.install_command: - # Pre-instantiate MCP toolkit for authentication - config_dict = { - "mcpServers": { - mcp.key: mcp.install_command - } - } - - try: - success = await pre_instantiate_mcp_toolkit(config_dict) - if not success: - logger.warning(f"Pre-instantiation failed for MCP {mcp.key}, but continuing with user creation") - except Exception as e: - logger.warning(f"Exception during pre-instantiation for MCP {mcp.key}: {e}") - - db_mcp_user = McpUser(mcp_id=mcp_user.mcp_id, user_id=auth.user.id, env=mcp_user.env) - session.add(db_mcp_user) - session.commit() - session.refresh(db_mcp_user) - return db_mcp_user - - -@router.put("/mcp/users/{id}", name="update mcp user") -async def update_mcp_user( - id: int, - update_item: McpUserUpdate, - session: Session = Depends(session), - auth: Auth = Depends(auth_must), -): - model = session.get(McpUser, id) - if not model: - raise HTTPException(status_code=404, detail=_("Mcp Info not found")) - if model.user_id != auth.user.id: - raise HTTPException(status_code=400, detail=_("current user have no permission to modify")) - update_data = update_item.model_dump(exclude_unset=True) - model.update_fields(update_data) - model.save(session) - session.refresh(model) - return model - - -@router.delete("/mcp/users/{mcp_user_id}", name="delete mcp user") -async def delete_mcp_user(mcp_user_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - db_mcp_user = session.get(McpUser, mcp_user_id) - if not db_mcp_user: - raise HTTPException(status_code=404, detail=_("Mcp Info not found")) - session.delete(db_mcp_user) - session.commit() - return Response(status_code=204) +import os +from typing import List, Optional +from fastapi import Depends, HTTPException, Query, Response, APIRouter +from sqlmodel import Session, select +from app.component.database import session +from app.component.auth import Auth, auth_must +from fastapi_babel import _ +from app.model.mcp.mcp_user import McpUser, McpUserIn, McpUserOut, McpUserUpdate, Status +from app.model.mcp.mcp import Mcp +from camel.toolkits.mcp_toolkit import MCPToolkit +from app.component.environment import env +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_mcp_user_controller") + +router = APIRouter(tags=["McpUser Management"]) + + +async def pre_instantiate_mcp_toolkit(config_dict: dict) -> bool: + """ + Pre-instantiate MCP toolkit to complete authentication process + + Args: + config_dict: MCP server configuration dictionary + + Returns: + bool: Whether successfully instantiated and connected + """ + try: + # Ensure unified auth directory for all mcp servers + for server_config in config_dict.get("mcpServers", {}).values(): + if "env" not in server_config: + server_config["env"] = {} + # Set global auth directory to persist authentication across tasks + if "MCP_REMOTE_CONFIG_DIR" not in server_config["env"]: + server_config["env"]["MCP_REMOTE_CONFIG_DIR"] = env( + "MCP_REMOTE_CONFIG_DIR", + os.path.expanduser("~/.mcp-auth") + ) + + # Create MCP toolkit and attempt to connect + mcp_toolkit = MCPToolkit(config_dict=config_dict, timeout=30) + await mcp_toolkit.connect() + + # Get tools list to ensure connection is successful + tools = mcp_toolkit.get_tools() + logger.info("MCP toolkit pre-instantiated", extra={"tools_count": len(tools)}) + + # Disconnect, authentication info is already saved + await mcp_toolkit.disconnect() + return True + + except Exception as e: + logger.warning("MCP toolkit pre-instantiation failed", extra={"error": str(e)}, exc_info=True) + return False + + +@router.get("/mcp/users", name="list mcp users", response_model=List[McpUserOut]) +@traceroot.trace() +async def list_mcp_users( + mcp_id: Optional[int] = None, + session: Session = Depends(session), + auth: Auth = Depends(auth_must), +): + """List MCP users for current user.""" + user_id = auth.user.id + query = select(McpUser) + if mcp_id is not None: + query = query.where(McpUser.mcp_id == mcp_id) + if user_id is not None: + query = query.where(McpUser.user_id == user_id) + mcp_users = session.exec(query).all() + logger.debug("MCP users listed", extra={"user_id": user_id, "mcp_id": mcp_id, "count": len(mcp_users)}) + return mcp_users + + +@router.get("/mcp/users/{mcp_user_id}", name="get mcp user", response_model=McpUserOut) +@traceroot.trace() +async def get_mcp_user(mcp_user_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Get MCP user details.""" + query = select(McpUser).where(McpUser.id == mcp_user_id) + mcp_user = session.exec(query).first() + if not mcp_user: + logger.warning("MCP user not found", extra={"user_id": auth.user.id, "mcp_user_id": mcp_user_id}) + raise HTTPException(status_code=404, detail=_("McpUser not found")) + logger.debug("MCP user retrieved", extra={"user_id": auth.user.id, "mcp_user_id": mcp_user_id, "mcp_id": mcp_user.mcp_id}) + return mcp_user + + +@router.post("/mcp/users", name="create mcp user", response_model=McpUserOut) +@traceroot.trace() +async def create_mcp_user(mcp_user: McpUserIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Create MCP user installation.""" + user_id = auth.user.id + mcp_id = mcp_user.mcp_id + + exists = session.exec( + select(McpUser).where(McpUser.mcp_id == mcp_id, McpUser.user_id == user_id) + ).first() + if exists: + logger.warning("MCP already installed", extra={"user_id": user_id, "mcp_id": mcp_id}) + raise HTTPException(status_code=400, detail=_("mcp is installed")) + + # Get MCP configuration from the main Mcp table + mcp = session.get(Mcp, mcp_id) + if mcp and mcp.install_command: + config_dict = { + "mcpServers": { + mcp.key: mcp.install_command + } + } + + try: + success = await pre_instantiate_mcp_toolkit(config_dict) + if not success: + logger.warning("MCP pre-instantiation failed, continuing", extra={"user_id": user_id, "mcp_id": mcp_id, "mcp_key": mcp.key}) + except Exception as e: + logger.warning("MCP pre-instantiation exception", extra={"user_id": user_id, "mcp_id": mcp_id, "error": str(e)}, exc_info=True) + + try: + db_mcp_user = McpUser(mcp_id=mcp_id, user_id=user_id, env=mcp_user.env) + session.add(db_mcp_user) + session.commit() + session.refresh(db_mcp_user) + logger.info("MCP user created", extra={"user_id": user_id, "mcp_id": mcp_id, "mcp_user_id": db_mcp_user.id}) + return db_mcp_user + except Exception as e: + session.rollback() + logger.error("MCP user creation failed", extra={"user_id": user_id, "mcp_id": mcp_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.put("/mcp/users/{id}", name="update mcp user") +@traceroot.trace() +async def update_mcp_user( + id: int, + update_item: McpUserUpdate, + session: Session = Depends(session), + auth: Auth = Depends(auth_must), +): + """Update MCP user settings.""" + user_id = auth.user.id + model = session.get(McpUser, id) + if not model: + logger.warning("MCP user not found for update", extra={"user_id": user_id, "mcp_user_id": id}) + raise HTTPException(status_code=404, detail=_("Mcp Info not found")) + if model.user_id != user_id: + logger.warning("Unauthorized MCP user update", extra={"user_id": user_id, "mcp_user_id": id, "owner_id": model.user_id}) + raise HTTPException(status_code=400, detail=_("current user have no permission to modify")) + + try: + update_data = update_item.model_dump(exclude_unset=True) + model.update_fields(update_data) + model.save(session) + session.refresh(model) + logger.info("MCP user updated", extra={"user_id": user_id, "mcp_user_id": id}) + return model + except Exception as e: + logger.error("MCP user update failed", extra={"user_id": user_id, "mcp_user_id": id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete("/mcp/users/{mcp_user_id}", name="delete mcp user") +@traceroot.trace() +async def delete_mcp_user(mcp_user_id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Delete MCP user installation.""" + user_id = auth.user.id + db_mcp_user = session.get(McpUser, mcp_user_id) + if not db_mcp_user: + logger.warning("MCP user not found for deletion", extra={"user_id": user_id, "mcp_user_id": mcp_user_id}) + raise HTTPException(status_code=404, detail=_("Mcp Info not found")) + + try: + session.delete(db_mcp_user) + session.commit() + logger.info("MCP user deleted", extra={"user_id": user_id, "mcp_user_id": mcp_user_id, "mcp_id": db_mcp_user.mcp_id}) + return Response(status_code=204) + except Exception as e: + session.rollback() + logger.error("MCP user deletion failed", extra={"user_id": user_id, "mcp_user_id": mcp_user_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") \ No newline at end of file diff --git a/server/app/controller/oauth/oauth_controller.py b/server/app/controller/oauth/oauth_controller.py index be14905ce..c43e50973 100644 --- a/server/app/controller/oauth/oauth_controller.py +++ b/server/app/controller/oauth/oauth_controller.py @@ -1,58 +1,81 @@ -from fastapi import APIRouter, Request, HTTPException -from fastapi.responses import RedirectResponse, JSONResponse, HTMLResponse -from app.component.environment import env -from app.component.oauth_adapter import OauthCallbackPayload, get_oauth_adapter -from typing import Optional - -router = APIRouter(prefix="/oauth", tags=["Oauth Servers"]) - - -@router.get("/{app}/login", name="OAuth Login Redirect") -def oauth_login(app: str, request: Request, state: Optional[str] = None): - try: - callback_url = str(request.url_for("OAuth Callback", app=app)) - if callback_url.startswith("http://"): - callback_url = "https://" + callback_url[len("http://") :] - adapter = get_oauth_adapter(app, callback_url) - url = adapter.get_authorize_url(state) - if not url: - raise HTTPException(status_code=400, detail="Failed to generate authorization URL") - return RedirectResponse(str(url)) - except Exception as e: - raise HTTPException(status_code=400, detail=str(e)) - - -@router.get("/{app}/callback", name="OAuth Callback") -def oauth_callback(app: str, request: Request, code: Optional[str] = None, state: Optional[str] = None): - if not code: - raise HTTPException(status_code=400, detail="Missing code parameter") - redirect_url = f"eigent://callback/oauth?provider={app}&code={code}&state={state}" - html_content = f""" - - - OAuth Callback - - - -

Redirecting, please wait...

- - - - """ - return HTMLResponse(content=html_content) - - -@router.post("/{app}/token", name="OAuth Fetch Token") -def fetch_token(app: str, request: Request, data: OauthCallbackPayload): - try: - callback_url = str(request.url_for("OAuth Callback", app=app)) - if callback_url.startswith("http://"): - callback_url = "https://" + callback_url[len("http://") :] - - adapter = get_oauth_adapter(app, callback_url) - token_data = adapter.fetch_token(data.code) - return JSONResponse(token_data) - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) +from fastapi import APIRouter, Request, HTTPException +from fastapi.responses import RedirectResponse, JSONResponse, HTMLResponse +from app.component.environment import env +from app.component.oauth_adapter import OauthCallbackPayload, get_oauth_adapter +from typing import Optional +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_oauth_controller") + +router = APIRouter(prefix="/oauth", tags=["Oauth Servers"]) + + +@router.get("/{app}/login", name="OAuth Login Redirect") +@traceroot.trace() +def oauth_login(app: str, request: Request, state: Optional[str] = None): + """Redirect user to OAuth provider's authorization endpoint.""" + try: + callback_url = str(request.url_for("OAuth Callback", app=app)) + if callback_url.startswith("http://"): + callback_url = "https://" + callback_url[len("http://") :] + + adapter = get_oauth_adapter(app, callback_url) + url = adapter.get_authorize_url(state) + + if not url: + logger.error("Failed to generate authorization URL", extra={"provider": app, "callback_url": callback_url}) + raise HTTPException(status_code=400, detail="Failed to generate authorization URL") + + logger.info("OAuth login initiated", extra={"provider": app}) + return RedirectResponse(str(url)) + except HTTPException: + raise + except Exception as e: + logger.error("OAuth login failed", extra={"provider": app, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=400, detail="OAuth login failed") + + +@router.get("/{app}/callback", name="OAuth Callback") +@traceroot.trace() +def oauth_callback(app: str, request: Request, code: Optional[str] = None, state: Optional[str] = None): + """Handle OAuth provider callback and redirect to client app.""" + if not code: + logger.warning("OAuth callback missing code", extra={"provider": app}) + raise HTTPException(status_code=400, detail="Missing code parameter") + + logger.info("OAuth callback received", extra={"provider": app, "has_state": state is not None}) + + redirect_url = f"eigent://callback/oauth?provider={app}&code={code}&state={state}" + html_content = f""" + + + OAuth Callback + + + +

Redirecting, please wait...

+ + + + """ + return HTMLResponse(content=html_content) + + +@router.post("/{app}/token", name="OAuth Fetch Token") +@traceroot.trace() +def fetch_token(app: str, request: Request, data: OauthCallbackPayload): + """Exchange authorization code for access token.""" + try: + callback_url = str(request.url_for("OAuth Callback", app=app)) + if callback_url.startswith("http://"): + callback_url = "https://" + callback_url[len("http://") :] + + adapter = get_oauth_adapter(app, callback_url) + token_data = adapter.fetch_token(data.code) + logger.info("OAuth token fetched", extra={"provider": app}) + return JSONResponse(token_data) + except Exception as e: + logger.error("OAuth token fetch failed", extra={"provider": app, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") \ No newline at end of file diff --git a/server/app/controller/provider/provider_controller.py b/server/app/controller/provider/provider_controller.py index 410f246fc..5c7786da4 100644 --- a/server/app/controller/provider/provider_controller.py +++ b/server/app/controller/provider/provider_controller.py @@ -1,100 +1,140 @@ -from typing import List, Optional -from fastapi import Depends, HTTPException, Query, Response, APIRouter -from fastapi_babel import _ -from fastapi_pagination import Page -from fastapi_pagination.ext.sqlmodel import paginate -from sqlalchemy import update -from sqlmodel import Session, select, col -from sqlalchemy.exc import SQLAlchemyError - -from app.component.database import session -from app.component.auth import Auth, auth_must -from app.model.provider.provider import Provider, ProviderIn, ProviderOut, ProviderPreferIn - - -router = APIRouter(tags=["Provider Management"]) - - -@router.get("/providers", name="list providers", response_model=Page[ProviderOut]) -async def gets( - keyword: str | None = None, - prefer: Optional[bool] = Query(None, description="Filter by prefer status"), - session: Session = Depends(session), - auth: Auth = Depends(auth_must), -) -> Page[ProviderOut]: - user_id = auth.user.id - stmt = select(Provider).where(Provider.user_id == user_id, Provider.no_delete()) - if keyword: - stmt = stmt.where(col(Provider.provider_name).like(f"%{keyword}%")) - if prefer is not None: - stmt = stmt.where(Provider.prefer == prefer) - stmt = stmt.order_by(col(Provider.created_at).desc(), col(Provider.id).desc()) # Added for consistent pagination - return paginate(session, stmt) - - -@router.get("/provider", name="get provider detail", response_model=ProviderOut) -async def get(id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - user_id = auth.user.id - stmt = select(Provider).where(Provider.user_id == user_id, Provider.no_delete(), Provider.id == id) - model = session.exec(stmt).one_or_none() - if not model: - raise HTTPException(status_code=404, detail=_("Provider not found")) - return model - - -@router.post("/provider", name="create provider", response_model=ProviderOut) -async def post(data: ProviderIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - user_id = auth.user.id - model = Provider(**data.model_dump(), user_id=user_id) - model.save(session) - return model - - -@router.put("/provider/{id}", name="update provider", response_model=ProviderOut) -async def put(id: int, data: ProviderIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - user_id = auth.user.id - model = session.exec( - select(Provider).where(Provider.user_id == user_id, Provider.no_delete(), Provider.id == id) - ).one_or_none() - if not model: - raise HTTPException(status_code=404, detail=_("Provider not found")) - model.model_type = data.model_type - model.provider_name = data.provider_name - model.api_key = data.api_key - model.endpoint_url = data.endpoint_url - model.encrypted_config = data.encrypted_config - model.is_vaild = data.is_vaild - model.save(session) - session.refresh(model) - return model - - -@router.delete("/provider/{id}", name="delete provider") -async def delete(id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - user_id = auth.user.id - model = session.exec( - select(Provider).where(Provider.user_id == user_id, Provider.no_delete(), Provider.id == id) - ).one_or_none() - if not model: - raise HTTPException(status_code=404, detail=_("Provider not found")) - model.delete(session) - return Response(status_code=204) - - -@router.post("/provider/prefer", name="set provider prefer") -async def set_prefer(data: ProviderPreferIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - user_id = auth.user.id - try: - # 1. current user's all provider prefer set to false - session.exec(update(Provider).where(Provider.user_id == user_id, Provider.no_delete()).values(prefer=False)) - # 2. set the prefer of the specified provider_id to true - session.exec( - update(Provider) - .where(Provider.user_id == user_id, Provider.no_delete(), Provider.id == data.provider_id) - .values(prefer=True) - ) - session.commit() - return {"success": True} - except SQLAlchemyError as e: - session.rollback() - raise HTTPException(status_code=500, detail=str(e)) +from typing import List, Optional +from fastapi import Depends, HTTPException, Query, Response, APIRouter +from fastapi_babel import _ +from fastapi_pagination import Page +from fastapi_pagination.ext.sqlmodel import paginate +from sqlalchemy import update +from sqlmodel import Session, select, col +from sqlalchemy.exc import SQLAlchemyError + +from app.component.database import session +from app.component.auth import Auth, auth_must +from app.model.provider.provider import Provider, ProviderIn, ProviderOut, ProviderPreferIn +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_provider_controller") + +router = APIRouter(tags=["Provider Management"]) + + +@router.get("/providers", name="list providers", response_model=Page[ProviderOut]) +@traceroot.trace() +async def gets( + keyword: str | None = None, + prefer: Optional[bool] = Query(None, description="Filter by prefer status"), + session: Session = Depends(session), + auth: Auth = Depends(auth_must), +) -> Page[ProviderOut]: + """List user's providers with optional filtering.""" + user_id = auth.user.id + stmt = select(Provider).where(Provider.user_id == user_id, Provider.no_delete()) + if keyword: + stmt = stmt.where(col(Provider.provider_name).like(f"%{keyword}%")) + if prefer is not None: + stmt = stmt.where(Provider.prefer == prefer) + stmt = stmt.order_by(col(Provider.created_at).desc(), col(Provider.id).desc()) + logger.debug("Providers listed", extra={"user_id": user_id, "keyword": keyword, "prefer_filter": prefer}) + return paginate(session, stmt) + + +@router.get("/provider", name="get provider detail", response_model=ProviderOut) +@traceroot.trace() +async def get(id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Get provider details.""" + user_id = auth.user.id + stmt = select(Provider).where(Provider.user_id == user_id, Provider.no_delete(), Provider.id == id) + model = session.exec(stmt).one_or_none() + if not model: + logger.warning("Provider not found", extra={"user_id": user_id, "provider_id": id}) + raise HTTPException(status_code=404, detail=_("Provider not found")) + logger.debug("Provider retrieved", extra={"user_id": user_id, "provider_id": id}) + return model + + +@router.post("/provider", name="create provider", response_model=ProviderOut) +@traceroot.trace() +async def post(data: ProviderIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Create a new provider.""" + user_id = auth.user.id + try: + model = Provider(**data.model_dump(), user_id=user_id) + model.save(session) + logger.info("Provider created", extra={"user_id": user_id, "provider_id": model.id, "provider_name": data.provider_name}) + return model + except Exception as e: + logger.error("Provider creation failed", extra={"user_id": user_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.put("/provider/{id}", name="update provider", response_model=ProviderOut) +@traceroot.trace() +async def put(id: int, data: ProviderIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Update provider details.""" + user_id = auth.user.id + model = session.exec( + select(Provider).where(Provider.user_id == user_id, Provider.no_delete(), Provider.id == id) + ).one_or_none() + if not model: + logger.warning("Provider not found for update", extra={"user_id": user_id, "provider_id": id}) + raise HTTPException(status_code=404, detail=_("Provider not found")) + + try: + model.model_type = data.model_type + model.provider_name = data.provider_name + model.api_key = data.api_key + model.endpoint_url = data.endpoint_url + model.encrypted_config = data.encrypted_config + model.is_vaild = data.is_vaild + model.save(session) + session.refresh(model) + logger.info("Provider updated", extra={"user_id": user_id, "provider_id": id, "provider_name": data.provider_name}) + return model + except Exception as e: + logger.error("Provider update failed", extra={"user_id": user_id, "provider_id": id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete("/provider/{id}", name="delete provider") +@traceroot.trace() +async def delete(id: int, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Delete a provider.""" + user_id = auth.user.id + model = session.exec( + select(Provider).where(Provider.user_id == user_id, Provider.no_delete(), Provider.id == id) + ).one_or_none() + if not model: + logger.warning("Provider not found for deletion", extra={"user_id": user_id, "provider_id": id}) + raise HTTPException(status_code=404, detail=_("Provider not found")) + + try: + model.delete(session) + logger.info("Provider deleted", extra={"user_id": user_id, "provider_id": id}) + return Response(status_code=204) + except Exception as e: + logger.error("Provider deletion failed", extra={"user_id": user_id, "provider_id": id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.post("/provider/prefer", name="set provider prefer") +@traceroot.trace() +async def set_prefer(data: ProviderPreferIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Set preferred provider for user.""" + user_id = auth.user.id + provider_id = data.provider_id + + try: + # 1. Set all current user's providers prefer to false + session.exec(update(Provider).where(Provider.user_id == user_id, Provider.no_delete()).values(prefer=False)) + # 2. Set the prefer of the specified provider_id to true + session.exec( + update(Provider) + .where(Provider.user_id == user_id, Provider.no_delete(), Provider.id == provider_id) + .values(prefer=True) + ) + session.commit() + logger.info("Preferred provider set", extra={"user_id": user_id, "provider_id": provider_id}) + return {"success": True} + except SQLAlchemyError as e: + session.rollback() + logger.error("Failed to set preferred provider", extra={"user_id": user_id, "provider_id": provider_id, "error": str(e)}, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") \ No newline at end of file diff --git a/server/app/controller/user/login_controller.py b/server/app/controller/user/login_controller.py index 908e63da9..b0d943eb3 100644 --- a/server/app/controller/user/login_controller.py +++ b/server/app/controller/user/login_controller.py @@ -1,90 +1,114 @@ -from fastapi import APIRouter, Depends, HTTPException -from fastapi_babel import _ -from sqlmodel import Session -from app.component import code -from app.component.auth import Auth -from app.component.database import session -from app.component.encrypt import password_verify -from app.component.stack_auth import StackAuth -from app.exception.exception import UserException -from app.model.user.user import LoginByPasswordIn, LoginResponse, Status, User, RegisterIn -from loguru import logger -from app.component.environment import env - - -router = APIRouter(tags=["Login/Registration"]) - - -@router.post("/login", name="login by email or password") -async def by_password(data: LoginByPasswordIn, session: Session = Depends(session)) -> LoginResponse: - """ - User login with email and password - """ - user = User.by(User.email == data.email, s=session).one_or_none() - if not user or not password_verify(data.password, user.password): - raise UserException(code.password, _("Account or password error")) - return LoginResponse(token=Auth.create_access_token(user.id), email=user.email) - - -@router.post("/login-by_stack", name="login by stack") -async def by_stack_auth( - token: str, - type: str = "signup", - invite_code: str | None = None, - session: Session = Depends(session), -): - try: - stack_id = await StackAuth.user_id(token) - info = await StackAuth.user_info(token) - except Exception as e: - logger.error(e) - raise HTTPException(500, detail=_(f"{e}")) - user = User.by(User.stack_id == stack_id, s=session).one_or_none() - - if not user: - # Only signup can create user - if type != "signup": - raise UserException(code.error, _("User not found")) - with session as s: - try: - user = User( - username=info["username"] if "username" in info else None, - nickname=info["display_name"], - email=info["primary_email"], - avatar=info["profile_image_url"], - stack_id=stack_id, - ) - s.add(user) - s.commit() - session.refresh(user) - return LoginResponse(token=Auth.create_access_token(user.id), email=user.email) - except Exception as e: - s.rollback() - logger.error(f"Failed to register: {e}") - raise UserException(code.error, _("Failed to register")) - else: - if user.status == Status.Block: - raise UserException(code.error, _("Your account has been blocked.")) - return LoginResponse(token=Auth.create_access_token(user.id), email=user.email) - - -@router.post("/register", name="register by email/password") -async def register(data: RegisterIn, session: Session = Depends(session)): - # Check if email is already registered - if User.by(User.email == data.email, s=session).one_or_none(): - raise UserException(code.error, _("Email already registered")) - - with session as s: - try: - user = User( - email=data.email, - password=data.password, - ) - s.add(user) - s.commit() - s.refresh(user) - except Exception as e: - s.rollback() - logger.error(f"Failed to register: {e}") - raise UserException(code.error, _("Failed to register")) - return {"status": "success"} +from fastapi import APIRouter, Depends, HTTPException +from fastapi_babel import _ +from sqlmodel import Session +from app.component import code +from app.component.auth import Auth +from app.component.database import session +from app.component.encrypt import password_verify +from app.component.stack_auth import StackAuth +from app.exception.exception import UserException +from app.model.user.user import LoginByPasswordIn, LoginResponse, Status, User, RegisterIn +from app.component.environment import env +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_login_controller") + + +router = APIRouter(tags=["Login/Registration"]) + + +@router.post("/login", name="login by email or password") +@traceroot.trace() +async def by_password(data: LoginByPasswordIn, session: Session = Depends(session)) -> LoginResponse: + """ + User login with email and password + """ + email = data.email + user = User.by(User.email == email, s=session).one_or_none() + + if not user: + logger.warning("Login failed: user not found", extra={"email": email}) + raise UserException(code.password, _("Account or password error")) + + if not password_verify(data.password, user.password): + logger.warning("Login failed: invalid password", extra={"user_id": user.id, "email": email}) + raise UserException(code.password, _("Account or password error")) + + logger.info("User login successful", extra={"user_id": user.id, "email": email}) + return LoginResponse(token=Auth.create_access_token(user.id), email=user.email) + + +@router.post("/login-by_stack", name="login by stack") +@traceroot.trace() +async def by_stack_auth( + token: str, + type: str = "signup", + invite_code: str | None = None, + session: Session = Depends(session), +): + try: + stack_id = await StackAuth.user_id(token) + info = await StackAuth.user_info(token) + except Exception as e: + logger.error("Stack auth failed", extra={"type": type, "error": str(e)}, exc_info=True) + raise HTTPException(500, detail=_("Authentication failed")) + + user = User.by(User.stack_id == stack_id, s=session).one_or_none() + + if not user: + if type != "signup": + logger.warning("Stack auth signup blocked: user not found", extra={"stack_id": stack_id, "type": type}) + raise UserException(code.error, _("User not found")) + + with session as s: + try: + user = User( + username=info["username"] if "username" in info else None, + nickname=info["display_name"], + email=info["primary_email"], + avatar=info["profile_image_url"], + stack_id=stack_id, + ) + s.add(user) + s.commit() + s.refresh(user) + logger.info("New user registered via stack", extra={"user_id": user.id, "email": user.email, "stack_id": stack_id}) + return LoginResponse(token=Auth.create_access_token(user.id), email=user.email) + except Exception as e: + s.rollback() + logger.error("Stack auth registration failed", extra={"stack_id": stack_id, "error": str(e)}, exc_info=True) + raise UserException(code.error, _("Failed to register")) + else: + if user.status == Status.Block: + logger.warning("Blocked user login attempt", extra={"user_id": user.id, "stack_id": stack_id}) + raise UserException(code.error, _("Your account has been blocked.")) + + logger.info("User login via stack successful", extra={"user_id": user.id, "email": user.email, "stack_id": stack_id}) + return LoginResponse(token=Auth.create_access_token(user.id), email=user.email) + + +@router.post("/register", name="register by email/password") +@traceroot.trace() +async def register(data: RegisterIn, session: Session = Depends(session)): + email = data.email + + if User.by(User.email == email, s=session).one_or_none(): + logger.warning("Registration failed: email already exists", extra={"email": email}) + raise UserException(code.error, _("Email already registered")) + + with session as s: + try: + user = User( + email=email, + password=data.password, + ) + s.add(user) + s.commit() + s.refresh(user) + logger.info("User registered successfully", extra={"user_id": user.id, "email": email}) + except Exception as e: + s.rollback() + logger.error("User registration failed", extra={"email": email, "error": str(e)}, exc_info=True) + raise UserException(code.error, _("Failed to register")) + + return {"status": "success"} \ No newline at end of file diff --git a/server/app/controller/user/user_controller.py b/server/app/controller/user/user_controller.py index cd8ecc3b9..dfb523c20 100644 --- a/server/app/controller/user/user_controller.py +++ b/server/app/controller/user/user_controller.py @@ -1,115 +1,151 @@ -from fastapi import APIRouter, Depends -from sqlalchemy import func -from sqlmodel import Session, select -from app.component.auth import Auth, auth_must -from app.component.database import session -from app.model.user.privacy import UserPrivacy, UserPrivacySettings -from app.model.user.user import User, UserIn, UserOut, UserProfile -from app.model.user.user_stat import UserStat, UserStatActionIn, UserStatOut -from app.model.chat.chat_history import ChatHistory -from app.model.mcp.mcp_user import McpUser -from app.model.config.config import Config -from app.model.chat.chat_snpshot import ChatSnapshot -from app.model.user.user_credits_record import UserCreditsRecord - - -router = APIRouter(tags=["User"]) - - -@router.get("/user", name="user info", response_model=UserOut) -def get(auth: Auth = Depends(auth_must), session: Session = Depends(session)): - # 获取用户信息时触发积分刷新 - user: User = auth.user - user.refresh_credits_on_active(session) - return user - - -@router.put("/user", name="update user info", response_model=UserOut) -def put(data: UserIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - model = auth.user - model.username = data.username - model.save(session) - return model - - -@router.put("/user/profile", name="update user profile", response_model=UserProfile) -def put_profile(data: UserProfile, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - model = auth.user - model.nickname = data.nickname - model.fullname = data.fullname - model.work_desc = data.work_desc - model.save(session) - return model - - -@router.get("/user/privacy", name="get user privacy") -def get_privacy(session: Session = Depends(session), auth: Auth = Depends(auth_must)): - user_id = auth.user.id - stmt = select(UserPrivacy).where(UserPrivacy.user_id == user_id) - model = session.exec(stmt).one_or_none() - - if not model: - return UserPrivacySettings.default_settings() - return model.pricacy_setting - - -@router.put("/user/privacy", name="update user privacy") -def put_privacy(data: UserPrivacySettings, session: Session = Depends(session), auth: Auth = Depends(auth_must)): - user_id = auth.user.id - stmt = select(UserPrivacy).where(UserPrivacy.user_id == user_id) - model = session.exec(stmt).one_or_none() - default_settings = UserPrivacySettings.default_settings() - - if model: - model.pricacy_setting = {**model.pricacy_setting, **data.model_dump()} - model.save(session) - else: - model = UserPrivacy(user_id=user_id, pricacy_setting={**default_settings, **data.model_dump()}) - model.save(session) - - return model.pricacy_setting - - -@router.get("/user/current_credits", name="get user current credits") -def get_user_credits(auth: Auth = Depends(auth_must), session: Session = Depends(session)): - user = auth.user - user.refresh_credits_on_active(session) - credits = user.credits - daily_credits: UserCreditsRecord | None = UserCreditsRecord.get_daily_balance(user.id) - current_daily_credits = 0 - if daily_credits: - current_daily_credits = daily_credits.amount - daily_credits.balance - credits += current_daily_credits if current_daily_credits > 0 else 0 - return {"credits": credits, "daily_credits": current_daily_credits} - - -@router.get("/user/stat", name="get user stat", response_model=UserStatOut) -def get_user_stat(auth: Auth = Depends(auth_must), session: Session = Depends(session)): - """Get current user's operation statistics.""" - stat = session.exec(select(UserStat).where(UserStat.user_id == auth.user.id)).first() - data = UserStatOut() - if stat: - data = UserStatOut(**stat.model_dump()) - else: - data = UserStatOut(user_id=auth.user.id) - data.task_queries = ChatHistory.count(ChatHistory.user_id == auth.user.id, s=session) - mcp = McpUser.count(McpUser.user_id == auth.user.id, s=session) - tool: list = session.exec( - select(func.count("*")).where(Config.user_id == auth.user.id).group_by(Config.config_group) - ).all() - tool = tool.__len__() - data.mcp_install_count = mcp + tool - data.storage_used = ChatSnapshot.caclDir(ChatSnapshot.get_user_dir(auth.user.id)) - return data - - -@router.post("/user/stat", name="record user stat") -def record_user_stat( - data: UserStatActionIn, - auth: Auth = Depends(auth_must), - session: Session = Depends(session), -): - """Record or update current user's operation statistics.""" - data.user_id = auth.user.id - stat = UserStat.record_action(session, data) - return stat +from fastapi import APIRouter, Depends +from sqlalchemy import func +from sqlmodel import Session, select +from app.component.auth import Auth, auth_must +from app.component.database import session +from app.model.user.privacy import UserPrivacy, UserPrivacySettings +from app.model.user.user import User, UserIn, UserOut, UserProfile +from app.model.user.user_stat import UserStat, UserStatActionIn, UserStatOut +from app.model.chat.chat_history import ChatHistory +from app.model.mcp.mcp_user import McpUser +from app.model.config.config import Config +from app.model.chat.chat_snpshot import ChatSnapshot +from app.model.user.user_credits_record import UserCreditsRecord +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_user_controller") + +router = APIRouter(tags=["User"]) + + +@router.get("/user", name="user info", response_model=UserOut) +@traceroot.trace() +def get(auth: Auth = Depends(auth_must), session: Session = Depends(session)): + """Get current user information and refresh credits.""" + user: User = auth.user + user.refresh_credits_on_active(session) + logger.debug("User info retrieved", extra={"user_id": user.id}) + return user + + +@router.put("/user", name="update user info", response_model=UserOut) +@traceroot.trace() +def put(data: UserIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Update user basic information.""" + model = auth.user + model.username = data.username + model.save(session) + logger.info("User info updated", extra={"user_id": model.id, "username": data.username}) + return model + + +@router.put("/user/profile", name="update user profile", response_model=UserProfile) +@traceroot.trace() +def put_profile(data: UserProfile, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Update user profile details.""" + model = auth.user + model.nickname = data.nickname + model.fullname = data.fullname + model.work_desc = data.work_desc + model.save(session) + logger.info("User profile updated", extra={"user_id": model.id, "nickname": data.nickname}) + return model + + +@router.get("/user/privacy", name="get user privacy") +@traceroot.trace() +def get_privacy(session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Get user privacy settings.""" + user_id = auth.user.id + stmt = select(UserPrivacy).where(UserPrivacy.user_id == user_id) + model = session.exec(stmt).one_or_none() + + if not model: + logger.debug("Privacy settings not found, returning defaults", extra={"user_id": user_id}) + return UserPrivacySettings.default_settings() + + logger.debug("Privacy settings retrieved", extra={"user_id": user_id}) + return model.pricacy_setting + + +@router.put("/user/privacy", name="update user privacy") +@traceroot.trace() +def put_privacy(data: UserPrivacySettings, session: Session = Depends(session), auth: Auth = Depends(auth_must)): + """Update user privacy settings.""" + user_id = auth.user.id + stmt = select(UserPrivacy).where(UserPrivacy.user_id == user_id) + model = session.exec(stmt).one_or_none() + default_settings = UserPrivacySettings.default_settings() + + if model: + model.pricacy_setting = {**model.pricacy_setting, **data.model_dump()} + model.save(session) + logger.info("Privacy settings updated", extra={"user_id": user_id}) + else: + model = UserPrivacy(user_id=user_id, pricacy_setting={**default_settings, **data.model_dump()}) + model.save(session) + logger.info("Privacy settings created", extra={"user_id": user_id}) + + return model.pricacy_setting + + +@router.get("/user/current_credits", name="get user current credits") +@traceroot.trace() +def get_user_credits(auth: Auth = Depends(auth_must), session: Session = Depends(session)): + """Get user's current credit balance.""" + user = auth.user + user.refresh_credits_on_active(session) + credits = user.credits + daily_credits: UserCreditsRecord | None = UserCreditsRecord.get_daily_balance(user.id) + current_daily_credits = 0 + if daily_credits: + current_daily_credits = daily_credits.amount - daily_credits.balance + credits += current_daily_credits if current_daily_credits > 0 else 0 + + logger.debug("Credits retrieved", extra={"user_id": user.id, "total_credits": credits, "daily_credits": current_daily_credits}) + return {"credits": credits, "daily_credits": current_daily_credits} + + +@router.get("/user/stat", name="get user stat", response_model=UserStatOut) +@traceroot.trace() +def get_user_stat(auth: Auth = Depends(auth_must), session: Session = Depends(session)): + """Get current user's operation statistics.""" + user_id = auth.user.id + stat = session.exec(select(UserStat).where(UserStat.user_id == user_id)).first() + data = UserStatOut() + + if stat: + data = UserStatOut(**stat.model_dump()) + else: + data = UserStatOut(user_id=user_id) + + data.task_queries = ChatHistory.count(ChatHistory.user_id == user_id, s=session) + mcp = McpUser.count(McpUser.user_id == user_id, s=session) + tool: list = session.exec( + select(func.count("*")).where(Config.user_id == user_id).group_by(Config.config_group) + ).all() + tool = tool.__len__() + data.mcp_install_count = mcp + tool + data.storage_used = ChatSnapshot.caclDir(ChatSnapshot.get_user_dir(user_id)) + + logger.debug("User stats retrieved", extra={ + "user_id": user_id, + "task_queries": data.task_queries, + "mcp_install_count": data.mcp_install_count, + "storage_used": data.storage_used + }) + return data + + +@router.post("/user/stat", name="record user stat") +@traceroot.trace() +def record_user_stat( + data: UserStatActionIn, + auth: Auth = Depends(auth_must), + session: Session = Depends(session), +): + """Record or update current user's operation statistics.""" + data.user_id = auth.user.id + stat = UserStat.record_action(session, data) + logger.info("User stat recorded", extra={"user_id": data.user_id, "action": data.action if hasattr(data, 'action') else "unknown"}) + return stat \ No newline at end of file diff --git a/server/app/controller/user/user_password_controller.py b/server/app/controller/user/user_password_controller.py index 3efd19866..eec5fa004 100644 --- a/server/app/controller/user/user_password_controller.py +++ b/server/app/controller/user/user_password_controller.py @@ -1,24 +1,36 @@ -from fastapi import APIRouter, Depends -from sqlmodel import Session - -from app.component import code -from app.component.auth import Auth, auth_must -from app.component.database import session -from app.component.encrypt import password_hash, password_verify -from app.exception.exception import UserException -from app.model.user.user import UpdatePassword, UserOut -from fastapi_babel import _ - -router = APIRouter(tags=["User"]) - - -@router.put("/user/update-password", name="update password", response_model=UserOut) -def update_password(data: UpdatePassword, auth: Auth = Depends(auth_must), session: Session = Depends(session)): - model = auth.user - if not password_verify(data.password, model.password): - raise UserException(code.error, _("Password is incorrect")) - if data.new_password != data.re_new_password: - raise UserException(code.error, _("The two passwords do not match")) - model.password = password_hash(data.new_password) - model.save(session) - return model +from fastapi import APIRouter, Depends +from sqlmodel import Session + +from app.component import code +from app.component.auth import Auth, auth_must +from app.component.database import session +from app.component.encrypt import password_hash, password_verify +from app.exception.exception import UserException +from app.model.user.user import UpdatePassword, UserOut +from fastapi_babel import _ +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("server_password_controller") + +router = APIRouter(tags=["User"]) + + +@router.put("/user/update-password", name="update password", response_model=UserOut) +@traceroot.trace() +def update_password(data: UpdatePassword, auth: Auth = Depends(auth_must), session: Session = Depends(session)): + """Update user password after verifying current password.""" + user_id = auth.user.id + model = auth.user + + if not password_verify(data.password, model.password): + logger.warning("Password update failed: incorrect current password", extra={"user_id": user_id}) + raise UserException(code.error, _("Password is incorrect")) + + if data.new_password != data.re_new_password: + logger.warning("Password update failed: new passwords do not match", extra={"user_id": user_id}) + raise UserException(code.error, _("The two passwords do not match")) + + model.password = password_hash(data.new_password) + model.save(session) + logger.info("Password updated successfully", extra={"user_id": user_id}) + return model \ No newline at end of file diff --git a/server/app/model/chat/chat_history.py b/server/app/model/chat/chat_history.py index e351e34ae..6dd7775a1 100644 --- a/server/app/model/chat/chat_history.py +++ b/server/app/model/chat/chat_history.py @@ -4,7 +4,7 @@ from typing import Optional from enum import IntEnum from sqlalchemy_utils import ChoiceType from app.model.abstract.model import AbstractModel, DefaultTimes -from pydantic import BaseModel +from pydantic import BaseModel, model_validator class ChatStatus(IntEnum): @@ -16,6 +16,7 @@ class ChatHistory(AbstractModel, DefaultTimes, table=True): id: int = Field(default=None, primary_key=True) user_id: int = Field(index=True) task_id: str = Field(index=True, unique=True) + project_id: str = Field(index=True, unique=False, nullable=True) question: str language: str model_platform: str @@ -34,6 +35,7 @@ class ChatHistory(AbstractModel, DefaultTimes, table=True): class ChatHistoryIn(BaseModel): task_id: str + project_id: str | None = None user_id: int | None = None question: str language: str @@ -54,6 +56,7 @@ class ChatHistoryIn(BaseModel): class ChatHistoryOut(BaseModel): id: int task_id: str + project_id: str | None = None question: str language: str model_platform: str @@ -68,9 +71,17 @@ class ChatHistoryOut(BaseModel): tokens: int status: int + @model_validator(mode="after") + def fill_project_id_from_task_id(self): + """fill by task_id when project_id is None""" + if self.project_id is None: + self.project_id = self.task_id + return self + class ChatHistoryUpdate(BaseModel): project_name: str | None = None summary: str | None = None tokens: int | None = None status: int | None = None + project_id: str | None = None diff --git a/server/app/model/user/user_credits_record.py b/server/app/model/user/user_credits_record.py index bcdb7750d..5baf2307a 100644 --- a/server/app/model/user/user_credits_record.py +++ b/server/app/model/user/user_credits_record.py @@ -1,381 +1,383 @@ -from enum import IntEnum -from typing import Optional -from pydantic import BaseModel -from sqlmodel import Relationship, SQLModel, Field, Column, col, select, Session -from sqlalchemy_utils import ChoiceType -from sqlalchemy import Boolean, SmallInteger, text -from app.model.abstract.model import AbstractModel, DefaultTimes -from datetime import date, datetime, timedelta -from app.model.user.key import ModelType -from app.component.database import session_make -from loguru import logger - - -class CreditsChannel(IntEnum): - register = 1 # 注册赠送 - invite = 2 # 邀请赠送 - daily = 3 # 每日刷新积分 - monthly = 4 # 每月刷新积分 - paid = 5 # 付费积分 - addon = 6 # 加量包 - consume = 7 # 任务消费 - - -class CreditsPriority(IntEnum): - daily = 1 # 每日刷新积分 - monthly = 2 # 每月刷新积分 - paid = 3 # 付费积分 - addon = 4 # 加量包 - - -class CreditsPoint(IntEnum): - register = 1000 - invite = 500 - special_register = 1500 # 1000 register + 500 invite credit - - -class UserCreditsRecord(AbstractModel, DefaultTimes, table=True): - id: int = Field(default=None, primary_key=True) - user_id: int = Field(index=True, foreign_key="user.id") - invite_by: int = Field(default=None, nullable=True, description="invite by user id") - invite_code: str = Field(default="", max_length=255) - amount: int = Field(default=0) - balance: int = Field(default=0) - channel: CreditsChannel = Field( - default=CreditsChannel.register.value, sa_column=Column(ChoiceType(CreditsChannel, SmallInteger())) - ) - source_id: int = Field(default=0, description="source id") - remark: str = Field(default="", max_length=255) - expire_at: datetime = Field(default=None, nullable=True, description="Expiration time") - used: bool = Field( - default=False, - sa_column=Column(Boolean, server_default=text("false")), - description="Is this record used/expired", - ) - used_at: datetime = Field(default=None, nullable=True, description="Time when this record was used/expired") - - @classmethod - def get_permanent_credits(cls, user_id: int) -> int: - """ - 获取可用的token总量,直接用SQL聚合sum - Returns: - int: 可用的token总量 - """ - session = session_make() - from sqlalchemy import func - - statement = ( - select(func.sum(UserCreditsRecord.amount)) - .where(UserCreditsRecord.user_id == user_id) - .where( - UserCreditsRecord.channel.in_( - [ - CreditsChannel.register, - CreditsChannel.invite, - CreditsChannel.paid, - CreditsChannel.addon, - CreditsChannel.monthly, - ] - ) - ) - .where(UserCreditsRecord.used == False) - .where((UserCreditsRecord.expire_at.is_(None)) | (col(UserCreditsRecord.expire_at) > datetime.now())) - ) - result = session.exec(statement).first() - return result or 0 - - @classmethod - def get_temp_credits(cls, user_id: int) -> tuple[int, date]: - """ - 1. 获取可用的临时token总量,需要通过credits 然后根据model_type来计算 - 2. 每天只允许赠送一次临时的量 - - Returns: - int: 可用的临时token总量 - """ - session = session_make() - statement = ( - select(UserCreditsRecord) - .where(UserCreditsRecord.user_id == user_id) - .where(UserCreditsRecord.channel == CreditsChannel.daily) - .where(UserCreditsRecord.used == False) - .where(UserCreditsRecord.expire_at.is_not(None)) - .where(col(UserCreditsRecord.expire_at) > datetime.now()) - ) - record: UserCreditsRecord = session.exec(statement).first() - if record is None: - return 0, None - return record.amount - record.balance, record.expire_at - - @classmethod - def consume_credits(cls, user_id: int, amount: int, session: Session, source_id: int = 0, remark: str = ""): - """ - 消耗积分,优先消耗每日积分(daily),再消耗monthly、paid、addon等。 - 消耗时更新UserCreditsRecord的balance字段,记录已消耗积分数。 - 同时生成积分消耗记录,更新用户积分credits字段(不包括每日积分)。 - 避免重复生成积分消耗记录和重复扣减积分。 - """ - - # 检查是否已有积分消耗记录 - existing_consume_record = None - if source_id > 0: - existing_consume_record = session.exec( - select(UserCreditsRecord) - .where(UserCreditsRecord.user_id == user_id) - .where(UserCreditsRecord.channel == CreditsChannel.consume) - .where(UserCreditsRecord.source_id == source_id) - ).first() - - if existing_consume_record: - # 如果新amount更大,需要额外消耗积分 - if amount > 0: - existing_consume_record.amount -= amount - session.add(existing_consume_record) - # 直接处理额外的积分消耗,不生成新的消耗记录 - cls._consume_credits_internal_update(user_id, amount, session, source_id, remark) - # 如果新amount更小,需要退还积分(这里可以根据业务需求决定是否实现) - else: - # 暂时不实现退还逻辑,可以根据需要添加 - pass - - session.commit() - return - - # 没有现有记录,执行正常的积分消耗流程 - cls._consume_credits_internal(user_id, amount, session, source_id, remark) - - @classmethod - def _consume_credits_internal( - cls, user_id: int, amount: int, session: Session, source_id: int = 0, remark: str = "" - ): - """ - 内部积分消耗逻辑,处理实际的积分扣减 - """ - from app.model.user.user import User - - remain = amount - now = datetime.now() - consumed_from_daily = 0 - consumed_from_other = 0 - - # 优先消耗daily - statement = ( - select(UserCreditsRecord) - .where(UserCreditsRecord.user_id == user_id) - .where(UserCreditsRecord.channel == CreditsChannel.daily) - .where(UserCreditsRecord.used == False) - .where(UserCreditsRecord.expire_at.is_not(None)) - .where(col(UserCreditsRecord.expire_at) > now) - .order_by(UserCreditsRecord.expire_at) - ) - daily_records = session.exec(statement).first() - if daily_records: - can_consume = daily_records.amount - daily_records.balance - use = min(remain, can_consume) - daily_records.balance += use - session.add(daily_records) - remain -= use - consumed_from_daily = use - if remain == 0: - # 生成积分消耗记录 - consume_record = UserCreditsRecord( - user_id=user_id, - amount=-amount, - channel=CreditsChannel.consume, - source_id=source_id, - remark=remark or f"Consumed {amount} credits (daily: {consumed_from_daily})", - ) - session.add(consume_record) - session.commit() - return - - # 若daily不够,继续消耗monthly/paid/addon - if remain > 0: - statement = ( - select(UserCreditsRecord) - .where(UserCreditsRecord.user_id == user_id) - .where( - UserCreditsRecord.channel.in_( - [ - CreditsChannel.monthly, - CreditsChannel.paid, - CreditsChannel.addon, - CreditsChannel.register, - CreditsChannel.invite, - ] - ) - ) - .where(UserCreditsRecord.used == False) - .where((UserCreditsRecord.expire_at.is_(None)) | (col(UserCreditsRecord.expire_at) > now)) - .order_by(UserCreditsRecord.expire_at) - ) - other_records = session.exec(statement).all() - for record in other_records: - can_consume = record.amount - record.balance - if can_consume <= 0: - continue - use = min(remain, can_consume) - record.balance += use - session.add(record) - remain -= use - consumed_from_other += use - if remain == 0: - break - - # 更新用户积分字段(只扣除非每日积分消耗的部分) - if consumed_from_other > 0: - user = session.exec(select(User).where(User.id == user_id)).first() - if user: - user.credits -= consumed_from_other - session.add(user) - - # 生成积分消耗记录 - consume_record = UserCreditsRecord( - user_id=user_id, - amount=-amount, - channel=CreditsChannel.consume, - source_id=source_id, - remark=remark or f"Consumed {amount} credits (daily: {consumed_from_daily}, other: {consumed_from_other})", - ) - session.add(consume_record) - session.commit() - - if remain > 0: - raise Exception(f"Insufficient credits: need {amount}, remain {remain}") - - @classmethod - def _consume_credits_internal_update( - cls, user_id: int, amount: int, session: Session, source_id: int = 0, remark: str = "" - ): - """ - 内部积分消耗逻辑(更新模式),处理实际的积分扣减但不生成新的消耗记录 - 用于更新现有消耗记录时的额外积分消耗 - """ - from app.model.user.user import User - - remain = amount - now = datetime.now() - consumed_from_daily = 0 - consumed_from_other = 0 - - # 优先消耗daily - statement = ( - select(UserCreditsRecord) - .where(UserCreditsRecord.user_id == user_id) - .where(UserCreditsRecord.channel == CreditsChannel.daily) - .where(UserCreditsRecord.used == False) - .where(UserCreditsRecord.expire_at.is_not(None)) - .where(col(UserCreditsRecord.expire_at) > now) - .order_by(UserCreditsRecord.expire_at) - ) - daily_records = session.exec(statement).first() - if daily_records: - can_consume = daily_records.amount - daily_records.balance - use = min(remain, can_consume) - daily_records.balance += use - session.add(daily_records) - remain -= use - consumed_from_daily = use - if remain == 0: - # 不生成新的消耗记录,只更新现有记录 - return - - # 若daily不够,继续消耗monthly/paid/addon - if remain > 0: - statement = ( - select(UserCreditsRecord) - .where(UserCreditsRecord.user_id == user_id) - .where( - UserCreditsRecord.channel.in_( - [ - CreditsChannel.monthly, - CreditsChannel.paid, - CreditsChannel.addon, - CreditsChannel.register, - CreditsChannel.invite, - ] - ) - ) - .where(UserCreditsRecord.used == False) - .where((UserCreditsRecord.expire_at.is_(None)) | (col(UserCreditsRecord.expire_at) > now)) - .order_by(UserCreditsRecord.expire_at) - ) - other_records = session.exec(statement).all() - for record in other_records: - can_consume = record.amount - record.balance - if can_consume <= 0: - continue - use = min(remain, can_consume) - record.balance += use - session.add(record) - remain -= use - consumed_from_other += use - if remain == 0: - break - logger.info(f"consumed_from_other: {consumed_from_other}") - # 更新用户积分字段(只扣除非每日积分消耗的部分) - if consumed_from_other > 0: - user = session.exec(select(User).where(User.id == user_id)).first() - if user: - user.credits -= consumed_from_other - session.add(user) - - # 不生成新的消耗记录,因为现有记录已经在主函数中更新了 - - if remain > 0: - raise Exception(f"Insufficient credits: need {amount}, remain {remain}") - - @classmethod - def get_daily_balance_sum(cls, user_id: int) -> int: - """ - 获取用户所有每日积分(daily channel)的balance字段之和 - """ - session = session_make() - statement = ( - select(UserCreditsRecord.balance) - .where(UserCreditsRecord.user_id == user_id) - .where(UserCreditsRecord.channel == CreditsChannel.daily) - ) - balances = session.exec(statement).all() - return sum(balances) if balances else 0 - - @classmethod - def get_daily_balance(cls, user_id: int) -> int: - """ - 获取用户当前的每日积分数据 - """ - session = session_make() - statement = ( - select(UserCreditsRecord) - .where(UserCreditsRecord.user_id == user_id) - .where(UserCreditsRecord.channel == CreditsChannel.daily) - .where(UserCreditsRecord.used == False) - ) - record = session.exec(statement).first() - return record - - -class UserCreditsRecordWithChatOut(BaseModel): - """扩展的积分记录输出模型,包含聊天历史信息""" - - amount: int - balance: int - channel: CreditsChannel - source_id: int - expire_at: Optional[datetime] = None - created_at: datetime - updated_at: Optional[datetime] = None - # 聊天历史相关字段(当channel为consume且source_id有效时) - chat_project_name: Optional[str] = None - chat_tokens: Optional[int] = None - - -class UserCreditsRecordOut(BaseModel): - amount: int - balance: int - channel: CreditsChannel - source_id: int - remark: str - expire_at: datetime | None - created_at: datetime - updated_at: datetime | None +from enum import IntEnum +from typing import Optional +from pydantic import BaseModel +from sqlmodel import Relationship, SQLModel, Field, Column, col, select, Session +from sqlalchemy_utils import ChoiceType +from sqlalchemy import Boolean, SmallInteger, text +from app.model.abstract.model import AbstractModel, DefaultTimes +from datetime import date, datetime, timedelta +from app.model.user.key import ModelType +from app.component.database import session_make +from utils import traceroot_wrapper as traceroot + +logger = traceroot.get_logger("user_credits_record") + + +class CreditsChannel(IntEnum): + register = 1 # 注册赠送 + invite = 2 # 邀请赠送 + daily = 3 # 每日刷新积分 + monthly = 4 # 每月刷新积分 + paid = 5 # 付费积分 + addon = 6 # 加量包 + consume = 7 # 任务消费 + + +class CreditsPriority(IntEnum): + daily = 1 # 每日刷新积分 + monthly = 2 # 每月刷新积分 + paid = 3 # 付费积分 + addon = 4 # 加量包 + + +class CreditsPoint(IntEnum): + register = 1000 + invite = 500 + special_register = 1500 # 1000 register + 500 invite credit + + +class UserCreditsRecord(AbstractModel, DefaultTimes, table=True): + id: int = Field(default=None, primary_key=True) + user_id: int = Field(index=True, foreign_key="user.id") + invite_by: int = Field(default=None, nullable=True, description="invite by user id") + invite_code: str = Field(default="", max_length=255) + amount: int = Field(default=0) + balance: int = Field(default=0) + channel: CreditsChannel = Field( + default=CreditsChannel.register.value, sa_column=Column(ChoiceType(CreditsChannel, SmallInteger())) + ) + source_id: int = Field(default=0, description="source id") + remark: str = Field(default="", max_length=255) + expire_at: datetime = Field(default=None, nullable=True, description="Expiration time") + used: bool = Field( + default=False, + sa_column=Column(Boolean, server_default=text("false")), + description="Is this record used/expired", + ) + used_at: datetime = Field(default=None, nullable=True, description="Time when this record was used/expired") + + @classmethod + def get_permanent_credits(cls, user_id: int) -> int: + """ + 获取可用的token总量,直接用SQL聚合sum + Returns: + int: 可用的token总量 + """ + session = session_make() + from sqlalchemy import func + + statement = ( + select(func.sum(UserCreditsRecord.amount)) + .where(UserCreditsRecord.user_id == user_id) + .where( + UserCreditsRecord.channel.in_( + [ + CreditsChannel.register, + CreditsChannel.invite, + CreditsChannel.paid, + CreditsChannel.addon, + CreditsChannel.monthly, + ] + ) + ) + .where(UserCreditsRecord.used == False) + .where((UserCreditsRecord.expire_at.is_(None)) | (col(UserCreditsRecord.expire_at) > datetime.now())) + ) + result = session.exec(statement).first() + return result or 0 + + @classmethod + def get_temp_credits(cls, user_id: int) -> tuple[int, date]: + """ + 1. 获取可用的临时token总量,需要通过credits 然后根据model_type来计算 + 2. 每天只允许赠送一次临时的量 + + Returns: + int: 可用的临时token总量 + """ + session = session_make() + statement = ( + select(UserCreditsRecord) + .where(UserCreditsRecord.user_id == user_id) + .where(UserCreditsRecord.channel == CreditsChannel.daily) + .where(UserCreditsRecord.used == False) + .where(UserCreditsRecord.expire_at.is_not(None)) + .where(col(UserCreditsRecord.expire_at) > datetime.now()) + ) + record: UserCreditsRecord = session.exec(statement).first() + if record is None: + return 0, None + return record.amount - record.balance, record.expire_at + + @classmethod + def consume_credits(cls, user_id: int, amount: int, session: Session, source_id: int = 0, remark: str = ""): + """ + 消耗积分,优先消耗每日积分(daily),再消耗monthly、paid、addon等。 + 消耗时更新UserCreditsRecord的balance字段,记录已消耗积分数。 + 同时生成积分消耗记录,更新用户积分credits字段(不包括每日积分)。 + 避免重复生成积分消耗记录和重复扣减积分。 + """ + + # 检查是否已有积分消耗记录 + existing_consume_record = None + if source_id > 0: + existing_consume_record = session.exec( + select(UserCreditsRecord) + .where(UserCreditsRecord.user_id == user_id) + .where(UserCreditsRecord.channel == CreditsChannel.consume) + .where(UserCreditsRecord.source_id == source_id) + ).first() + + if existing_consume_record: + # 如果新amount更大,需要额外消耗积分 + if amount > 0: + existing_consume_record.amount -= amount + session.add(existing_consume_record) + # 直接处理额外的积分消耗,不生成新的消耗记录 + cls._consume_credits_internal_update(user_id, amount, session, source_id, remark) + # 如果新amount更小,需要退还积分(这里可以根据业务需求决定是否实现) + else: + # 暂时不实现退还逻辑,可以根据需要添加 + pass + + session.commit() + return + + # 没有现有记录,执行正常的积分消耗流程 + cls._consume_credits_internal(user_id, amount, session, source_id, remark) + + @classmethod + def _consume_credits_internal( + cls, user_id: int, amount: int, session: Session, source_id: int = 0, remark: str = "" + ): + """ + 内部积分消耗逻辑,处理实际的积分扣减 + """ + from app.model.user.user import User + + remain = amount + now = datetime.now() + consumed_from_daily = 0 + consumed_from_other = 0 + + # 优先消耗daily + statement = ( + select(UserCreditsRecord) + .where(UserCreditsRecord.user_id == user_id) + .where(UserCreditsRecord.channel == CreditsChannel.daily) + .where(UserCreditsRecord.used == False) + .where(UserCreditsRecord.expire_at.is_not(None)) + .where(col(UserCreditsRecord.expire_at) > now) + .order_by(UserCreditsRecord.expire_at) + ) + daily_records = session.exec(statement).first() + if daily_records: + can_consume = daily_records.amount - daily_records.balance + use = min(remain, can_consume) + daily_records.balance += use + session.add(daily_records) + remain -= use + consumed_from_daily = use + if remain == 0: + # 生成积分消耗记录 + consume_record = UserCreditsRecord( + user_id=user_id, + amount=-amount, + channel=CreditsChannel.consume, + source_id=source_id, + remark=remark or f"Consumed {amount} credits (daily: {consumed_from_daily})", + ) + session.add(consume_record) + session.commit() + return + + # 若daily不够,继续消耗monthly/paid/addon + if remain > 0: + statement = ( + select(UserCreditsRecord) + .where(UserCreditsRecord.user_id == user_id) + .where( + UserCreditsRecord.channel.in_( + [ + CreditsChannel.monthly, + CreditsChannel.paid, + CreditsChannel.addon, + CreditsChannel.register, + CreditsChannel.invite, + ] + ) + ) + .where(UserCreditsRecord.used == False) + .where((UserCreditsRecord.expire_at.is_(None)) | (col(UserCreditsRecord.expire_at) > now)) + .order_by(UserCreditsRecord.expire_at) + ) + other_records = session.exec(statement).all() + for record in other_records: + can_consume = record.amount - record.balance + if can_consume <= 0: + continue + use = min(remain, can_consume) + record.balance += use + session.add(record) + remain -= use + consumed_from_other += use + if remain == 0: + break + + # 更新用户积分字段(只扣除非每日积分消耗的部分) + if consumed_from_other > 0: + user = session.exec(select(User).where(User.id == user_id)).first() + if user: + user.credits -= consumed_from_other + session.add(user) + + # 生成积分消耗记录 + consume_record = UserCreditsRecord( + user_id=user_id, + amount=-amount, + channel=CreditsChannel.consume, + source_id=source_id, + remark=remark or f"Consumed {amount} credits (daily: {consumed_from_daily}, other: {consumed_from_other})", + ) + session.add(consume_record) + session.commit() + + if remain > 0: + raise Exception(f"Insufficient credits: need {amount}, remain {remain}") + + @classmethod + def _consume_credits_internal_update( + cls, user_id: int, amount: int, session: Session, source_id: int = 0, remark: str = "" + ): + """ + 内部积分消耗逻辑(更新模式),处理实际的积分扣减但不生成新的消耗记录 + 用于更新现有消耗记录时的额外积分消耗 + """ + from app.model.user.user import User + + remain = amount + now = datetime.now() + consumed_from_daily = 0 + consumed_from_other = 0 + + # 优先消耗daily + statement = ( + select(UserCreditsRecord) + .where(UserCreditsRecord.user_id == user_id) + .where(UserCreditsRecord.channel == CreditsChannel.daily) + .where(UserCreditsRecord.used == False) + .where(UserCreditsRecord.expire_at.is_not(None)) + .where(col(UserCreditsRecord.expire_at) > now) + .order_by(UserCreditsRecord.expire_at) + ) + daily_records = session.exec(statement).first() + if daily_records: + can_consume = daily_records.amount - daily_records.balance + use = min(remain, can_consume) + daily_records.balance += use + session.add(daily_records) + remain -= use + consumed_from_daily = use + if remain == 0: + # 不生成新的消耗记录,只更新现有记录 + return + + # 若daily不够,继续消耗monthly/paid/addon + if remain > 0: + statement = ( + select(UserCreditsRecord) + .where(UserCreditsRecord.user_id == user_id) + .where( + UserCreditsRecord.channel.in_( + [ + CreditsChannel.monthly, + CreditsChannel.paid, + CreditsChannel.addon, + CreditsChannel.register, + CreditsChannel.invite, + ] + ) + ) + .where(UserCreditsRecord.used == False) + .where((UserCreditsRecord.expire_at.is_(None)) | (col(UserCreditsRecord.expire_at) > now)) + .order_by(UserCreditsRecord.expire_at) + ) + other_records = session.exec(statement).all() + for record in other_records: + can_consume = record.amount - record.balance + if can_consume <= 0: + continue + use = min(remain, can_consume) + record.balance += use + session.add(record) + remain -= use + consumed_from_other += use + if remain == 0: + break + logger.info(f"consumed_from_other: {consumed_from_other}") + # 更新用户积分字段(只扣除非每日积分消耗的部分) + if consumed_from_other > 0: + user = session.exec(select(User).where(User.id == user_id)).first() + if user: + user.credits -= consumed_from_other + session.add(user) + + # 不生成新的消耗记录,因为现有记录已经在主函数中更新了 + + if remain > 0: + raise Exception(f"Insufficient credits: need {amount}, remain {remain}") + + @classmethod + def get_daily_balance_sum(cls, user_id: int) -> int: + """ + 获取用户所有每日积分(daily channel)的balance字段之和 + """ + session = session_make() + statement = ( + select(UserCreditsRecord.balance) + .where(UserCreditsRecord.user_id == user_id) + .where(UserCreditsRecord.channel == CreditsChannel.daily) + ) + balances = session.exec(statement).all() + return sum(balances) if balances else 0 + + @classmethod + def get_daily_balance(cls, user_id: int) -> int: + """ + 获取用户当前的每日积分数据 + """ + session = session_make() + statement = ( + select(UserCreditsRecord) + .where(UserCreditsRecord.user_id == user_id) + .where(UserCreditsRecord.channel == CreditsChannel.daily) + .where(UserCreditsRecord.used == False) + ) + record = session.exec(statement).first() + return record + + +class UserCreditsRecordWithChatOut(BaseModel): + """扩展的积分记录输出模型,包含聊天历史信息""" + + amount: int + balance: int + channel: CreditsChannel + source_id: int + expire_at: Optional[datetime] = None + created_at: datetime + updated_at: Optional[datetime] = None + # 聊天历史相关字段(当channel为consume且source_id有效时) + chat_project_name: Optional[str] = None + chat_tokens: Optional[int] = None + + +class UserCreditsRecordOut(BaseModel): + amount: int + balance: int + channel: CreditsChannel + source_id: int + remark: str + expire_at: datetime | None + created_at: datetime + updated_at: datetime | None diff --git a/server/main.py b/server/main.py index e440a7b13..3ea80ecd3 100644 --- a/server/main.py +++ b/server/main.py @@ -1,30 +1,36 @@ -from app import api -from app.component.environment import auto_include_routers, env -from loguru import logger -import os -from fastapi.staticfiles import StaticFiles - -prefix = env("url_prefix", "") -auto_include_routers(api, prefix, "app/controller") -public_dir = os.environ.get("PUBLIC_DIR") or os.path.join(os.path.dirname(__file__), "app", "public") -# Ensure static directory exists or gracefully skip mounting -if not os.path.isdir(public_dir): - try: - os.makedirs(public_dir, exist_ok=True) - logger.warning(f"Public directory did not exist. Created: {public_dir}") - except Exception as e: - logger.error(f"Public directory missing and could not be created: {public_dir}. Error: {e}") - public_dir = None - -if public_dir and os.path.isdir(public_dir): - api.mount("/public", StaticFiles(directory=public_dir), name="public") -else: - logger.warning("Skipping /public mount because public directory is unavailable") - -logger.add( - "runtime/log/app.log", - rotation="10 MB", - retention="10 days", - level="DEBUG", - enqueue=True, -) +import os +import sys +import pathlib + +# Add project root to Python path to import shared utils +_project_root = pathlib.Path(__file__).parent.parent +if str(_project_root) not in sys.path: + sys.path.insert(0, str(_project_root)) + +from utils import traceroot_wrapper as traceroot +from app import api +from app.component.environment import auto_include_routers, env +from fastapi.staticfiles import StaticFiles + +# Only initialize traceroot if enabled +if traceroot.is_enabled(): + from traceroot.integrations.fastapi import connect_fastapi + connect_fastapi(api) + +logger = traceroot.get_logger("server_main") + +prefix = env("url_prefix", "") +auto_include_routers(api, prefix, "app/controller") +public_dir = os.environ.get("PUBLIC_DIR") or os.path.join(os.path.dirname(__file__), "app", "public") +if not os.path.isdir(public_dir): + try: + os.makedirs(public_dir, exist_ok=True) + logger.warning(f"Public directory did not exist. Created: {public_dir}") + except Exception as e: + logger.error(f"Public directory missing and could not be created: {public_dir}. Error: {e}") + public_dir = None + +if public_dir and os.path.isdir(public_dir): + api.mount("/public", StaticFiles(directory=public_dir), name="public") +else: + logger.warning("Skipping /public mount because public directory is unavailable") diff --git a/server/pyproject.toml b/server/pyproject.toml index 9f6ee2358..85ccb128b 100644 --- a/server/pyproject.toml +++ b/server/pyproject.toml @@ -1,40 +1,41 @@ -[project] -name = "Eigent" -version = "0.1.0" -description = "Eigent" -readme = "README.md" -requires-python = ">=3.13" -dependencies = [ - "alembic>=1.15.2", - "click>=8.1.8", - "fastapi>=0.115.12", - "fastapi-babel>=1.0.0", - "fastapi-pagination>=0.12.34", - "passlib[bcrypt]>=1.7.4", - "bcrypt==4.0.1", - "pydantic-i18n>=0.4.5", - "pydantic[email]>=2.11.1", - "pyjwt>=2.10.1", - "python-dotenv>=1.1.0", - "sqlalchemy-utils>=0.41.2", - "sqlmodel>=0.0.24", - "pandas>=2.2.3", - "openpyxl>=3.1.5", - "pandas>=2.2.3", - "arrow>=1.3.0", - "fastapi-filter>=2.0.1", - "psycopg2-binary>=2.9.10", - "convert-case>=1.2.3", - "python-multipart>=0.0.20", - "loguru>=0.7.3", - "httpx>=0.28.1", - "pydash>=8.0.5", - "requests>=2.32.4", - "itsdangerous>=2.2.0", - "cryptography>=45.0.4", - "sqids>=0.5.2", - "exa-py>=1.14.16", -] - -[tool.ruff] -line-length = 120 +[project] +name = "Eigent" +version = "0.1.0" +description = "Eigent" +readme = "README.md" +requires-python = ">=3.12,<3.13" +dependencies = [ + "alembic>=1.15.2", + "openai>=1.99.3,<2", + "camel-ai==0.2.76a13", + "pydantic[email]>=2.11.1", + "click>=8.1.8", + "fastapi>=0.115.12", + "fastapi-babel>=1.0.0", + "fastapi-pagination>=0.12.34", + "passlib[bcrypt]>=1.7.4", + "bcrypt==4.0.1", + "pydantic-i18n>=0.4.5", + "pyjwt>=2.10.1", + "python-dotenv>=1.1.0", + "sqlalchemy-utils>=0.41.2", + "sqlmodel>=0.0.24", + "pandas>=2.2.3", + "openpyxl>=3.1.5", + "arrow>=1.3.0", + "fastapi-filter>=2.0.1", + "psycopg2-binary>=2.9.10", + "convert-case>=1.2.3", + "python-multipart>=0.0.20", + "httpx>=0.28.1", + "pydash>=8.0.5", + "requests>=2.32.4", + "itsdangerous>=2.2.0", + "cryptography>=45.0.4", + "sqids>=0.5.2", + "exa-py>=1.14.16", + "traceroot>=0.0.7", +] + +[tool.ruff] +line-length = 120 diff --git a/server/utils/__init__.py b/server/utils/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/server/uv.lock b/server/uv.lock index 04975d407..d47b827d0 100644 --- a/server/uv.lock +++ b/server/uv.lock @@ -1,25 +1,25 @@ version = 1 revision = 2 -requires-python = ">=3.13" +requires-python = "==3.12.*" [[package]] name = "alembic" -version = "1.16.4" -source = { registry = "https://pypi.org/simple/" } +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "mako" }, { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/83/52/72e791b75c6b1efa803e491f7cbab78e963695e76d4ada05385252927e76/alembic-1.16.4.tar.gz", hash = "sha256:efab6ada0dd0fae2c92060800e0bf5c1dc26af15a10e02fb4babff164b4725e2", size = 1968161, upload-time = "2025-07-10T16:17:20.192Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/45/6f4555f2039f364c3ce31399529dcf48dd60726ff3715ad67f547d87dfd2/alembic-1.17.0.tar.gz", hash = "sha256:4652a0b3e19616b57d652b82bfa5e38bf5dbea0813eed971612671cb9e90c0fe", size = 1975526, upload-time = "2025-10-11T18:40:13.585Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/62/96b5217b742805236614f05904541000f55422a6060a90d7fd4ce26c172d/alembic-1.16.4-py3-none-any.whl", hash = "sha256:b05e51e8e82efc1abd14ba2af6392897e145930c3e0a2faf2b0da2f7f7fd660d", size = 247026, upload-time = "2025-07-10T16:17:21.845Z" }, + { url = "https://files.pythonhosted.org/packages/44/1f/38e29b06bfed7818ebba1f84904afdc8153ef7b6c7e0d8f3bc6643f5989c/alembic-1.17.0-py3-none-any.whl", hash = "sha256:80523bc437d41b35c5db7e525ad9d908f79de65c27d6a5a5eab6df348a352d99", size = 247449, upload-time = "2025-10-11T18:40:16.288Z" }, ] [[package]] name = "annotated-types" version = "0.7.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, @@ -27,34 +27,53 @@ wheels = [ [[package]] name = "anyio" -version = "4.10.0" -source = { registry = "https://pypi.org/simple/" } +version = "4.11.0" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "idna" }, { name = "sniffio" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f1/b4/636b3b65173d3ce9a38ef5f0522789614e590dab6a8d505340a4efe4c567/anyio-4.10.0.tar.gz", hash = "sha256:3f3fae35c96039744587aa5b8371e7e8e603c0702999535961dd336026973ba6", size = 213252, upload-time = "2025-08-04T08:54:26.451Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/78/7d432127c41b50bccba979505f272c16cbcadcc33645d5fa3a738110ae75/anyio-4.11.0.tar.gz", hash = "sha256:82a8d0b81e318cc5ce71a5f1f8b5c4e63619620b63141ef8c995fa0db95a57c4", size = 219094, upload-time = "2025-09-23T09:19:12.58Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/12/e5e0282d673bb9746bacfb6e2dba8719989d3660cdb2ea79aee9a9651afb/anyio-4.10.0-py3-none-any.whl", hash = "sha256:60e474ac86736bbfd6f210f7a61218939c318f43f9972497381f1c5e930ed3d1", size = 107213, upload-time = "2025-08-04T08:54:24.882Z" }, + { url = "https://files.pythonhosted.org/packages/15/b3/9b1a8074496371342ec1e796a96f99c82c945a339cd81a8e73de28b4cf9e/anyio-4.11.0-py3-none-any.whl", hash = "sha256:0287e96f4d26d4149305414d4e3bc32f0dcd0862365a4bddea19d7a1ec38c4fc", size = 109097, upload-time = "2025-09-23T09:19:10.601Z" }, ] [[package]] name = "arrow" -version = "1.3.0" -source = { registry = "https://pypi.org/simple/" } +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "python-dateutil" }, - { name = "types-python-dateutil" }, + { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2e/00/0f6e8fcdb23ea632c866620cc872729ff43ed91d284c866b515c6342b173/arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85", size = 131960, upload-time = "2023-09-30T22:11:18.25Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/33/032cdc44182491aa708d06a68b62434140d8c50820a087fac7af37703357/arrow-1.4.0.tar.gz", hash = "sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7", size = 152931, upload-time = "2025-10-18T17:46:46.761Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/ed/e97229a566617f2ae958a6b13e7cc0f585470eac730a73e9e82c32a3cdd2/arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80", size = 66419, upload-time = "2023-09-30T22:11:16.072Z" }, + { url = "https://files.pythonhosted.org/packages/ed/c9/d7977eaacb9df673210491da99e6a247e93df98c715fc43fd136ce1d3d33/arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205", size = 68797, upload-time = "2025-10-18T17:46:45.663Z" }, +] + +[[package]] +name = "asgiref" +version = "3.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/46/08/4dfec9b90758a59acc6be32ac82e98d1fbfc321cb5cfa410436dbacf821c/asgiref-3.10.0.tar.gz", hash = "sha256:d89f2d8cd8b56dada7d52fa7dc8075baa08fb836560710d38c292a7a3f78c04e", size = 37483, upload-time = "2025-10-05T09:15:06.557Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/9c/fc2331f538fbf7eedba64b2052e99ccf9ba9d6888e2f41441ee28847004b/asgiref-3.10.0-py3-none-any.whl", hash = "sha256:aef8a81283a34d0ab31630c9b7dfe70c812c95eba78171367ca8745e88124734", size = 24050, upload-time = "2025-10-05T09:15:05.11Z" }, +] + +[[package]] +name = "attrs" +version = "25.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, ] [[package]] name = "babel" version = "2.17.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, @@ -63,7 +82,7 @@ wheels = [ [[package]] name = "bcrypt" version = "4.0.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/8c/ae/3af7d006aacf513975fd1948a6b4d6f8b4a307f8a244e1a3d3774b297aad/bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd", size = 25498, upload-time = "2022-10-09T15:36:49.775Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/78/d4/3b2657bd58ef02b23a07729b0df26f21af97169dbd0b5797afa9e97ebb49/bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f", size = 473446, upload-time = "2022-10-09T15:36:25.481Z" }, @@ -80,74 +99,128 @@ wheels = [ ] [[package]] -name = "certifi" -version = "2025.8.3" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +name = "boto3" +version = "1.40.55" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, + { name = "jmespath" }, + { name = "s3transfer" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/d8/a279c054e0c9731172f05b3d118f3ffc9d74806657f84fc0c93c42d1bb5d/boto3-1.40.55.tar.gz", hash = "sha256:27e35b4fa9edd414ce06c1a748bf57cacd8203271847d93fc1053e4a4ec6e1a9", size = 111590, upload-time = "2025-10-17T19:34:56.753Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, + { url = "https://files.pythonhosted.org/packages/42/8c/559c6145d857ed953536a83f3a94915bbd5d3d2d406db1abf8bf40be7645/boto3-1.40.55-py3-none-any.whl", hash = "sha256:2e30f5a0d49e107b8a5c0c487891afd300bfa410e1d918bf187ae45ac3839332", size = 139322, upload-time = "2025-10-17T19:34:55.028Z" }, +] + +[[package]] +name = "botocore" +version = "1.40.55" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jmespath" }, + { name = "python-dateutil" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a4/92/dce4842b2e215d213d34b064fcdd13c6a782c43344e77336bcde586e9229/botocore-1.40.55.tar.gz", hash = "sha256:79b6472e2de92b3519d44fc1eec8c5feced7f99a0d10fdea6dc93133426057c1", size = 14446917, upload-time = "2025-10-17T19:34:47.44Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/21/30/f13bbc36e83b78777ff1abf50a084efcc3336b808e76560d8c5a0c9219e0/botocore-1.40.55-py3-none-any.whl", hash = "sha256:cdc38f7a4ddb30a2cd1cdd4fabde2a5a16e41b5a642292e1c30de5c4e46f5d44", size = 14116107, upload-time = "2025-10-17T19:34:44.398Z" }, +] + +[[package]] +name = "camel-ai" +version = "0.2.76a13" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama" }, + { name = "docstring-parser" }, + { name = "httpx" }, + { name = "jsonschema" }, + { name = "mcp" }, + { name = "openai" }, + { name = "pillow" }, + { name = "psutil" }, + { name = "pydantic" }, + { name = "tiktoken" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f7/7c/0145edf0307e360557917de28691eb0c41b36b017a28c6b67e58a729a6da/camel_ai-0.2.76a13.tar.gz", hash = "sha256:487570c36a39a333ae8000783babd5a82350a829aaa8aa2ae712470b596cafe1", size = 950278, upload-time = "2025-10-06T06:09:46.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/46/9886106669491737631178830bce79bd7bf63391db4d2200f645089dd9df/camel_ai-0.2.76a13-py3-none-any.whl", hash = "sha256:b860412e4a5b5fc31b0cc3d4b1eeefcd02382d9a5aced252856a1eff0285a97b", size = 1400549, upload-time = "2025-10-06T06:09:43.291Z" }, +] + +[[package]] +name = "certifi" +version = "2025.10.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" }, ] [[package]] name = "cffi" -version = "1.17.1" -source = { registry = "https://pypi.org/simple/" } +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "pycparser" }, + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" }, - { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" }, - { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" }, - { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" }, - { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" }, - { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" }, - { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" }, - { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" }, - { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" }, - { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, ] [[package]] name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } +version = "3.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, - { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, - { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, - { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, - { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, - { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, - { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, - { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, - { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, - { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, - { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, - { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" }, + { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, + { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, ] [[package]] name = "click" -version = "8.2.1" -source = { registry = "https://pypi.org/simple/" } +version = "8.3.0" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/61/de6cd827efad202d7057d93e0fed9294b96952e188f7384832791c7b2254/click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4", size = 276943, upload-time = "2025-09-18T17:32:23.696Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/db/d3/9dcc0f5797f070ec8edf30fbadfb200e71d9db6b84d211e3b2085a7589a0/click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc", size = 107295, upload-time = "2025-09-18T17:32:22.42Z" }, ] [[package]] name = "colorama" version = "0.4.6" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, @@ -156,7 +229,7 @@ wheels = [ [[package]] name = "convert-case" version = "1.2.3" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/6c/ac/22e9945f24acae18c43d1ff01f17ed792d4ba80b9d0757f2d18d23ce82ec/convert-case-1.2.3.tar.gz", hash = "sha256:a8c4329e47233a2b16cac3c5d020e8ba0305293efbe22a6d80f8ffddf049703f", size = 6984, upload-time = "2023-05-23T19:27:09.469Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/f3/2e/500ff29726ef207fdf6b625e62caf3839662c5d845897efc93bdf019192a/convert_case-1.2.3-py3-none-any.whl", hash = "sha256:ec8884050ca548e990666f82cba7ae2edfaa3c85dbead3042c2fd663b292373a", size = 9373, upload-time = "2023-05-23T19:27:06.039Z" }, @@ -164,43 +237,49 @@ wheels = [ [[package]] name = "cryptography" -version = "45.0.5" -source = { registry = "https://pypi.org/simple/" } +version = "46.0.3" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/1e/49527ac611af559665f71cbb8f92b332b5ec9c6fbc4e88b0f8e92f5e85df/cryptography-45.0.5.tar.gz", hash = "sha256:72e76caa004ab63accdf26023fccd1d087f6d90ec6048ff33ad0445abf7f605a", size = 744903, upload-time = "2025-07-02T13:06:25.941Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/fb/09e28bc0c46d2c547085e60897fea96310574c70fb21cd58a730a45f3403/cryptography-45.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:101ee65078f6dd3e5a028d4f19c07ffa4dd22cce6a20eaa160f8b5219911e7d8", size = 7043092, upload-time = "2025-07-02T13:05:01.514Z" }, - { url = "https://files.pythonhosted.org/packages/b1/05/2194432935e29b91fb649f6149c1a4f9e6d3d9fc880919f4ad1bcc22641e/cryptography-45.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3a264aae5f7fbb089dbc01e0242d3b67dffe3e6292e1f5182122bdf58e65215d", size = 4205926, upload-time = "2025-07-02T13:05:04.741Z" }, - { url = "https://files.pythonhosted.org/packages/07/8b/9ef5da82350175e32de245646b1884fc01124f53eb31164c77f95a08d682/cryptography-45.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e74d30ec9c7cb2f404af331d5b4099a9b322a8a6b25c4632755c8757345baac5", size = 4429235, upload-time = "2025-07-02T13:05:07.084Z" }, - { url = "https://files.pythonhosted.org/packages/7c/e1/c809f398adde1994ee53438912192d92a1d0fc0f2d7582659d9ef4c28b0c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3af26738f2db354aafe492fb3869e955b12b2ef2e16908c8b9cb928128d42c57", size = 4209785, upload-time = "2025-07-02T13:05:09.321Z" }, - { url = "https://files.pythonhosted.org/packages/d0/8b/07eb6bd5acff58406c5e806eff34a124936f41a4fb52909ffa4d00815f8c/cryptography-45.0.5-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e6c00130ed423201c5bc5544c23359141660b07999ad82e34e7bb8f882bb78e0", size = 3893050, upload-time = "2025-07-02T13:05:11.069Z" }, - { url = "https://files.pythonhosted.org/packages/ec/ef/3333295ed58d900a13c92806b67e62f27876845a9a908c939f040887cca9/cryptography-45.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:dd420e577921c8c2d31289536c386aaa30140b473835e97f83bc71ea9d2baf2d", size = 4457379, upload-time = "2025-07-02T13:05:13.32Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9d/44080674dee514dbb82b21d6fa5d1055368f208304e2ab1828d85c9de8f4/cryptography-45.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d05a38884db2ba215218745f0781775806bde4f32e07b135348355fe8e4991d9", size = 4209355, upload-time = "2025-07-02T13:05:15.017Z" }, - { url = "https://files.pythonhosted.org/packages/c9/d8/0749f7d39f53f8258e5c18a93131919ac465ee1f9dccaf1b3f420235e0b5/cryptography-45.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:ad0caded895a00261a5b4aa9af828baede54638754b51955a0ac75576b831b27", size = 4456087, upload-time = "2025-07-02T13:05:16.945Z" }, - { url = "https://files.pythonhosted.org/packages/09/d7/92acac187387bf08902b0bf0699816f08553927bdd6ba3654da0010289b4/cryptography-45.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9024beb59aca9d31d36fcdc1604dd9bbeed0a55bface9f1908df19178e2f116e", size = 4332873, upload-time = "2025-07-02T13:05:18.743Z" }, - { url = "https://files.pythonhosted.org/packages/03/c2/840e0710da5106a7c3d4153c7215b2736151bba60bf4491bdb421df5056d/cryptography-45.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:91098f02ca81579c85f66df8a588c78f331ca19089763d733e34ad359f474174", size = 4564651, upload-time = "2025-07-02T13:05:21.382Z" }, - { url = "https://files.pythonhosted.org/packages/2e/92/cc723dd6d71e9747a887b94eb3827825c6c24b9e6ce2bb33b847d31d5eaa/cryptography-45.0.5-cp311-abi3-win32.whl", hash = "sha256:926c3ea71a6043921050eaa639137e13dbe7b4ab25800932a8498364fc1abec9", size = 2929050, upload-time = "2025-07-02T13:05:23.39Z" }, - { url = "https://files.pythonhosted.org/packages/1f/10/197da38a5911a48dd5389c043de4aec4b3c94cb836299b01253940788d78/cryptography-45.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:b85980d1e345fe769cfc57c57db2b59cff5464ee0c045d52c0df087e926fbe63", size = 3403224, upload-time = "2025-07-02T13:05:25.202Z" }, - { url = "https://files.pythonhosted.org/packages/fe/2b/160ce8c2765e7a481ce57d55eba1546148583e7b6f85514472b1d151711d/cryptography-45.0.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:f3562c2f23c612f2e4a6964a61d942f891d29ee320edb62ff48ffb99f3de9ae8", size = 7017143, upload-time = "2025-07-02T13:05:27.229Z" }, - { url = "https://files.pythonhosted.org/packages/c2/e7/2187be2f871c0221a81f55ee3105d3cf3e273c0a0853651d7011eada0d7e/cryptography-45.0.5-cp37-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:3fcfbefc4a7f332dece7272a88e410f611e79458fab97b5efe14e54fe476f4fd", size = 4197780, upload-time = "2025-07-02T13:05:29.299Z" }, - { url = "https://files.pythonhosted.org/packages/b9/cf/84210c447c06104e6be9122661159ad4ce7a8190011669afceeaea150524/cryptography-45.0.5-cp37-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:460f8c39ba66af7db0545a8c6f2eabcbc5a5528fc1cf6c3fa9a1e44cec33385e", size = 4420091, upload-time = "2025-07-02T13:05:31.221Z" }, - { url = "https://files.pythonhosted.org/packages/3e/6a/cb8b5c8bb82fafffa23aeff8d3a39822593cee6e2f16c5ca5c2ecca344f7/cryptography-45.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:9b4cf6318915dccfe218e69bbec417fdd7c7185aa7aab139a2c0beb7468c89f0", size = 4198711, upload-time = "2025-07-02T13:05:33.062Z" }, - { url = "https://files.pythonhosted.org/packages/04/f7/36d2d69df69c94cbb2473871926daf0f01ad8e00fe3986ac3c1e8c4ca4b3/cryptography-45.0.5-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2089cc8f70a6e454601525e5bf2779e665d7865af002a5dec8d14e561002e135", size = 3883299, upload-time = "2025-07-02T13:05:34.94Z" }, - { url = "https://files.pythonhosted.org/packages/82/c7/f0ea40f016de72f81288e9fe8d1f6748036cb5ba6118774317a3ffc6022d/cryptography-45.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:0027d566d65a38497bc37e0dd7c2f8ceda73597d2ac9ba93810204f56f52ebc7", size = 4450558, upload-time = "2025-07-02T13:05:37.288Z" }, - { url = "https://files.pythonhosted.org/packages/06/ae/94b504dc1a3cdf642d710407c62e86296f7da9e66f27ab12a1ee6fdf005b/cryptography-45.0.5-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:be97d3a19c16a9be00edf79dca949c8fa7eff621763666a145f9f9535a5d7f42", size = 4198020, upload-time = "2025-07-02T13:05:39.102Z" }, - { url = "https://files.pythonhosted.org/packages/05/2b/aaf0adb845d5dabb43480f18f7ca72e94f92c280aa983ddbd0bcd6ecd037/cryptography-45.0.5-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:7760c1c2e1a7084153a0f68fab76e754083b126a47d0117c9ed15e69e2103492", size = 4449759, upload-time = "2025-07-02T13:05:41.398Z" }, - { url = "https://files.pythonhosted.org/packages/91/e4/f17e02066de63e0100a3a01b56f8f1016973a1d67551beaf585157a86b3f/cryptography-45.0.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6ff8728d8d890b3dda5765276d1bc6fb099252915a2cd3aff960c4c195745dd0", size = 4319991, upload-time = "2025-07-02T13:05:43.64Z" }, - { url = "https://files.pythonhosted.org/packages/f2/2e/e2dbd629481b499b14516eed933f3276eb3239f7cee2dcfa4ee6b44d4711/cryptography-45.0.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7259038202a47fdecee7e62e0fd0b0738b6daa335354396c6ddebdbe1206af2a", size = 4554189, upload-time = "2025-07-02T13:05:46.045Z" }, - { url = "https://files.pythonhosted.org/packages/f8/ea/a78a0c38f4c8736287b71c2ea3799d173d5ce778c7d6e3c163a95a05ad2a/cryptography-45.0.5-cp37-abi3-win32.whl", hash = "sha256:1e1da5accc0c750056c556a93c3e9cb828970206c68867712ca5805e46dc806f", size = 2911769, upload-time = "2025-07-02T13:05:48.329Z" }, - { url = "https://files.pythonhosted.org/packages/79/b3/28ac139109d9005ad3f6b6f8976ffede6706a6478e21c889ce36c840918e/cryptography-45.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:90cb0a7bb35959f37e23303b7eed0a32280510030daba3f7fdfbb65defde6a97", size = 3390016, upload-time = "2025-07-02T13:05:50.811Z" }, + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, + { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, + { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, + { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, ] [[package]] name = "distro" version = "1.9.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, @@ -208,11 +287,20 @@ wheels = [ [[package]] name = "dnspython" -version = "2.7.0" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" } +version = "2.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, +] + +[[package]] +name = "docstring-parser" +version = "0.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, ] [[package]] @@ -223,6 +311,7 @@ dependencies = [ { name = "alembic" }, { name = "arrow" }, { name = "bcrypt" }, + { name = "camel-ai" }, { name = "click" }, { name = "convert-case" }, { name = "cryptography" }, @@ -233,7 +322,7 @@ dependencies = [ { name = "fastapi-pagination" }, { name = "httpx" }, { name = "itsdangerous" }, - { name = "loguru" }, + { name = "openai" }, { name = "openpyxl" }, { name = "pandas" }, { name = "passlib", extra = ["bcrypt"] }, @@ -248,6 +337,7 @@ dependencies = [ { name = "sqids" }, { name = "sqlalchemy-utils" }, { name = "sqlmodel" }, + { name = "traceroot" }, ] [package.metadata] @@ -255,6 +345,7 @@ requires-dist = [ { name = "alembic", specifier = ">=1.15.2" }, { name = "arrow", specifier = ">=1.3.0" }, { name = "bcrypt", specifier = "==4.0.1" }, + { name = "camel-ai", specifier = "==0.2.76a13" }, { name = "click", specifier = ">=8.1.8" }, { name = "convert-case", specifier = ">=1.2.3" }, { name = "cryptography", specifier = ">=45.0.4" }, @@ -265,7 +356,7 @@ requires-dist = [ { name = "fastapi-pagination", specifier = ">=0.12.34" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "itsdangerous", specifier = ">=2.2.0" }, - { name = "loguru", specifier = ">=0.7.3" }, + { name = "openai", specifier = ">=1.99.3,<2" }, { name = "openpyxl", specifier = ">=3.1.5" }, { name = "pandas", specifier = ">=2.2.3" }, { name = "passlib", extras = ["bcrypt"], specifier = ">=1.7.4" }, @@ -280,25 +371,26 @@ requires-dist = [ { name = "sqids", specifier = ">=0.5.2" }, { name = "sqlalchemy-utils", specifier = ">=0.41.2" }, { name = "sqlmodel", specifier = ">=0.0.24" }, + { name = "traceroot", specifier = ">=0.0.7" }, ] [[package]] name = "email-validator" -version = "2.2.0" -source = { registry = "https://pypi.org/simple/" } +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dnspython" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/48/ce/13508a1ec3f8bb981ae4ca79ea40384becc868bfae97fd1c942bb3a001b1/email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7", size = 48967, upload-time = "2024-06-20T11:30:30.034Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/ee/bf0adb559ad3c786f12bcbc9296b3f5675f529199bef03e2df281fa1fadb/email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631", size = 33521, upload-time = "2024-06-20T11:30:28.248Z" }, + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" }, ] [[package]] name = "et-xmlfile" version = "2.0.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234, upload-time = "2024-10-25T17:25:40.039Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" }, @@ -306,38 +398,40 @@ wheels = [ [[package]] name = "exa-py" -version = "1.14.20" -source = { registry = "https://pypi.org/simple/" } +version = "1.16.1" +source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "httpcore" }, { name = "httpx" }, { name = "openai" }, { name = "pydantic" }, + { name = "python-dotenv" }, { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bd/00/b7efa5458c92ac415a334c9b27b0cfd6f0327ee545bd7f4c8639129a0ee0/exa_py-1.14.20.tar.gz", hash = "sha256:423789a0635b7a4ecd5f56d6b4a0dfb01126fa45ce1e04106c0bb96b7d551ebf", size = 35483, upload-time = "2025-07-30T18:52:53.35Z" } +sdist = { url = "https://files.pythonhosted.org/packages/77/15/abbe4361f42416c1741d252821bdfffe0e1ad9b39655b04db417b79b0d55/exa_py-1.16.1.tar.gz", hash = "sha256:3cb371b8efd321881a8217070f16afdac5afbaa9229177f80d5c427e1a6dbd59", size = 41364, upload-time = "2025-10-09T21:09:08.23Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/29/36/d574fd02741fa7706def78fd81f5fe405a84dca3d3cb94f80f27469d7d38/exa_py-1.14.20-py3-none-any.whl", hash = "sha256:e0ed9d99c3c494a0e6903e11a0f6fb773b3b23d0cd802380cf58efc97d9d332d", size = 45156, upload-time = "2025-07-30T18:52:52.01Z" }, + { url = "https://files.pythonhosted.org/packages/95/28/5b871e0ac1b76e560f75226f70897cb3e7cb66022cfb58507d0e7d6217ca/exa_py-1.16.1-py3-none-any.whl", hash = "sha256:3b323ed32725b72110720306ea12da09161cfa9c8ac64797a9c0b66869741f27", size = 56631, upload-time = "2025-10-09T21:09:07.099Z" }, ] [[package]] name = "fastapi" -version = "0.116.1" -source = { registry = "https://pypi.org/simple/" } +version = "0.119.1" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/d7/6c8b3bfe33eeffa208183ec037fee0cce9f7f024089ab1c5d12ef04bd27c/fastapi-0.116.1.tar.gz", hash = "sha256:ed52cbf946abfd70c5a0dccb24673f0670deeb517a88b3544d03c2a6bf283143", size = 296485, upload-time = "2025-07-11T16:22:32.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/f4/152127681182e6413e7a89684c434e19e7414ed7ac0c632999c3c6980640/fastapi-0.119.1.tar.gz", hash = "sha256:a5e3426edce3fe221af4e1992c6d79011b247e3b03cc57999d697fe76cbf8ae0", size = 338616, upload-time = "2025-10-20T11:30:27.734Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e5/47/d63c60f59a59467fda0f93f46335c9d18526d7071f025cb5b89d5353ea42/fastapi-0.116.1-py3-none-any.whl", hash = "sha256:c46ac7c312df840f0c9e220f7964bada936781bc4e2e6eb71f1c4d7553786565", size = 95631, upload-time = "2025-07-11T16:22:30.485Z" }, + { url = "https://files.pythonhosted.org/packages/b1/26/e6d959b4ac959fdb3e9c4154656fc160794db6af8e64673d52759456bf07/fastapi-0.119.1-py3-none-any.whl", hash = "sha256:0b8c2a2cce853216e150e9bd4faaed88227f8eb37de21cb200771f491586a27f", size = 108123, upload-time = "2025-10-20T11:30:26.185Z" }, ] [[package]] name = "fastapi-babel" version = "1.0.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "babel" }, { name = "fastapi" }, @@ -351,7 +445,7 @@ wheels = [ [[package]] name = "fastapi-filter" version = "2.0.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fastapi" }, { name = "pydantic" }, @@ -363,46 +457,72 @@ wheels = [ [[package]] name = "fastapi-pagination" -version = "0.13.3" -source = { registry = "https://pypi.org/simple/" } +version = "0.14.3" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "fastapi" }, { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/4c/e98a1665b6ac2e9e4ed98450e4e0ea48108f3bc52de517d9a70cc22761c2/fastapi_pagination-0.13.3.tar.gz", hash = "sha256:40c2383aff13a3a0e4a2742dfbf004572e88458cd8f338d85f90a27e07abab4a", size = 550898, upload-time = "2025-06-25T21:22:15.287Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/df/b8a227a621713ed0133a737dee91066beb09e8769ff875225319da4a3a26/fastapi_pagination-0.14.3.tar.gz", hash = "sha256:be8e81e21235c0758cbdd2f0e597c65bcb82a85062e2b99a9474418d23006791", size = 568147, upload-time = "2025-10-08T10:58:01.833Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bf/73/ef1ab892c2d189d8b6bd72325e9e710df6737c3b7976e12aa5749a56ea01/fastapi_pagination-0.13.3-py3-none-any.whl", hash = "sha256:e1b1cc7fa5c773c61087845ef8a73ed6b516071c057418698b9242461573f44e", size = 50986, upload-time = "2025-06-25T21:22:13.591Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6a/0b6804e1c20013855379fe58e02206e9cc7f7131653d8daad1af6be67851/fastapi_pagination-0.14.3-py3-none-any.whl", hash = "sha256:e87350b64010fd3b2df840218b1f65a21eec6078238cd3a1794c2468a03ea45f", size = 52559, upload-time = "2025-10-08T10:58:00.428Z" }, +] + +[[package]] +name = "googleapis-common-protos" +version = "1.71.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/43/b25abe02db2911397819003029bef768f68a974f2ece483e6084d1a5f754/googleapis_common_protos-1.71.0.tar.gz", hash = "sha256:1aec01e574e29da63c80ba9f7bbf1ccfaacf1da877f23609fe236ca7c72a2e2e", size = 146454, upload-time = "2025-10-20T14:58:08.732Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/25/e8/eba9fece11d57a71e3e22ea672742c8f3cf23b35730c9e96db768b295216/googleapis_common_protos-1.71.0-py3-none-any.whl", hash = "sha256:59034a1d849dc4d18971997a72ac56246570afdd17f9369a0ff68218d50ab78c", size = 294576, upload-time = "2025-10-20T14:56:21.295Z" }, ] [[package]] name = "greenlet" -version = "3.2.3" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" } +version = "3.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/cf/f5c0b23309070ae93de75c90d29300751a5aacefc0a3ed1b1d8edb28f08b/greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad", size = 270732, upload-time = "2025-06-05T16:10:08.26Z" }, - { url = "https://files.pythonhosted.org/packages/48/ae/91a957ba60482d3fecf9be49bc3948f341d706b52ddb9d83a70d42abd498/greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef", size = 639033, upload-time = "2025-06-05T16:38:53.983Z" }, - { url = "https://files.pythonhosted.org/packages/6f/df/20ffa66dd5a7a7beffa6451bdb7400d66251374ab40b99981478c69a67a8/greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3", size = 652999, upload-time = "2025-06-05T16:41:37.89Z" }, - { url = "https://files.pythonhosted.org/packages/51/b4/ebb2c8cb41e521f1d72bf0465f2f9a2fd803f674a88db228887e6847077e/greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95", size = 647368, upload-time = "2025-06-05T16:48:21.467Z" }, - { url = "https://files.pythonhosted.org/packages/8e/6a/1e1b5aa10dced4ae876a322155705257748108b7fd2e4fae3f2a091fe81a/greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb", size = 650037, upload-time = "2025-06-05T16:13:06.402Z" }, - { url = "https://files.pythonhosted.org/packages/26/f2/ad51331a157c7015c675702e2d5230c243695c788f8f75feba1af32b3617/greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b", size = 608402, upload-time = "2025-06-05T16:12:51.91Z" }, - { url = "https://files.pythonhosted.org/packages/26/bc/862bd2083e6b3aff23300900a956f4ea9a4059de337f5c8734346b9b34fc/greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0", size = 1119577, upload-time = "2025-06-05T16:36:49.787Z" }, - { url = "https://files.pythonhosted.org/packages/86/94/1fc0cc068cfde885170e01de40a619b00eaa8f2916bf3541744730ffb4c3/greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36", size = 1147121, upload-time = "2025-06-05T16:12:42.527Z" }, - { url = "https://files.pythonhosted.org/packages/27/1a/199f9587e8cb08a0658f9c30f3799244307614148ffe8b1e3aa22f324dea/greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3", size = 297603, upload-time = "2025-06-05T16:20:12.651Z" }, - { url = "https://files.pythonhosted.org/packages/d8/ca/accd7aa5280eb92b70ed9e8f7fd79dc50a2c21d8c73b9a0856f5b564e222/greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86", size = 271479, upload-time = "2025-06-05T16:10:47.525Z" }, - { url = "https://files.pythonhosted.org/packages/55/71/01ed9895d9eb49223280ecc98a557585edfa56b3d0e965b9fa9f7f06b6d9/greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97", size = 683952, upload-time = "2025-06-05T16:38:55.125Z" }, - { url = "https://files.pythonhosted.org/packages/ea/61/638c4bdf460c3c678a0a1ef4c200f347dff80719597e53b5edb2fb27ab54/greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728", size = 696917, upload-time = "2025-06-05T16:41:38.959Z" }, - { url = "https://files.pythonhosted.org/packages/22/cc/0bd1a7eb759d1f3e3cc2d1bc0f0b487ad3cc9f34d74da4b80f226fde4ec3/greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a", size = 692443, upload-time = "2025-06-05T16:48:23.113Z" }, - { url = "https://files.pythonhosted.org/packages/67/10/b2a4b63d3f08362662e89c103f7fe28894a51ae0bc890fabf37d1d780e52/greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892", size = 692995, upload-time = "2025-06-05T16:13:07.972Z" }, - { url = "https://files.pythonhosted.org/packages/5a/c6/ad82f148a4e3ce9564056453a71529732baf5448ad53fc323e37efe34f66/greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141", size = 655320, upload-time = "2025-06-05T16:12:53.453Z" }, - { url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" }, + { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, + { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, + { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" }, + { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, +] + +[[package]] +name = "grpcio" +version = "1.75.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/f7/8963848164c7604efb3a3e6ee457fdb3a469653e19002bd24742473254f8/grpcio-1.75.1.tar.gz", hash = "sha256:3e81d89ece99b9ace23a6916880baca613c03a799925afb2857887efa8b1b3d2", size = 12731327, upload-time = "2025-09-26T09:03:36.887Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/81/42be79e73a50aaa20af66731c2defeb0e8c9008d9935a64dd8ea8e8c44eb/grpcio-1.75.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:7b888b33cd14085d86176b1628ad2fcbff94cfbbe7809465097aa0132e58b018", size = 5668314, upload-time = "2025-09-26T09:01:55.424Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/3686ed15822fedc58c22f82b3a7403d9faf38d7c33de46d4de6f06e49426/grpcio-1.75.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:8775036efe4ad2085975531d221535329f5dac99b6c2a854a995456098f99546", size = 11476125, upload-time = "2025-09-26T09:01:57.927Z" }, + { url = "https://files.pythonhosted.org/packages/14/85/21c71d674f03345ab183c634ecd889d3330177e27baea8d5d247a89b6442/grpcio-1.75.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:bb658f703468d7fbb5dcc4037c65391b7dc34f808ac46ed9136c24fc5eeb041d", size = 6246335, upload-time = "2025-09-26T09:02:00.76Z" }, + { url = "https://files.pythonhosted.org/packages/fd/db/3beb661bc56a385ae4fa6b0e70f6b91ac99d47afb726fe76aaff87ebb116/grpcio-1.75.1-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4b7177a1cdb3c51b02b0c0a256b0a72fdab719600a693e0e9037949efffb200b", size = 6916309, upload-time = "2025-09-26T09:02:02.894Z" }, + { url = "https://files.pythonhosted.org/packages/1e/9c/eda9fe57f2b84343d44c1b66cf3831c973ba29b078b16a27d4587a1fdd47/grpcio-1.75.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7d4fa6ccc3ec2e68a04f7b883d354d7fea22a34c44ce535a2f0c0049cf626ddf", size = 6435419, upload-time = "2025-09-26T09:02:05.055Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b8/090c98983e0a9d602e3f919a6e2d4e470a8b489452905f9a0fa472cac059/grpcio-1.75.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3d86880ecaeb5b2f0a8afa63824de93adb8ebe4e49d0e51442532f4e08add7d6", size = 7064893, upload-time = "2025-09-26T09:02:07.275Z" }, + { url = "https://files.pythonhosted.org/packages/ec/c0/6d53d4dbbd00f8bd81571f5478d8a95528b716e0eddb4217cc7cb45aae5f/grpcio-1.75.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a8041d2f9e8a742aeae96f4b047ee44e73619f4f9d24565e84d5446c623673b6", size = 8011922, upload-time = "2025-09-26T09:02:09.527Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7c/48455b2d0c5949678d6982c3e31ea4d89df4e16131b03f7d5c590811cbe9/grpcio-1.75.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3652516048bf4c314ce12be37423c79829f46efffb390ad64149a10c6071e8de", size = 7466181, upload-time = "2025-09-26T09:02:12.279Z" }, + { url = "https://files.pythonhosted.org/packages/fd/12/04a0e79081e3170b6124f8cba9b6275871276be06c156ef981033f691880/grpcio-1.75.1-cp312-cp312-win32.whl", hash = "sha256:44b62345d8403975513af88da2f3d5cc76f73ca538ba46596f92a127c2aea945", size = 3938543, upload-time = "2025-09-26T09:02:14.77Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d7/11350d9d7fb5adc73d2b0ebf6ac1cc70135577701e607407fe6739a90021/grpcio-1.75.1-cp312-cp312-win_amd64.whl", hash = "sha256:b1e191c5c465fa777d4cafbaacf0c01e0d5278022082c0abbd2ee1d6454ed94d", size = 4641938, upload-time = "2025-09-26T09:02:16.927Z" }, ] [[package]] name = "h11" version = "0.16.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, @@ -411,7 +531,7 @@ wheels = [ [[package]] name = "httpcore" version = "1.0.9" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "h11" }, @@ -424,7 +544,7 @@ wheels = [ [[package]] name = "httpx" version = "0.28.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "certifi" }, @@ -437,18 +557,39 @@ wheels = [ ] [[package]] -name = "idna" -version = "3.10" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +name = "httpx-sse" +version = "0.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, ] [[package]] name = "itsdangerous" version = "2.2.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, @@ -456,57 +597,69 @@ wheels = [ [[package]] name = "jiter" -version = "0.10.0" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/9d/ae7ddb4b8ab3fb1b51faf4deb36cb48a4fbbd7cb36bad6a5fca4741306f7/jiter-0.10.0.tar.gz", hash = "sha256:07a7142c38aacc85194391108dc91b5b57093c978a9932bd86a36862759d9500", size = 162759, upload-time = "2025-05-18T19:04:59.73Z" } +version = "0.11.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/68/0357982493a7b20925aece061f7fb7a2678e3b232f8d73a6edb7e5304443/jiter-0.11.1.tar.gz", hash = "sha256:849dcfc76481c0ea0099391235b7ca97d7279e0fa4c86005457ac7c88e8b76dc", size = 168385, upload-time = "2025-10-17T11:31:15.186Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/2e/b0/279597e7a270e8d22623fea6c5d4eeac328e7d95c236ed51a2b884c54f70/jiter-0.10.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e0588107ec8e11b6f5ef0e0d656fb2803ac6cf94a96b2b9fc675c0e3ab5e8644", size = 311617, upload-time = "2025-05-18T19:04:02.078Z" }, - { url = "https://files.pythonhosted.org/packages/91/e3/0916334936f356d605f54cc164af4060e3e7094364add445a3bc79335d46/jiter-0.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cafc4628b616dc32530c20ee53d71589816cf385dd9449633e910d596b1f5c8a", size = 318947, upload-time = "2025-05-18T19:04:03.347Z" }, - { url = "https://files.pythonhosted.org/packages/6a/8e/fd94e8c02d0e94539b7d669a7ebbd2776e51f329bb2c84d4385e8063a2ad/jiter-0.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520ef6d981172693786a49ff5b09eda72a42e539f14788124a07530f785c3ad6", size = 344618, upload-time = "2025-05-18T19:04:04.709Z" }, - { url = "https://files.pythonhosted.org/packages/6f/b0/f9f0a2ec42c6e9c2e61c327824687f1e2415b767e1089c1d9135f43816bd/jiter-0.10.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:554dedfd05937f8fc45d17ebdf298fe7e0c77458232bcb73d9fbbf4c6455f5b3", size = 368829, upload-time = "2025-05-18T19:04:06.912Z" }, - { url = "https://files.pythonhosted.org/packages/e8/57/5bbcd5331910595ad53b9fd0c610392ac68692176f05ae48d6ce5c852967/jiter-0.10.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc299da7789deacf95f64052d97f75c16d4fc8c4c214a22bf8d859a4288a1c2", size = 491034, upload-time = "2025-05-18T19:04:08.222Z" }, - { url = "https://files.pythonhosted.org/packages/9b/be/c393df00e6e6e9e623a73551774449f2f23b6ec6a502a3297aeeece2c65a/jiter-0.10.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5161e201172de298a8a1baad95eb85db4fb90e902353b1f6a41d64ea64644e25", size = 388529, upload-time = "2025-05-18T19:04:09.566Z" }, - { url = "https://files.pythonhosted.org/packages/42/3e/df2235c54d365434c7f150b986a6e35f41ebdc2f95acea3036d99613025d/jiter-0.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2227db6ba93cb3e2bf67c87e594adde0609f146344e8207e8730364db27041", size = 350671, upload-time = "2025-05-18T19:04:10.98Z" }, - { url = "https://files.pythonhosted.org/packages/c6/77/71b0b24cbcc28f55ab4dbfe029f9a5b73aeadaba677843fc6dc9ed2b1d0a/jiter-0.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:15acb267ea5e2c64515574b06a8bf393fbfee6a50eb1673614aa45f4613c0cca", size = 390864, upload-time = "2025-05-18T19:04:12.722Z" }, - { url = "https://files.pythonhosted.org/packages/6a/d3/ef774b6969b9b6178e1d1e7a89a3bd37d241f3d3ec5f8deb37bbd203714a/jiter-0.10.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:901b92f2e2947dc6dfcb52fd624453862e16665ea909a08398dde19c0731b7f4", size = 522989, upload-time = "2025-05-18T19:04:14.261Z" }, - { url = "https://files.pythonhosted.org/packages/0c/41/9becdb1d8dd5d854142f45a9d71949ed7e87a8e312b0bede2de849388cb9/jiter-0.10.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d0cb9a125d5a3ec971a094a845eadde2db0de85b33c9f13eb94a0c63d463879e", size = 513495, upload-time = "2025-05-18T19:04:15.603Z" }, - { url = "https://files.pythonhosted.org/packages/9c/36/3468e5a18238bdedae7c4d19461265b5e9b8e288d3f86cd89d00cbb48686/jiter-0.10.0-cp313-cp313-win32.whl", hash = "sha256:48a403277ad1ee208fb930bdf91745e4d2d6e47253eedc96e2559d1e6527006d", size = 211289, upload-time = "2025-05-18T19:04:17.541Z" }, - { url = "https://files.pythonhosted.org/packages/7e/07/1c96b623128bcb913706e294adb5f768fb7baf8db5e1338ce7b4ee8c78ef/jiter-0.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:75f9eb72ecb640619c29bf714e78c9c46c9c4eaafd644bf78577ede459f330d4", size = 205074, upload-time = "2025-05-18T19:04:19.21Z" }, - { url = "https://files.pythonhosted.org/packages/54/46/caa2c1342655f57d8f0f2519774c6d67132205909c65e9aa8255e1d7b4f4/jiter-0.10.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:28ed2a4c05a1f32ef0e1d24c2611330219fed727dae01789f4a335617634b1ca", size = 318225, upload-time = "2025-05-18T19:04:20.583Z" }, - { url = "https://files.pythonhosted.org/packages/43/84/c7d44c75767e18946219ba2d703a5a32ab37b0bc21886a97bc6062e4da42/jiter-0.10.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14a4c418b1ec86a195f1ca69da8b23e8926c752b685af665ce30777233dfe070", size = 350235, upload-time = "2025-05-18T19:04:22.363Z" }, - { url = "https://files.pythonhosted.org/packages/01/16/f5a0135ccd968b480daad0e6ab34b0c7c5ba3bc447e5088152696140dcb3/jiter-0.10.0-cp313-cp313t-win_amd64.whl", hash = "sha256:d7bfed2fe1fe0e4dda6ef682cee888ba444b21e7a6553e03252e4feb6cf0adca", size = 207278, upload-time = "2025-05-18T19:04:23.627Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9b/1d646da42c3de6c2188fdaa15bce8ecb22b635904fc68be025e21249ba44/jiter-0.10.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:5e9251a5e83fab8d87799d3e1a46cb4b7f2919b895c6f4483629ed2446f66522", size = 310866, upload-time = "2025-05-18T19:04:24.891Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0e/26538b158e8a7c7987e94e7aeb2999e2e82b1f9d2e1f6e9874ddf71ebda0/jiter-0.10.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:023aa0204126fe5b87ccbcd75c8a0d0261b9abdbbf46d55e7ae9f8e22424eeb8", size = 318772, upload-time = "2025-05-18T19:04:26.161Z" }, - { url = "https://files.pythonhosted.org/packages/7b/fb/d302893151caa1c2636d6574d213e4b34e31fd077af6050a9c5cbb42f6fb/jiter-0.10.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c189c4f1779c05f75fc17c0c1267594ed918996a231593a21a5ca5438445216", size = 344534, upload-time = "2025-05-18T19:04:27.495Z" }, - { url = "https://files.pythonhosted.org/packages/01/d8/5780b64a149d74e347c5128d82176eb1e3241b1391ac07935693466d6219/jiter-0.10.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:15720084d90d1098ca0229352607cd68256c76991f6b374af96f36920eae13c4", size = 369087, upload-time = "2025-05-18T19:04:28.896Z" }, - { url = "https://files.pythonhosted.org/packages/e8/5b/f235a1437445160e777544f3ade57544daf96ba7e96c1a5b24a6f7ac7004/jiter-0.10.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4f2fb68e5f1cfee30e2b2a09549a00683e0fde4c6a2ab88c94072fc33cb7426", size = 490694, upload-time = "2025-05-18T19:04:30.183Z" }, - { url = "https://files.pythonhosted.org/packages/85/a9/9c3d4617caa2ff89cf61b41e83820c27ebb3f7b5fae8a72901e8cd6ff9be/jiter-0.10.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce541693355fc6da424c08b7edf39a2895f58d6ea17d92cc2b168d20907dee12", size = 388992, upload-time = "2025-05-18T19:04:32.028Z" }, - { url = "https://files.pythonhosted.org/packages/68/b1/344fd14049ba5c94526540af7eb661871f9c54d5f5601ff41a959b9a0bbd/jiter-0.10.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31c50c40272e189d50006ad5c73883caabb73d4e9748a688b216e85a9a9ca3b9", size = 351723, upload-time = "2025-05-18T19:04:33.467Z" }, - { url = "https://files.pythonhosted.org/packages/41/89/4c0e345041186f82a31aee7b9d4219a910df672b9fef26f129f0cda07a29/jiter-0.10.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fa3402a2ff9815960e0372a47b75c76979d74402448509ccd49a275fa983ef8a", size = 392215, upload-time = "2025-05-18T19:04:34.827Z" }, - { url = "https://files.pythonhosted.org/packages/55/58/ee607863e18d3f895feb802154a2177d7e823a7103f000df182e0f718b38/jiter-0.10.0-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:1956f934dca32d7bb647ea21d06d93ca40868b505c228556d3373cbd255ce853", size = 522762, upload-time = "2025-05-18T19:04:36.19Z" }, - { url = "https://files.pythonhosted.org/packages/15/d0/9123fb41825490d16929e73c212de9a42913d68324a8ce3c8476cae7ac9d/jiter-0.10.0-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:fcedb049bdfc555e261d6f65a6abe1d5ad68825b7202ccb9692636c70fcced86", size = 513427, upload-time = "2025-05-18T19:04:37.544Z" }, - { url = "https://files.pythonhosted.org/packages/d8/b3/2bd02071c5a2430d0b70403a34411fc519c2f227da7b03da9ba6a956f931/jiter-0.10.0-cp314-cp314-win32.whl", hash = "sha256:ac509f7eccca54b2a29daeb516fb95b6f0bd0d0d8084efaf8ed5dfc7b9f0b357", size = 210127, upload-time = "2025-05-18T19:04:38.837Z" }, - { url = "https://files.pythonhosted.org/packages/03/0c/5fe86614ea050c3ecd728ab4035534387cd41e7c1855ef6c031f1ca93e3f/jiter-0.10.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5ed975b83a2b8639356151cef5c0d597c68376fc4922b45d0eb384ac058cfa00", size = 318527, upload-time = "2025-05-18T19:04:40.612Z" }, - { url = "https://files.pythonhosted.org/packages/b3/4a/4175a563579e884192ba6e81725fc0448b042024419be8d83aa8a80a3f44/jiter-0.10.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3aa96f2abba33dc77f79b4cf791840230375f9534e5fac927ccceb58c5e604a5", size = 354213, upload-time = "2025-05-18T19:04:41.894Z" }, + { url = "https://files.pythonhosted.org/packages/15/8b/318e8af2c904a9d29af91f78c1e18f0592e189bbdb8a462902d31fe20682/jiter-0.11.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c92148eec91052538ce6823dfca9525f5cfc8b622d7f07e9891a280f61b8c96c", size = 305655, upload-time = "2025-10-17T11:29:18.859Z" }, + { url = "https://files.pythonhosted.org/packages/f7/29/6c7de6b5d6e511d9e736312c0c9bfcee8f9b6bef68182a08b1d78767e627/jiter-0.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ecd4da91b5415f183a6be8f7158d127bdd9e6a3174138293c0d48d6ea2f2009d", size = 315645, upload-time = "2025-10-17T11:29:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5f/ef9e5675511ee0eb7f98dd8c90509e1f7743dbb7c350071acae87b0145f3/jiter-0.11.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7e3ac25c00b9275684d47aa42febaa90a9958e19fd1726c4ecf755fbe5e553b", size = 348003, upload-time = "2025-10-17T11:29:22.712Z" }, + { url = "https://files.pythonhosted.org/packages/56/1b/abe8c4021010b0a320d3c62682769b700fb66f92c6db02d1a1381b3db025/jiter-0.11.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:57d7305c0a841858f866cd459cd9303f73883fb5e097257f3d4a3920722c69d4", size = 365122, upload-time = "2025-10-17T11:29:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/2a/2d/4a18013939a4f24432f805fbd5a19893e64650b933edb057cd405275a538/jiter-0.11.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e86fa10e117dce22c547f31dd6d2a9a222707d54853d8de4e9a2279d2c97f239", size = 488360, upload-time = "2025-10-17T11:29:25.724Z" }, + { url = "https://files.pythonhosted.org/packages/f0/77/38124f5d02ac4131f0dfbcfd1a19a0fac305fa2c005bc4f9f0736914a1a4/jiter-0.11.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ae5ef1d48aec7e01ee8420155d901bb1d192998fa811a65ebb82c043ee186711", size = 376884, upload-time = "2025-10-17T11:29:27.056Z" }, + { url = "https://files.pythonhosted.org/packages/7b/43/59fdc2f6267959b71dd23ce0bd8d4aeaf55566aa435a5d00f53d53c7eb24/jiter-0.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb68e7bf65c990531ad8715e57d50195daf7c8e6f1509e617b4e692af1108939", size = 358827, upload-time = "2025-10-17T11:29:28.698Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d0/b3cc20ff5340775ea3bbaa0d665518eddecd4266ba7244c9cb480c0c82ec/jiter-0.11.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43b30c8154ded5845fa454ef954ee67bfccce629b2dea7d01f795b42bc2bda54", size = 385171, upload-time = "2025-10-17T11:29:30.078Z" }, + { url = "https://files.pythonhosted.org/packages/d2/bc/94dd1f3a61f4dc236f787a097360ec061ceeebebf4ea120b924d91391b10/jiter-0.11.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:586cafbd9dd1f3ce6a22b4a085eaa6be578e47ba9b18e198d4333e598a91db2d", size = 518359, upload-time = "2025-10-17T11:29:31.464Z" }, + { url = "https://files.pythonhosted.org/packages/7e/8c/12ee132bd67e25c75f542c227f5762491b9a316b0dad8e929c95076f773c/jiter-0.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:677cc2517d437a83bb30019fd4cf7cad74b465914c56ecac3440d597ac135250", size = 509205, upload-time = "2025-10-17T11:29:32.895Z" }, + { url = "https://files.pythonhosted.org/packages/39/d5/9de848928ce341d463c7e7273fce90ea6d0ea4343cd761f451860fa16b59/jiter-0.11.1-cp312-cp312-win32.whl", hash = "sha256:fa992af648fcee2b850a3286a35f62bbbaeddbb6dbda19a00d8fbc846a947b6e", size = 205448, upload-time = "2025-10-17T11:29:34.217Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/8002d78637e05009f5e3fb5288f9d57d65715c33b5d6aa20fd57670feef5/jiter-0.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:88b5cae9fa51efeb3d4bd4e52bfd4c85ccc9cac44282e2a9640893a042ba4d87", size = 204285, upload-time = "2025-10-17T11:29:35.446Z" }, + { url = "https://files.pythonhosted.org/packages/9f/a2/bb24d5587e4dff17ff796716542f663deee337358006a80c8af43ddc11e5/jiter-0.11.1-cp312-cp312-win_arm64.whl", hash = "sha256:9a6cae1ab335551917f882f2c3c1efe7617b71b4c02381e4382a8fc80a02588c", size = 188712, upload-time = "2025-10-17T11:29:37.027Z" }, + { url = "https://files.pythonhosted.org/packages/a6/bc/950dd7f170c6394b6fdd73f989d9e729bd98907bcc4430ef080a72d06b77/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:0d4d6993edc83cf75e8c6828a8d6ce40a09ee87e38c7bfba6924f39e1337e21d", size = 302626, upload-time = "2025-10-17T11:31:09.645Z" }, + { url = "https://files.pythonhosted.org/packages/3a/65/43d7971ca82ee100b7b9b520573eeef7eabc0a45d490168ebb9a9b5bb8b2/jiter-0.11.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:f78d151c83a87a6cf5461d5ee55bc730dd9ae227377ac6f115b922989b95f838", size = 297034, upload-time = "2025-10-17T11:31:10.975Z" }, + { url = "https://files.pythonhosted.org/packages/19/4c/000e1e0c0c67e96557a279f8969487ea2732d6c7311698819f977abae837/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9022974781155cd5521d5cb10997a03ee5e31e8454c9d999dcdccd253f2353f", size = 337328, upload-time = "2025-10-17T11:31:12.399Z" }, + { url = "https://files.pythonhosted.org/packages/d9/71/71408b02c6133153336d29fa3ba53000f1e1a3f78bb2fc2d1a1865d2e743/jiter-0.11.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18c77aaa9117510d5bdc6a946baf21b1f0cfa58ef04d31c8d016f206f2118960", size = 343697, upload-time = "2025-10-17T11:31:13.773Z" }, ] [[package]] -name = "loguru" -version = "0.7.3" -source = { registry = "https://pypi.org/simple/" } -dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "win32-setctime", marker = "sys_platform == 'win32'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/3a/05/a1dae3dffd1116099471c643b8924f5aa6524411dc6c63fdae648c4f1aca/loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6", size = 63559, upload-time = "2024-12-06T11:20:56.608Z" } +name = "jmespath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/00/2a/e867e8531cf3e36b41201936b7fa7ba7b5702dbef42922193f05c8976cd6/jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe", size = 25843, upload-time = "2022-06-17T18:00:12.224Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/29/0348de65b8cc732daa3e33e67806420b2ae89bdce2b04af740289c5c6c8c/loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c", size = 61595, upload-time = "2024-12-06T11:20:54.538Z" }, + { url = "https://files.pythonhosted.org/packages/31/b4/b9b800c45527aadd64d5b442f9b932b00648617eb5d63d2c7a6587b7cafc/jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980", size = 20256, upload-time = "2022-06-17T18:00:10.251Z" }, +] + +[[package]] +name = "jsonschema" +version = "4.25.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/69/f7185de793a29082a9f3c7728268ffb31cb5095131a9c139a74078e27336/jsonschema-4.25.1.tar.gz", hash = "sha256:e4a9655ce0da0c0b67a085847e00a3a51449e1157f4f75e9fb5aa545e122eb85", size = 357342, upload-time = "2025-08-18T17:03:50.038Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/9c/8c95d856233c1f82500c2450b8c68576b4cf1c871db3afac5c34ff84e6fd/jsonschema-4.25.1-py3-none-any.whl", hash = "sha256:3fba0169e345c7175110351d456342c364814cfcf3b964ba4587f22915230a63", size = 90040, upload-time = "2025-08-18T17:03:48.373Z" }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2025.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, ] [[package]] name = "mako" version = "1.3.10" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] @@ -517,88 +670,68 @@ wheels = [ [[package]] name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, +] + +[[package]] +name = "mcp" +version = "1.18.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "httpx" }, + { name = "httpx-sse" }, + { name = "jsonschema" }, + { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, + { name = "sse-starlette" }, + { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1a/e0/fe34ce16ea2bacce489ab859abd1b47ae28b438c3ef60b9c5eee6c02592f/mcp-1.18.0.tar.gz", hash = "sha256:aa278c44b1efc0a297f53b68df865b988e52dd08182d702019edcf33a8e109f6", size = 482926, upload-time = "2025-10-16T19:19:55.125Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/44/f5970e3e899803823826283a70b6003afd46f28e082544407e24575eccd3/mcp-1.18.0-py3-none-any.whl", hash = "sha256:42f10c270de18e7892fdf9da259029120b1ea23964ff688248c69db9d72b1d0a", size = 168762, upload-time = "2025-10-16T19:19:53.2Z" }, ] [[package]] name = "numpy" -version = "2.3.2" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/37/7d/3fec4199c5ffb892bed55cff901e4f39a58c81df9c44c280499e92cad264/numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48", size = 20489306, upload-time = "2025-07-24T21:32:07.553Z" } +version = "2.3.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/f4/098d2270d52b41f1bd7db9fc288aaa0400cb48c2a3e2af6fa365d9720947/numpy-2.3.4.tar.gz", hash = "sha256:a7d018bfedb375a8d979ac758b120ba846a7fe764911a64465fd87b8729f4a6a", size = 20582187, upload-time = "2025-10-15T16:18:11.77Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/1c/c0/c6bb172c916b00700ed3bf71cb56175fd1f7dbecebf8353545d0b5519f6c/numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3", size = 20949074, upload-time = "2025-07-24T20:43:07.813Z" }, - { url = "https://files.pythonhosted.org/packages/20/4e/c116466d22acaf4573e58421c956c6076dc526e24a6be0903219775d862e/numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b", size = 14177311, upload-time = "2025-07-24T20:43:29.335Z" }, - { url = "https://files.pythonhosted.org/packages/78/45/d4698c182895af189c463fc91d70805d455a227261d950e4e0f1310c2550/numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6", size = 5106022, upload-time = "2025-07-24T20:43:37.999Z" }, - { url = "https://files.pythonhosted.org/packages/9f/76/3e6880fef4420179309dba72a8c11f6166c431cf6dee54c577af8906f914/numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089", size = 6640135, upload-time = "2025-07-24T20:43:49.28Z" }, - { url = "https://files.pythonhosted.org/packages/34/fa/87ff7f25b3c4ce9085a62554460b7db686fef1e0207e8977795c7b7d7ba1/numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2", size = 14278147, upload-time = "2025-07-24T20:44:10.328Z" }, - { url = "https://files.pythonhosted.org/packages/1d/0f/571b2c7a3833ae419fe69ff7b479a78d313581785203cc70a8db90121b9a/numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f", size = 16635989, upload-time = "2025-07-24T20:44:34.88Z" }, - { url = "https://files.pythonhosted.org/packages/24/5a/84ae8dca9c9a4c592fe11340b36a86ffa9fd3e40513198daf8a97839345c/numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee", size = 16053052, upload-time = "2025-07-24T20:44:58.872Z" }, - { url = "https://files.pythonhosted.org/packages/57/7c/e5725d99a9133b9813fcf148d3f858df98511686e853169dbaf63aec6097/numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6", size = 18577955, upload-time = "2025-07-24T20:45:26.714Z" }, - { url = "https://files.pythonhosted.org/packages/ae/11/7c546fcf42145f29b71e4d6f429e96d8d68e5a7ba1830b2e68d7418f0bbd/numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b", size = 6311843, upload-time = "2025-07-24T20:49:24.444Z" }, - { url = "https://files.pythonhosted.org/packages/aa/6f/a428fd1cb7ed39b4280d057720fed5121b0d7754fd2a9768640160f5517b/numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56", size = 12782876, upload-time = "2025-07-24T20:49:43.227Z" }, - { url = "https://files.pythonhosted.org/packages/65/85/4ea455c9040a12595fb6c43f2c217257c7b52dd0ba332c6a6c1d28b289fe/numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2", size = 10192786, upload-time = "2025-07-24T20:49:59.443Z" }, - { url = "https://files.pythonhosted.org/packages/80/23/8278f40282d10c3f258ec3ff1b103d4994bcad78b0cba9208317f6bb73da/numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab", size = 21047395, upload-time = "2025-07-24T20:45:58.821Z" }, - { url = "https://files.pythonhosted.org/packages/1f/2d/624f2ce4a5df52628b4ccd16a4f9437b37c35f4f8a50d00e962aae6efd7a/numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2", size = 14300374, upload-time = "2025-07-24T20:46:20.207Z" }, - { url = "https://files.pythonhosted.org/packages/f6/62/ff1e512cdbb829b80a6bd08318a58698867bca0ca2499d101b4af063ee97/numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a", size = 5228864, upload-time = "2025-07-24T20:46:30.58Z" }, - { url = "https://files.pythonhosted.org/packages/7d/8e/74bc18078fff03192d4032cfa99d5a5ca937807136d6f5790ce07ca53515/numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286", size = 6737533, upload-time = "2025-07-24T20:46:46.111Z" }, - { url = "https://files.pythonhosted.org/packages/19/ea/0731efe2c9073ccca5698ef6a8c3667c4cf4eea53fcdcd0b50140aba03bc/numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8", size = 14352007, upload-time = "2025-07-24T20:47:07.1Z" }, - { url = "https://files.pythonhosted.org/packages/cf/90/36be0865f16dfed20f4bc7f75235b963d5939707d4b591f086777412ff7b/numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a", size = 16701914, upload-time = "2025-07-24T20:47:32.459Z" }, - { url = "https://files.pythonhosted.org/packages/94/30/06cd055e24cb6c38e5989a9e747042b4e723535758e6153f11afea88c01b/numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91", size = 16132708, upload-time = "2025-07-24T20:47:58.129Z" }, - { url = "https://files.pythonhosted.org/packages/9a/14/ecede608ea73e58267fd7cb78f42341b3b37ba576e778a1a06baffbe585c/numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5", size = 18651678, upload-time = "2025-07-24T20:48:25.402Z" }, - { url = "https://files.pythonhosted.org/packages/40/f3/2fe6066b8d07c3685509bc24d56386534c008b462a488b7f503ba82b8923/numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5", size = 6441832, upload-time = "2025-07-24T20:48:37.181Z" }, - { url = "https://files.pythonhosted.org/packages/0b/ba/0937d66d05204d8f28630c9c60bc3eda68824abde4cf756c4d6aad03b0c6/numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450", size = 12927049, upload-time = "2025-07-24T20:48:56.24Z" }, - { url = "https://files.pythonhosted.org/packages/e9/ed/13542dd59c104d5e654dfa2ac282c199ba64846a74c2c4bcdbc3a0f75df1/numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a", size = 10262935, upload-time = "2025-07-24T20:49:13.136Z" }, - { url = "https://files.pythonhosted.org/packages/c9/7c/7659048aaf498f7611b783e000c7268fcc4dcf0ce21cd10aad7b2e8f9591/numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a", size = 20950906, upload-time = "2025-07-24T20:50:30.346Z" }, - { url = "https://files.pythonhosted.org/packages/80/db/984bea9d4ddf7112a04cfdfb22b1050af5757864cfffe8e09e44b7f11a10/numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b", size = 14185607, upload-time = "2025-07-24T20:50:51.923Z" }, - { url = "https://files.pythonhosted.org/packages/e4/76/b3d6f414f4eca568f469ac112a3b510938d892bc5a6c190cb883af080b77/numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125", size = 5114110, upload-time = "2025-07-24T20:51:01.041Z" }, - { url = "https://files.pythonhosted.org/packages/9e/d2/6f5e6826abd6bca52392ed88fe44a4b52aacb60567ac3bc86c67834c3a56/numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19", size = 6642050, upload-time = "2025-07-24T20:51:11.64Z" }, - { url = "https://files.pythonhosted.org/packages/c4/43/f12b2ade99199e39c73ad182f103f9d9791f48d885c600c8e05927865baf/numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f", size = 14296292, upload-time = "2025-07-24T20:51:33.488Z" }, - { url = "https://files.pythonhosted.org/packages/5d/f9/77c07d94bf110a916b17210fac38680ed8734c236bfed9982fd8524a7b47/numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5", size = 16638913, upload-time = "2025-07-24T20:51:58.517Z" }, - { url = "https://files.pythonhosted.org/packages/9b/d1/9d9f2c8ea399cc05cfff8a7437453bd4e7d894373a93cdc46361bbb49a7d/numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58", size = 16071180, upload-time = "2025-07-24T20:52:22.827Z" }, - { url = "https://files.pythonhosted.org/packages/4c/41/82e2c68aff2a0c9bf315e47d61951099fed65d8cb2c8d9dc388cb87e947e/numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0", size = 18576809, upload-time = "2025-07-24T20:52:51.015Z" }, - { url = "https://files.pythonhosted.org/packages/14/14/4b4fd3efb0837ed252d0f583c5c35a75121038a8c4e065f2c259be06d2d8/numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2", size = 6366410, upload-time = "2025-07-24T20:56:44.949Z" }, - { url = "https://files.pythonhosted.org/packages/11/9e/b4c24a6b8467b61aced5c8dc7dcfce23621baa2e17f661edb2444a418040/numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b", size = 12918821, upload-time = "2025-07-24T20:57:06.479Z" }, - { url = "https://files.pythonhosted.org/packages/0e/0f/0dc44007c70b1007c1cef86b06986a3812dd7106d8f946c09cfa75782556/numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910", size = 10477303, upload-time = "2025-07-24T20:57:22.879Z" }, - { url = "https://files.pythonhosted.org/packages/8b/3e/075752b79140b78ddfc9c0a1634d234cfdbc6f9bbbfa6b7504e445ad7d19/numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e", size = 21047524, upload-time = "2025-07-24T20:53:22.086Z" }, - { url = "https://files.pythonhosted.org/packages/fe/6d/60e8247564a72426570d0e0ea1151b95ce5bd2f1597bb878a18d32aec855/numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45", size = 14300519, upload-time = "2025-07-24T20:53:44.053Z" }, - { url = "https://files.pythonhosted.org/packages/4d/73/d8326c442cd428d47a067070c3ac6cc3b651a6e53613a1668342a12d4479/numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b", size = 5228972, upload-time = "2025-07-24T20:53:53.81Z" }, - { url = "https://files.pythonhosted.org/packages/34/2e/e71b2d6dad075271e7079db776196829019b90ce3ece5c69639e4f6fdc44/numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2", size = 6737439, upload-time = "2025-07-24T20:54:04.742Z" }, - { url = "https://files.pythonhosted.org/packages/15/b0/d004bcd56c2c5e0500ffc65385eb6d569ffd3363cb5e593ae742749b2daa/numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0", size = 14352479, upload-time = "2025-07-24T20:54:25.819Z" }, - { url = "https://files.pythonhosted.org/packages/11/e3/285142fcff8721e0c99b51686426165059874c150ea9ab898e12a492e291/numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0", size = 16702805, upload-time = "2025-07-24T20:54:50.814Z" }, - { url = "https://files.pythonhosted.org/packages/33/c3/33b56b0e47e604af2c7cd065edca892d180f5899599b76830652875249a3/numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2", size = 16133830, upload-time = "2025-07-24T20:55:17.306Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ae/7b1476a1f4d6a48bc669b8deb09939c56dd2a439db1ab03017844374fb67/numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf", size = 18652665, upload-time = "2025-07-24T20:55:46.665Z" }, - { url = "https://files.pythonhosted.org/packages/14/ba/5b5c9978c4bb161034148ade2de9db44ec316fab89ce8c400db0e0c81f86/numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1", size = 6514777, upload-time = "2025-07-24T20:55:57.66Z" }, - { url = "https://files.pythonhosted.org/packages/eb/46/3dbaf0ae7c17cdc46b9f662c56da2054887b8d9e737c1476f335c83d33db/numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b", size = 13111856, upload-time = "2025-07-24T20:56:17.318Z" }, - { url = "https://files.pythonhosted.org/packages/c1/9e/1652778bce745a67b5fe05adde60ed362d38eb17d919a540e813d30f6874/numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631", size = 10544226, upload-time = "2025-07-24T20:56:34.509Z" }, + { url = "https://files.pythonhosted.org/packages/96/7a/02420400b736f84317e759291b8edaeee9dc921f72b045475a9cbdb26b17/numpy-2.3.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1b5a3e808bc40827b5fa2c8196151a4c5abe110e1726949d7abddfe5c7ae11", size = 20957727, upload-time = "2025-10-15T16:15:44.9Z" }, + { url = "https://files.pythonhosted.org/packages/18/90/a014805d627aa5750f6f0e878172afb6454552da929144b3c07fcae1bb13/numpy-2.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2f91f496a87235c6aaf6d3f3d89b17dba64996abadccb289f48456cff931ca9", size = 14187262, upload-time = "2025-10-15T16:15:47.761Z" }, + { url = "https://files.pythonhosted.org/packages/c7/e4/0a94b09abe89e500dc748e7515f21a13e30c5c3fe3396e6d4ac108c25fca/numpy-2.3.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f77e5b3d3da652b474cc80a14084927a5e86a5eccf54ca8ca5cbd697bf7f2667", size = 5115992, upload-time = "2025-10-15T16:15:50.144Z" }, + { url = "https://files.pythonhosted.org/packages/88/dd/db77c75b055c6157cbd4f9c92c4458daef0dd9cbe6d8d2fe7f803cb64c37/numpy-2.3.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:8ab1c5f5ee40d6e01cbe96de5863e39b215a4d24e7d007cad56c7184fdf4aeef", size = 6648672, upload-time = "2025-10-15T16:15:52.442Z" }, + { url = "https://files.pythonhosted.org/packages/e1/e6/e31b0d713719610e406c0ea3ae0d90760465b086da8783e2fd835ad59027/numpy-2.3.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77b84453f3adcb994ddbd0d1c5d11db2d6bda1a2b7fd5ac5bd4649d6f5dc682e", size = 14284156, upload-time = "2025-10-15T16:15:54.351Z" }, + { url = "https://files.pythonhosted.org/packages/f9/58/30a85127bfee6f108282107caf8e06a1f0cc997cb6b52cdee699276fcce4/numpy-2.3.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4121c5beb58a7f9e6dfdee612cb24f4df5cd4db6e8261d7f4d7450a997a65d6a", size = 16641271, upload-time = "2025-10-15T16:15:56.67Z" }, + { url = "https://files.pythonhosted.org/packages/06/f2/2e06a0f2adf23e3ae29283ad96959267938d0efd20a2e25353b70065bfec/numpy-2.3.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:65611ecbb00ac9846efe04db15cbe6186f562f6bb7e5e05f077e53a599225d16", size = 16059531, upload-time = "2025-10-15T16:15:59.412Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e7/b106253c7c0d5dc352b9c8fab91afd76a93950998167fa3e5afe4ef3a18f/numpy-2.3.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dabc42f9c6577bcc13001b8810d300fe814b4cfbe8a92c873f269484594f9786", size = 18578983, upload-time = "2025-10-15T16:16:01.804Z" }, + { url = "https://files.pythonhosted.org/packages/73/e3/04ecc41e71462276ee867ccbef26a4448638eadecf1bc56772c9ed6d0255/numpy-2.3.4-cp312-cp312-win32.whl", hash = "sha256:a49d797192a8d950ca59ee2d0337a4d804f713bb5c3c50e8db26d49666e351dc", size = 6291380, upload-time = "2025-10-15T16:16:03.938Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a8/566578b10d8d0e9955b1b6cd5db4e9d4592dd0026a941ff7994cedda030a/numpy-2.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:985f1e46358f06c2a09921e8921e2c98168ed4ae12ccd6e5e87a4f1857923f32", size = 12787999, upload-time = "2025-10-15T16:16:05.801Z" }, + { url = "https://files.pythonhosted.org/packages/58/22/9c903a957d0a8071b607f5b1bff0761d6e608b9a965945411f867d515db1/numpy-2.3.4-cp312-cp312-win_arm64.whl", hash = "sha256:4635239814149e06e2cb9db3dd584b2fa64316c96f10656983b8026a82e6e4db", size = 10197412, upload-time = "2025-10-15T16:16:07.854Z" }, ] [[package]] name = "openai" -version = "1.98.0" -source = { registry = "https://pypi.org/simple/" } +version = "1.109.1" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "distro" }, @@ -609,15 +742,15 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d8/9d/52eadb15c92802711d6b6cf00df3a6d0d18b588f4c5ba5ff210c6419fc03/openai-1.98.0.tar.gz", hash = "sha256:3ee0fcc50ae95267fd22bd1ad095ba5402098f3df2162592e68109999f685427", size = 496695, upload-time = "2025-07-30T12:48:03.701Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/a1/a303104dc55fc546a3f6914c842d3da471c64eec92043aef8f652eb6c524/openai-1.109.1.tar.gz", hash = "sha256:d173ed8dbca665892a6db099b4a2dfac624f94d20a93f46eb0b56aae940ed869", size = 564133, upload-time = "2025-09-24T13:00:53.075Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a8/fe/f64631075b3d63a613c0d8ab761d5941631a470f6fa87eaaee1aa2b4ec0c/openai-1.98.0-py3-none-any.whl", hash = "sha256:b99b794ef92196829120e2df37647722104772d2a74d08305df9ced5f26eae34", size = 767713, upload-time = "2025-07-30T12:48:01.264Z" }, + { url = "https://files.pythonhosted.org/packages/1d/2a/7dd3d207ec669cacc1f186fd856a0f61dbc255d24f6fdc1a6715d6051b0f/openai-1.109.1-py3-none-any.whl", hash = "sha256:6bcaf57086cf59159b8e27447e4e7dd019db5d29a438072fbd49c290c7e65315", size = 948627, upload-time = "2025-09-24T13:00:50.754Z" }, ] [[package]] name = "openpyxl" version = "3.1.5" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "et-xmlfile" }, ] @@ -626,37 +759,233 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910, upload-time = "2024-06-28T14:03:41.161Z" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.34.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4d/5e/94a8cb759e4e409022229418294e098ca7feca00eb3c467bb20cbd329bda/opentelemetry_api-1.34.1.tar.gz", hash = "sha256:64f0bd06d42824843731d05beea88d4d4b6ae59f9fe347ff7dfa2cc14233bbb3", size = 64987, upload-time = "2025-06-10T08:55:19.818Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/3a/2ba85557e8dc024c0842ad22c570418dc02c36cbd1ab4b832a93edf071b8/opentelemetry_api-1.34.1-py3-none-any.whl", hash = "sha256:b7df4cb0830d5a6c29ad0c0691dbae874d8daefa934b8b1d642de48323d32a8c", size = 65767, upload-time = "2025-06-10T08:54:56.717Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp" +version = "1.34.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-exporter-otlp-proto-grpc" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/44/ba/786b4de7e39d88043622d901b92c4485835f43e0be76c2824d2687911bc2/opentelemetry_exporter_otlp-1.34.1.tar.gz", hash = "sha256:71c9ad342d665d9e4235898d205db17c5764cd7a69acb8a5dcd6d5e04c4c9988", size = 6173, upload-time = "2025-06-10T08:55:21.595Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/c1/259b8d8391c968e8f005d8a0ccefcb41aeef64cf55905cd0c0db4e22aaee/opentelemetry_exporter_otlp-1.34.1-py3-none-any.whl", hash = "sha256:f4a453e9cde7f6362fd4a090d8acf7881d1dc585540c7b65cbd63e36644238d4", size = 7040, upload-time = "2025-06-10T08:54:59.655Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.34.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-proto" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/f0/ff235936ee40db93360233b62da932d4fd9e8d103cd090c6bcb9afaf5f01/opentelemetry_exporter_otlp_proto_common-1.34.1.tar.gz", hash = "sha256:b59a20a927facd5eac06edaf87a07e49f9e4a13db487b7d8a52b37cb87710f8b", size = 20817, upload-time = "2025-06-10T08:55:22.55Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/e8/8b292a11cc8d8d87ec0c4089ae21b6a58af49ca2e51fa916435bc922fdc7/opentelemetry_exporter_otlp_proto_common-1.34.1-py3-none-any.whl", hash = "sha256:8e2019284bf24d3deebbb6c59c71e6eef3307cd88eff8c633e061abba33f7e87", size = 18834, upload-time = "2025-06-10T08:55:00.806Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-grpc" +version = "1.34.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "grpcio" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/41/f7/bb63837a3edb9ca857aaf5760796874e7cecddc88a2571b0992865a48fb6/opentelemetry_exporter_otlp_proto_grpc-1.34.1.tar.gz", hash = "sha256:7c841b90caa3aafcfc4fee58487a6c71743c34c6dc1787089d8b0578bbd794dd", size = 22566, upload-time = "2025-06-10T08:55:23.214Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b4/42/0a4dd47e7ef54edf670c81fc06a83d68ea42727b82126a1df9dd0477695d/opentelemetry_exporter_otlp_proto_grpc-1.34.1-py3-none-any.whl", hash = "sha256:04bb8b732b02295be79f8a86a4ad28fae3d4ddb07307a98c7aa6f331de18cca6", size = 18615, upload-time = "2025-06-10T08:55:02.214Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.34.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/8f/954bc725961cbe425a749d55c0ba1df46832a5999eae764d1a7349ac1c29/opentelemetry_exporter_otlp_proto_http-1.34.1.tar.gz", hash = "sha256:aaac36fdce46a8191e604dcf632e1f9380c7d5b356b27b3e0edb5610d9be28ad", size = 15351, upload-time = "2025-06-10T08:55:24.657Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/54/b05251c04e30c1ac70cf4a7c5653c085dfcf2c8b98af71661d6a252adc39/opentelemetry_exporter_otlp_proto_http-1.34.1-py3-none-any.whl", hash = "sha256:5251f00ca85872ce50d871f6d3cc89fe203b94c3c14c964bbdc3883366c705d8", size = 17744, upload-time = "2025-06-10T08:55:03.802Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation" +version = "0.55b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "packaging" }, + { name = "wrapt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/69/d8995f229ddf4d98b9c85dd126aeca03dd1742f6dc5d3bc0d2f6dae1535c/opentelemetry_instrumentation-0.55b1.tar.gz", hash = "sha256:2dc50aa207b9bfa16f70a1a0571e011e737a9917408934675b89ef4d5718c87b", size = 28552, upload-time = "2025-06-10T08:58:15.312Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/7d/8ddfda1506c2fcca137924d5688ccabffa1aed9ec0955b7d0772de02cec3/opentelemetry_instrumentation-0.55b1-py3-none-any.whl", hash = "sha256:cbb1496b42bc394e01bc63701b10e69094e8564e281de063e4328d122cc7a97e", size = 31108, upload-time = "2025-06-10T08:57:14.355Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-asgi" +version = "0.55b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "asgiref" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/51/4a/900ea42d36757e3b7219f873d3d16358107da43fcb8d7f11a2b1d0bb56a0/opentelemetry_instrumentation_asgi-0.55b1.tar.gz", hash = "sha256:615cde388dd3af4d0e52629a6c75828253618aebcc6e65d93068463811528606", size = 24356, upload-time = "2025-06-10T08:58:19.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/45/b5f78f0456f8e2e2ec152d7b6496197f5661c7ca49f610fe19c63b350aa4/opentelemetry_instrumentation_asgi-0.55b1-py3-none-any.whl", hash = "sha256:186620f7d0a71c8c817c5cbe91c80faa8f9c50967d458b8131c5694e21eb8583", size = 16402, upload-time = "2025-06-10T08:57:22.034Z" }, +] + +[[package]] +name = "opentelemetry-instrumentation-fastapi" +version = "0.55b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-instrumentation-asgi" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2b/76/0df9cdff4cce18b1967e97152d419e2325c307ff96eb6ba8e69294690c18/opentelemetry_instrumentation_fastapi-0.55b1.tar.gz", hash = "sha256:bb9f8c13a053e7ff7da221248067529cc320e9308d57f3908de0afa36f6c5744", size = 20275, upload-time = "2025-06-10T08:58:29.281Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/6e/d608a9336ede3d15869c70ebdd4ec670f774641104b0873bb973bce9d822/opentelemetry_instrumentation_fastapi-0.55b1-py3-none-any.whl", hash = "sha256:af4c09aebb0bd6b4a0881483b175e76547d2bc96329c94abfb794bf44f29f6bb", size = 12713, upload-time = "2025-06-10T08:57:39.712Z" }, +] + +[[package]] +name = "opentelemetry-propagator-aws-xray" +version = "1.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f4/31/40004e9e55b1e5694ef3a7526f0b7637df44196fc68a8b7d248a3684680f/opentelemetry_propagator_aws_xray-1.0.2.tar.gz", hash = "sha256:6b2cee5479d2ef0172307b66ed2ed151f598a0fd29b3c01133ac87ca06326260", size = 10994, upload-time = "2024-08-05T17:45:57.601Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/89/849a0847871fd9745315896ad9e23d6479db84d90b8b36c4c26dc46e92b8/opentelemetry_propagator_aws_xray-1.0.2-py3-none-any.whl", hash = "sha256:1c99181ee228e99bddb638a0c911a297fa21f1c3a0af951f841e79919b5f1934", size = 10856, upload-time = "2024-08-05T17:45:56.492Z" }, +] + +[[package]] +name = "opentelemetry-proto" +version = "1.34.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/b3/c3158dd012463bb7c0eb7304a85a6f63baeeb5b4c93a53845cf89f848c7e/opentelemetry_proto-1.34.1.tar.gz", hash = "sha256:16286214e405c211fc774187f3e4bbb1351290b8dfb88e8948af209ce85b719e", size = 34344, upload-time = "2025-06-10T08:55:32.25Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/ab/4591bfa54e946350ce8b3f28e5c658fe9785e7cd11e9c11b1671a867822b/opentelemetry_proto-1.34.1-py3-none-any.whl", hash = "sha256:eb4bb5ac27f2562df2d6857fc557b3a481b5e298bc04f94cc68041f00cebcbd2", size = 55692, upload-time = "2025-06-10T08:55:14.904Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.34.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/41/fe20f9036433da8e0fcef568984da4c1d1c771fa072ecd1a4d98779dccdd/opentelemetry_sdk-1.34.1.tar.gz", hash = "sha256:8091db0d763fcd6098d4781bbc80ff0971f94e260739aa6afe6fd379cdf3aa4d", size = 159441, upload-time = "2025-06-10T08:55:33.028Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/1b/def4fe6aa73f483cabf4c748f4c25070d5f7604dcc8b52e962983491b29e/opentelemetry_sdk-1.34.1-py3-none-any.whl", hash = "sha256:308effad4059562f1d92163c61c8141df649da24ce361827812c40abb2a1e96e", size = 118477, upload-time = "2025-06-10T08:55:16.02Z" }, +] + +[[package]] +name = "opentelemetry-sdk-extension-aws" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-sdk" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/b3/825c93fe4c238845f1356297abea33d03b2adaafb5ae98fc257b394de124/opentelemetry_sdk_extension_aws-2.1.0.tar.gz", hash = "sha256:ff68ddecc1910f62c019d22ec0f7461713ead7f662d6a2304d4089c1a0b20416", size = 16334, upload-time = "2024-12-24T15:01:57.387Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/61/47a6a43b7935d54b5734fbf3fb0357dd5a7d0dfaa9677b7318518fe8d507/opentelemetry_sdk_extension_aws-2.1.0-py3-none-any.whl", hash = "sha256:c7cf6efc275d2c24108a468d954287ce5aab9733bac816a080cfb3117374e63a", size = 18776, upload-time = "2024-12-24T15:01:56.053Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.55b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5d/f0/f33458486da911f47c4aa6db9bda308bb80f3236c111bf848bd870c16b16/opentelemetry_semantic_conventions-0.55b1.tar.gz", hash = "sha256:ef95b1f009159c28d7a7849f5cbc71c4c34c845bb514d66adfdf1b3fff3598b3", size = 119829, upload-time = "2025-06-10T08:55:33.881Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/89/267b0af1b1d0ba828f0e60642b6a5116ac1fd917cde7fc02821627029bd1/opentelemetry_semantic_conventions-0.55b1-py3-none-any.whl", hash = "sha256:5da81dfdf7d52e3d37f8fe88d5e771e191de924cfff5f550ab0b8f7b2409baed", size = 196223, upload-time = "2025-06-10T08:55:17.638Z" }, +] + +[[package]] +name = "opentelemetry-util-http" +version = "0.55b1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/12/f7/3cc23b95921177cdda6d61d3475659b86bac335ed02dd19f994a850ceee3/opentelemetry_util_http-0.55b1.tar.gz", hash = "sha256:29e119c1f6796cccf5fc2aedb55274435cde5976d0ac3fec3ca20a80118f821e", size = 8038, upload-time = "2025-06-10T08:58:53.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/0a/49c5464efc0e6f6aa94a9ec054879efe2a59d7c1f6aacc500665b3d8afdc/opentelemetry_util_http-0.55b1-py3-none-any.whl", hash = "sha256:e134218df8ff010e111466650e5f019496b29c3b4f1b7de0e8ff8ebeafeebdf4", size = 7299, upload-time = "2025-06-10T08:58:11.785Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + [[package]] name = "pandas" -version = "2.3.1" -source = { registry = "https://pypi.org/simple/" } +version = "2.3.3" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "python-dateutil" }, { name = "pytz" }, { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d1/6f/75aa71f8a14267117adeeed5d21b204770189c0a0025acbdc03c337b28fc/pandas-2.3.1.tar.gz", hash = "sha256:0a95b9ac964fe83ce317827f80304d37388ea77616b1425f0ae41c9d2d0d7bb2", size = 4487493, upload-time = "2025-07-07T19:20:04.079Z" } +sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/32/ed/ff0a67a2c5505e1854e6715586ac6693dd860fbf52ef9f81edee200266e7/pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9026bd4a80108fac2239294a15ef9003c4ee191a0f64b90f170b40cfb7cf2d22", size = 11531393, upload-time = "2025-07-07T19:19:12.245Z" }, - { url = "https://files.pythonhosted.org/packages/c7/db/d8f24a7cc9fb0972adab0cc80b6817e8bef888cfd0024eeb5a21c0bb5c4a/pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6de8547d4fdb12421e2d047a2c446c623ff4c11f47fddb6b9169eb98ffba485a", size = 10668750, upload-time = "2025-07-07T19:19:14.612Z" }, - { url = "https://files.pythonhosted.org/packages/0f/b0/80f6ec783313f1e2356b28b4fd8d2148c378370045da918c73145e6aab50/pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782647ddc63c83133b2506912cc6b108140a38a37292102aaa19c81c83db2928", size = 11342004, upload-time = "2025-07-07T19:19:16.857Z" }, - { url = "https://files.pythonhosted.org/packages/e9/e2/20a317688435470872885e7fc8f95109ae9683dec7c50be29b56911515a5/pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba6aff74075311fc88504b1db890187a3cd0f887a5b10f5525f8e2ef55bfdb9", size = 12050869, upload-time = "2025-07-07T19:19:19.265Z" }, - { url = "https://files.pythonhosted.org/packages/55/79/20d746b0a96c67203a5bee5fb4e00ac49c3e8009a39e1f78de264ecc5729/pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e5635178b387bd2ba4ac040f82bc2ef6e6b500483975c4ebacd34bec945fda12", size = 12750218, upload-time = "2025-07-07T19:19:21.547Z" }, - { url = "https://files.pythonhosted.org/packages/7c/0f/145c8b41e48dbf03dd18fdd7f24f8ba95b8254a97a3379048378f33e7838/pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f3bf5ec947526106399a9e1d26d40ee2b259c66422efdf4de63c848492d91bb", size = 13416763, upload-time = "2025-07-07T19:19:23.939Z" }, - { url = "https://files.pythonhosted.org/packages/b2/c0/54415af59db5cdd86a3d3bf79863e8cc3fa9ed265f0745254061ac09d5f2/pandas-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:1c78cf43c8fde236342a1cb2c34bcff89564a7bfed7e474ed2fffa6aed03a956", size = 10987482, upload-time = "2025-07-07T19:19:42.699Z" }, - { url = "https://files.pythonhosted.org/packages/48/64/2fd2e400073a1230e13b8cd604c9bc95d9e3b962e5d44088ead2e8f0cfec/pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8dfc17328e8da77be3cf9f47509e5637ba8f137148ed0e9b5241e1baf526e20a", size = 12029159, upload-time = "2025-07-07T19:19:26.362Z" }, - { url = "https://files.pythonhosted.org/packages/d8/0a/d84fd79b0293b7ef88c760d7dca69828d867c89b6d9bc52d6a27e4d87316/pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ec6c851509364c59a5344458ab935e6451b31b818be467eb24b0fe89bd05b6b9", size = 11393287, upload-time = "2025-07-07T19:19:29.157Z" }, - { url = "https://files.pythonhosted.org/packages/50/ae/ff885d2b6e88f3c7520bb74ba319268b42f05d7e583b5dded9837da2723f/pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:911580460fc4884d9b05254b38a6bfadddfcc6aaef856fb5859e7ca202e45275", size = 11309381, upload-time = "2025-07-07T19:19:31.436Z" }, - { url = "https://files.pythonhosted.org/packages/85/86/1fa345fc17caf5d7780d2699985c03dbe186c68fee00b526813939062bb0/pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f4d6feeba91744872a600e6edbbd5b033005b431d5ae8379abee5bcfa479fab", size = 11883998, upload-time = "2025-07-07T19:19:34.267Z" }, - { url = "https://files.pythonhosted.org/packages/81/aa/e58541a49b5e6310d89474333e994ee57fea97c8aaa8fc7f00b873059bbf/pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fe37e757f462d31a9cd7580236a82f353f5713a80e059a29753cf938c6775d96", size = 12704705, upload-time = "2025-07-07T19:19:36.856Z" }, - { url = "https://files.pythonhosted.org/packages/d5/f9/07086f5b0f2a19872554abeea7658200824f5835c58a106fa8f2ae96a46c/pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444", size = 13189044, upload-time = "2025-07-07T19:19:39.999Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, + { url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, + { url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, + { url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, + { url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, + { url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, ] [[package]] name = "passlib" version = "1.7.4" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/b6/06/9da9ee59a67fae7761aab3ccc84fa4f3f33f125b370f1ccdb915bf967c11/passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04", size = 689844, upload-time = "2020-10-08T19:00:52.121Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/3b/a4/ab6b7589382ca3df236e03faa71deac88cae040af60c071a78d254a62172/passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1", size = 525554, upload-time = "2020-10-08T19:00:49.856Z" }, @@ -668,46 +997,91 @@ bcrypt = [ ] [[package]] -name = "psycopg2-binary" -version = "2.9.10" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" } +name = "pillow" +version = "10.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059, upload-time = "2024-07-01T09:48:43.583Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699, upload-time = "2024-10-16T11:21:42.841Z" }, - { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245, upload-time = "2024-10-16T11:21:51.989Z" }, - { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631, upload-time = "2024-10-16T11:21:57.584Z" }, - { url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140, upload-time = "2024-10-16T11:22:02.005Z" }, - { url = "https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762, upload-time = "2024-10-16T11:22:06.412Z" }, - { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967, upload-time = "2024-10-16T11:22:11.583Z" }, - { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326, upload-time = "2024-10-16T11:22:16.406Z" }, - { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712, upload-time = "2024-10-16T11:22:21.366Z" }, - { url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155, upload-time = "2024-10-16T11:22:25.684Z" }, - { url = "https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356, upload-time = "2024-10-16T11:22:30.562Z" }, - { url = "https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224, upload-time = "2025-01-04T20:09:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350, upload-time = "2024-07-01T09:46:17.177Z" }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980, upload-time = "2024-07-01T09:46:19.169Z" }, + { url = "https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799, upload-time = "2024-07-01T09:46:21.883Z" }, + { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973, upload-time = "2024-07-01T09:46:24.321Z" }, + { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054, upload-time = "2024-07-01T09:46:26.825Z" }, + { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484, upload-time = "2024-07-01T09:46:29.355Z" }, + { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375, upload-time = "2024-07-01T09:46:31.756Z" }, + { url = "https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 4608773, upload-time = "2024-07-01T09:46:33.73Z" }, + { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690, upload-time = "2024-07-01T09:46:36.587Z" }, + { url = "https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951, upload-time = "2024-07-01T09:46:38.777Z" }, + { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427, upload-time = "2024-07-01T09:46:43.15Z" }, +] + +[[package]] +name = "protobuf" +version = "5.29.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/29/d09e70352e4e88c9c7a198d5645d7277811448d76c23b00345670f7c8a38/protobuf-5.29.5.tar.gz", hash = "sha256:bc1463bafd4b0929216c35f437a8e28731a2b7fe3d98bb77a600efced5a15c84", size = 425226, upload-time = "2025-05-28T23:51:59.82Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/11/6e40e9fc5bba02988a214c07cf324595789ca7820160bfd1f8be96e48539/protobuf-5.29.5-cp310-abi3-win32.whl", hash = "sha256:3f1c6468a2cfd102ff4703976138844f78ebd1fb45f49011afc5139e9e283079", size = 422963, upload-time = "2025-05-28T23:51:41.204Z" }, + { url = "https://files.pythonhosted.org/packages/81/7f/73cefb093e1a2a7c3ffd839e6f9fcafb7a427d300c7f8aef9c64405d8ac6/protobuf-5.29.5-cp310-abi3-win_amd64.whl", hash = "sha256:3f76e3a3675b4a4d867b52e4a5f5b78a2ef9565549d4037e06cf7b0942b1d3fc", size = 434818, upload-time = "2025-05-28T23:51:44.297Z" }, + { url = "https://files.pythonhosted.org/packages/dd/73/10e1661c21f139f2c6ad9b23040ff36fee624310dc28fba20d33fdae124c/protobuf-5.29.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e38c5add5a311f2a6eb0340716ef9b039c1dfa428b28f25a7838ac329204a671", size = 418091, upload-time = "2025-05-28T23:51:45.907Z" }, + { url = "https://files.pythonhosted.org/packages/6c/04/98f6f8cf5b07ab1294c13f34b4e69b3722bb609c5b701d6c169828f9f8aa/protobuf-5.29.5-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:fa18533a299d7ab6c55a238bf8629311439995f2e7eca5caaff08663606e9015", size = 319824, upload-time = "2025-05-28T23:51:47.545Z" }, + { url = "https://files.pythonhosted.org/packages/85/e4/07c80521879c2d15f321465ac24c70efe2381378c00bf5e56a0f4fbac8cd/protobuf-5.29.5-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:63848923da3325e1bf7e9003d680ce6e14b07e55d0473253a690c3a8b8fd6e61", size = 319942, upload-time = "2025-05-28T23:51:49.11Z" }, + { url = "https://files.pythonhosted.org/packages/7e/cc/7e77861000a0691aeea8f4566e5d3aa716f2b1dece4a24439437e41d3d25/protobuf-5.29.5-py3-none-any.whl", hash = "sha256:6cf42630262c59b2d8de33954443d94b746c952b01434fc58a417fdbd2e84bd5", size = 172823, upload-time = "2025-05-28T23:51:58.157Z" }, +] + +[[package]] +name = "psutil" +version = "5.9.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/90/c7/6dc0a455d111f68ee43f27793971cf03fe29b6ef972042549db29eec39a2/psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c", size = 503247, upload-time = "2024-01-19T20:47:09.517Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/e3/07ae864a636d70a8a6f58da27cb1179192f1140d5d1da10886ade9405797/psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81", size = 248702, upload-time = "2024-01-19T20:47:36.303Z" }, + { url = "https://files.pythonhosted.org/packages/b3/bd/28c5f553667116b2598b9cc55908ec435cb7f77a34f2bff3e3ca765b0f78/psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421", size = 285242, upload-time = "2024-01-19T20:47:39.65Z" }, + { url = "https://files.pythonhosted.org/packages/c5/4f/0e22aaa246f96d6ac87fe5ebb9c5a693fbe8877f537a1022527c47ca43c5/psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4", size = 288191, upload-time = "2024-01-19T20:47:43.078Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f5/2aa3a4acdc1e5940b59d421742356f133185667dd190b166dbcfcf5d7b43/psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0", size = 251252, upload-time = "2024-01-19T20:47:52.88Z" }, + { url = "https://files.pythonhosted.org/packages/93/52/3e39d26feae7df0aa0fd510b14012c3678b36ed068f7d78b8d8784d61f0e/psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf", size = 255090, upload-time = "2024-01-19T20:47:56.019Z" }, + { url = "https://files.pythonhosted.org/packages/05/33/2d74d588408caedd065c2497bdb5ef83ce6082db01289a1e1147f6639802/psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8", size = 249898, upload-time = "2024-01-19T20:47:59.238Z" }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, + { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, ] [[package]] name = "pycparser" -version = "2.22" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +version = "2.23" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, ] [[package]] name = "pydantic" -version = "2.11.7" -source = { registry = "https://pypi.org/simple/" } +version = "2.12.3" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-types" }, { name = "pydantic-core" }, { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/00/dd/4325abf92c39ba8623b5af936ddb36ffcfe0beae70405d456ab1fb2f5b8c/pydantic-2.11.7.tar.gz", hash = "sha256:d989c3c6cb79469287b1569f7447a17848c998458d49ebe294e975b9baf0f0db", size = 788350, upload-time = "2025-06-14T08:33:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/1e/4f0a3233767010308f2fd6bd0814597e3f63f1dc98304a9112b8759df4ff/pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74", size = 819383, upload-time = "2025-10-17T15:04:21.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/c0/ec2b1c8712ca690e5d61979dee872603e92b8a32f94cc1b72d53beab008a/pydantic-2.11.7-py3-none-any.whl", hash = "sha256:dde5df002701f6de26248661f6835bbe296a47bf73990135c7d07ce741b9623b", size = 444782, upload-time = "2025-06-14T08:33:14.905Z" }, + { url = "https://files.pythonhosted.org/packages/a1/6b/83661fa77dcefa195ad5f8cd9af3d1a7450fd57cc883ad04d65446ac2029/pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf", size = 462431, upload-time = "2025-10-17T15:04:19.346Z" }, ] [package.optional-dependencies] @@ -717,36 +1091,37 @@ email = [ [[package]] name = "pydantic-core" -version = "2.33.2" -source = { registry = "https://pypi.org/simple/" } +version = "2.41.4" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" } +sdist = { url = "https://files.pythonhosted.org/packages/df/18/d0944e8eaaa3efd0a91b0f1fc537d3be55ad35091b6a87638211ba691964/pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5", size = 457557, upload-time = "2025-10-14T10:23:47.909Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" }, - { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" }, - { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" }, - { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" }, - { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" }, - { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" }, - { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" }, - { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" }, - { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" }, - { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" }, - { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" }, - { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" }, - { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" }, - { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" }, - { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" }, + { url = "https://files.pythonhosted.org/packages/e9/81/d3b3e95929c4369d30b2a66a91db63c8ed0a98381ae55a45da2cd1cc1288/pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887", size = 2099043, upload-time = "2025-10-14T10:20:28.561Z" }, + { url = "https://files.pythonhosted.org/packages/58/da/46fdac49e6717e3a94fc9201403e08d9d61aa7a770fab6190b8740749047/pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2", size = 1910699, upload-time = "2025-10-14T10:20:30.217Z" }, + { url = "https://files.pythonhosted.org/packages/1e/63/4d948f1b9dd8e991a5a98b77dd66c74641f5f2e5225fee37994b2e07d391/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999", size = 1952121, upload-time = "2025-10-14T10:20:32.246Z" }, + { url = "https://files.pythonhosted.org/packages/b2/a7/e5fc60a6f781fc634ecaa9ecc3c20171d238794cef69ae0af79ac11b89d7/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4", size = 2041590, upload-time = "2025-10-14T10:20:34.332Z" }, + { url = "https://files.pythonhosted.org/packages/70/69/dce747b1d21d59e85af433428978a1893c6f8a7068fa2bb4a927fba7a5ff/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f", size = 2219869, upload-time = "2025-10-14T10:20:35.965Z" }, + { url = "https://files.pythonhosted.org/packages/83/6a/c070e30e295403bf29c4df1cb781317b6a9bac7cd07b8d3acc94d501a63c/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b", size = 2345169, upload-time = "2025-10-14T10:20:37.627Z" }, + { url = "https://files.pythonhosted.org/packages/f0/83/06d001f8043c336baea7fd202a9ac7ad71f87e1c55d8112c50b745c40324/pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47", size = 2070165, upload-time = "2025-10-14T10:20:39.246Z" }, + { url = "https://files.pythonhosted.org/packages/14/0a/e567c2883588dd12bcbc110232d892cf385356f7c8a9910311ac997ab715/pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970", size = 2189067, upload-time = "2025-10-14T10:20:41.015Z" }, + { url = "https://files.pythonhosted.org/packages/f4/1d/3d9fca34273ba03c9b1c5289f7618bc4bd09c3ad2289b5420481aa051a99/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed", size = 2132997, upload-time = "2025-10-14T10:20:43.106Z" }, + { url = "https://files.pythonhosted.org/packages/52/70/d702ef7a6cd41a8afc61f3554922b3ed8d19dd54c3bd4bdbfe332e610827/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8", size = 2307187, upload-time = "2025-10-14T10:20:44.849Z" }, + { url = "https://files.pythonhosted.org/packages/68/4c/c06be6e27545d08b802127914156f38d10ca287a9e8489342793de8aae3c/pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431", size = 2305204, upload-time = "2025-10-14T10:20:46.781Z" }, + { url = "https://files.pythonhosted.org/packages/b0/e5/35ae4919bcd9f18603419e23c5eaf32750224a89d41a8df1a3704b69f77e/pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd", size = 1972536, upload-time = "2025-10-14T10:20:48.39Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/49c5bb6d2a49eb2ee3647a93e3dae7080c6409a8a7558b075027644e879c/pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff", size = 2031132, upload-time = "2025-10-14T10:20:50.421Z" }, + { url = "https://files.pythonhosted.org/packages/06/23/936343dbcba6eec93f73e95eb346810fc732f71ba27967b287b66f7b7097/pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8", size = 1969483, upload-time = "2025-10-14T10:20:52.35Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/ae937e5a831b7c0dc646b2ef788c27cd003894882415300ed21927c21efa/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537", size = 2112087, upload-time = "2025-10-14T10:22:56.818Z" }, + { url = "https://files.pythonhosted.org/packages/5e/db/6db8073e3d32dae017da7e0d16a9ecb897d0a4d92e00634916e486097961/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94", size = 1920387, upload-time = "2025-10-14T10:22:59.342Z" }, + { url = "https://files.pythonhosted.org/packages/0d/c1/dd3542d072fcc336030d66834872f0328727e3b8de289c662faa04aa270e/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c", size = 1951495, upload-time = "2025-10-14T10:23:02.089Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c6/db8d13a1f8ab3f1eb08c88bd00fd62d44311e3456d1e85c0e59e0a0376e7/pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335", size = 2139008, upload-time = "2025-10-14T10:23:04.539Z" }, ] [[package]] name = "pydantic-i18n" version = "0.4.5" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, ] @@ -755,10 +1130,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7e/3b/4d2630503016cedef1751bc9ddea85b437fbfc9ca65d6af87285d76b7c2c/pydantic_i18n-0.4.5-py3-none-any.whl", hash = "sha256:592ae6b4fee13eb0193dc0c7bdc1e629d2ab1d732d5508368412a338b16cfece", size = 10436, upload-time = "2024-09-22T15:29:38.397Z" }, ] +[[package]] +name = "pydantic-settings" +version = "2.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/c5/dbbc27b814c71676593d1c3f718e6cd7d4f00652cefa24b75f7aa3efb25e/pydantic_settings-2.11.0.tar.gz", hash = "sha256:d0e87a1c7d33593beb7194adb8470fc426e95ba02af83a0f23474a04c9a08180", size = 188394, upload-time = "2025-09-24T14:19:11.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/d6/887a1ff844e64aa823fb4905978d882a633cfe295c32eacad582b78a7d8b/pydantic_settings-2.11.0-py3-none-any.whl", hash = "sha256:fe2cea3413b9530d10f3a5875adffb17ada5c1e1bab0b2885546d7310415207c", size = 48608, upload-time = "2025-09-24T14:19:10.015Z" }, +] + [[package]] name = "pydash" version = "8.0.5" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] @@ -770,7 +1159,7 @@ wheels = [ [[package]] name = "pyjwt" version = "2.10.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, @@ -779,7 +1168,7 @@ wheels = [ [[package]] name = "python-dateutil" version = "2.9.0.post0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] @@ -791,7 +1180,7 @@ wheels = [ [[package]] name = "python-dotenv" version = "1.1.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" }, @@ -800,7 +1189,7 @@ wheels = [ [[package]] name = "python-multipart" version = "0.0.20" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, @@ -809,31 +1198,129 @@ wheels = [ [[package]] name = "pytz" version = "2025.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, ] +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, +] + +[[package]] +name = "referencing" +version = "0.37.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, +] + +[[package]] +name = "regex" +version = "2025.10.22" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/90/f2/97d95db85e11cc85f97581cfc8b4a0405c7fb6099003c23ffaaa0cb4f31d/regex-2025.10.22.tar.gz", hash = "sha256:cc50db098b9d678ace33176a3ab4099616726ae4680fee6ac292302e8950fc4c", size = 400985, upload-time = "2025-10-21T00:48:37.365Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/a8/3380a8cb20c255878a9f1165b33c4d6a31d8f5417650c22b73bdcaadd281/regex-2025.10.22-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8b66971471306def7e6baf18ead3f416347d56eb5e295f8a75014d13be92e9fd", size = 489185, upload-time = "2025-10-21T00:45:52.929Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1c/e1eb33fc1f3a7851cc0f53b588790e14edeeb618e80fd5fd7ea987f9957d/regex-2025.10.22-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8c93b179960f4f2f517fe47da9984848d8342a6903b4d24649f4ee9bd22ccd3c", size = 291124, upload-time = "2025-10-21T00:45:54.934Z" }, + { url = "https://files.pythonhosted.org/packages/1b/21/6cc0fe9d4ebd7d6e19c08e77f41082103d52c671eb7eb01cc032e9bccbd4/regex-2025.10.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9b4fa8d221b5db3226029978c8c3f66f2e4c6d871e94b726bcd357e746b7a63", size = 288796, upload-time = "2025-10-21T00:45:56.248Z" }, + { url = "https://files.pythonhosted.org/packages/23/b0/d74069acbcc60b54977e693dd673099352b024f7f037cec201b0d96b7d99/regex-2025.10.22-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2a0d4e5f63c8de13fbab94d4a25cc6b02f1007b84e2d4c74f48c242eacb06f1", size = 798441, upload-time = "2025-10-21T00:45:57.896Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f3/69cd09c226ce0fc6a5cf48b5dea716c0139abed41d02fa81fa774e56e713/regex-2025.10.22-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d8df6c82c544eed8314667a1fb8f705a9a802a9d6368045354319588ff56708d", size = 864038, upload-time = "2025-10-21T00:46:00.298Z" }, + { url = "https://files.pythonhosted.org/packages/8e/b0/77bd0e6838f579cc5a02b9e18bc0a759d0ed85b9a8d4d44ad6d3478a40ec/regex-2025.10.22-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a114c2735369334a755a844abd15d5a12716635cc4677fb4e6d793ce369310f6", size = 912054, upload-time = "2025-10-21T00:46:02.358Z" }, + { url = "https://files.pythonhosted.org/packages/2d/41/c320c3408050eefa516d352d9e05fd4d6af5da7ec0daea56d1e68bb9096c/regex-2025.10.22-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5d53115edada199723b831a49c7e1585ddda7940fb2ba7a78d12bf22e92f23e2", size = 803374, upload-time = "2025-10-21T00:46:03.837Z" }, + { url = "https://files.pythonhosted.org/packages/88/ed/0942c27223ce6bff95087f4859991634d995d6e186807e038fd1c2c3759c/regex-2025.10.22-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b4a7d813fdffe99ae0ecc17c80f652c8946c05a6a090eb2560719d02dfdb4b0", size = 787714, upload-time = "2025-10-21T00:46:05.934Z" }, + { url = "https://files.pythonhosted.org/packages/1c/40/10e2657ed24966742efd68eeb566e26af1eea3925dfe761ce14260a69161/regex-2025.10.22-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:81fb24976e3f71d765edec8a3175abb10359918d8997ca6a756fd68dd3c051f6", size = 858392, upload-time = "2025-10-21T00:46:07.801Z" }, + { url = "https://files.pythonhosted.org/packages/f3/48/bd382281e2f3bcfc2f355b5283ef16d8175b6df4cb6ed532529b715baf07/regex-2025.10.22-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d881e96a443528a83f46ab69714befeb35f4d0caf359c43a606b82cb717a5df9", size = 850482, upload-time = "2025-10-21T00:46:09.893Z" }, + { url = "https://files.pythonhosted.org/packages/2e/5c/fdc0ac5eb3f21a6f19158cce3150e57a65d9770709b8521e09fe9febe813/regex-2025.10.22-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:42abc81ee54e06bef4dbc8e7b8394a57882c718ed3c6aabfea47e429feb94ee9", size = 789633, upload-time = "2025-10-21T00:46:11.687Z" }, + { url = "https://files.pythonhosted.org/packages/a2/ef/c2e63968c9130a17d79431ba8aa98ada02962435436ef506fb4cef139760/regex-2025.10.22-cp312-cp312-win32.whl", hash = "sha256:db30ab87b3d745b7e95e69099e1c4bf544c3f3800b9376b935943e86f650705a", size = 266060, upload-time = "2025-10-21T00:46:13.577Z" }, + { url = "https://files.pythonhosted.org/packages/5d/9d/57bc04978add42a62391f8082e94ec3a8c3448d49e349ede8c2c66ca0a55/regex-2025.10.22-cp312-cp312-win_amd64.whl", hash = "sha256:64190fa0432ed254416898ff3b687648e025445bfa357988f20f1332f651f650", size = 276928, upload-time = "2025-10-21T00:46:15.18Z" }, + { url = "https://files.pythonhosted.org/packages/89/50/760700909a618de1c2405f3a0557a3ec9b4eba516a261aa85fe973d3a354/regex-2025.10.22-cp312-cp312-win_arm64.whl", hash = "sha256:cdfc74d0af9b0cb9bd442619489582b32efc348db651a44967ba5fb71b8d3dee", size = 270103, upload-time = "2025-10-21T00:46:16.903Z" }, +] + [[package]] name = "requests" -version = "2.32.4" -source = { registry = "https://pypi.org/simple/" } +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "charset-normalizer" }, { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "rpds-py" +version = "0.27.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/dd/2c0cbe774744272b0ae725f44032c77bdcab6e8bcf544bffa3b6e70c8dba/rpds_py-0.27.1.tar.gz", hash = "sha256:26a1c73171d10b7acccbded82bf6a586ab8203601e565badc74bbbf8bc5a10f8", size = 27479, upload-time = "2025-08-27T12:16:36.024Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/fe/38de28dee5df58b8198c743fe2bea0c785c6d40941b9950bac4cdb71a014/rpds_py-0.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ae2775c1973e3c30316892737b91f9283f9908e3cc7625b9331271eaaed7dc90", size = 361887, upload-time = "2025-08-27T12:13:10.233Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/4b6c7eedc7dd90986bf0fab6ea2a091ec11c01b15f8ba0a14d3f80450468/rpds_py-0.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2643400120f55c8a96f7c9d858f7be0c88d383cd4653ae2cf0d0c88f668073e5", size = 345795, upload-time = "2025-08-27T12:13:11.65Z" }, + { url = "https://files.pythonhosted.org/packages/6f/0e/e650e1b81922847a09cca820237b0edee69416a01268b7754d506ade11ad/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:16323f674c089b0360674a4abd28d5042947d54ba620f72514d69be4ff64845e", size = 385121, upload-time = "2025-08-27T12:13:13.008Z" }, + { url = "https://files.pythonhosted.org/packages/1b/ea/b306067a712988e2bff00dcc7c8f31d26c29b6d5931b461aa4b60a013e33/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a1f4814b65eacac94a00fc9a526e3fdafd78e439469644032032d0d63de4881", size = 398976, upload-time = "2025-08-27T12:13:14.368Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0a/26dc43c8840cb8fe239fe12dbc8d8de40f2365e838f3d395835dde72f0e5/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ba32c16b064267b22f1850a34051121d423b6f7338a12b9459550eb2096e7ec", size = 525953, upload-time = "2025-08-27T12:13:15.774Z" }, + { url = "https://files.pythonhosted.org/packages/22/14/c85e8127b573aaf3a0cbd7fbb8c9c99e735a4a02180c84da2a463b766e9e/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5c20f33fd10485b80f65e800bbe5f6785af510b9f4056c5a3c612ebc83ba6cb", size = 407915, upload-time = "2025-08-27T12:13:17.379Z" }, + { url = "https://files.pythonhosted.org/packages/ed/7b/8f4fee9ba1fb5ec856eb22d725a4efa3deb47f769597c809e03578b0f9d9/rpds_py-0.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:466bfe65bd932da36ff279ddd92de56b042f2266d752719beb97b08526268ec5", size = 386883, upload-time = "2025-08-27T12:13:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/86/47/28fa6d60f8b74fcdceba81b272f8d9836ac0340570f68f5df6b41838547b/rpds_py-0.27.1-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:41e532bbdcb57c92ba3be62c42e9f096431b4cf478da9bc3bc6ce5c38ab7ba7a", size = 405699, upload-time = "2025-08-27T12:13:20.089Z" }, + { url = "https://files.pythonhosted.org/packages/d0/fd/c5987b5e054548df56953a21fe2ebed51fc1ec7c8f24fd41c067b68c4a0a/rpds_py-0.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f149826d742b406579466283769a8ea448eed82a789af0ed17b0cd5770433444", size = 423713, upload-time = "2025-08-27T12:13:21.436Z" }, + { url = "https://files.pythonhosted.org/packages/ac/ba/3c4978b54a73ed19a7d74531be37a8bcc542d917c770e14d372b8daea186/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80c60cfb5310677bd67cb1e85a1e8eb52e12529545441b43e6f14d90b878775a", size = 562324, upload-time = "2025-08-27T12:13:22.789Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6c/6943a91768fec16db09a42b08644b960cff540c66aab89b74be6d4a144ba/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7ee6521b9baf06085f62ba9c7a3e5becffbc32480d2f1b351559c001c38ce4c1", size = 593646, upload-time = "2025-08-27T12:13:24.122Z" }, + { url = "https://files.pythonhosted.org/packages/11/73/9d7a8f4be5f4396f011a6bb7a19fe26303a0dac9064462f5651ced2f572f/rpds_py-0.27.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a512c8263249a9d68cac08b05dd59d2b3f2061d99b322813cbcc14c3c7421998", size = 558137, upload-time = "2025-08-27T12:13:25.557Z" }, + { url = "https://files.pythonhosted.org/packages/6e/96/6772cbfa0e2485bcceef8071de7821f81aeac8bb45fbfd5542a3e8108165/rpds_py-0.27.1-cp312-cp312-win32.whl", hash = "sha256:819064fa048ba01b6dadc5116f3ac48610435ac9a0058bbde98e569f9e785c39", size = 221343, upload-time = "2025-08-27T12:13:26.967Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/c82f0faa9af1c6a64669f73a17ee0eeef25aff30bb9a1c318509efe45d84/rpds_py-0.27.1-cp312-cp312-win_amd64.whl", hash = "sha256:d9199717881f13c32c4046a15f024971a3b78ad4ea029e8da6b86e5aa9cf4594", size = 232497, upload-time = "2025-08-27T12:13:28.326Z" }, + { url = "https://files.pythonhosted.org/packages/e1/96/2817b44bd2ed11aebacc9251da03689d56109b9aba5e311297b6902136e2/rpds_py-0.27.1-cp312-cp312-win_arm64.whl", hash = "sha256:33aa65b97826a0e885ef6e278fbd934e98cdcfed80b63946025f01e2f5b29502", size = 222790, upload-time = "2025-08-27T12:13:29.71Z" }, +] + +[[package]] +name = "s3transfer" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "botocore" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/74/8d69dcb7a9efe8baa2046891735e5dfe433ad558ae23d9e3c14c633d1d58/s3transfer-0.14.0.tar.gz", hash = "sha256:eff12264e7c8b4985074ccce27a3b38a485bb7f7422cc8046fee9be4983e4125", size = 151547, upload-time = "2025-09-09T19:23:31.089Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/f0/ae7ca09223a81a1d890b2557186ea015f6e0502e9b8cb8e1813f1d8cfa4e/s3transfer-0.14.0-py3-none-any.whl", hash = "sha256:ea3b790c7077558ed1f02a3072fb3cb992bbbd253392f4b6e9e8976941c7d456", size = 85712, upload-time = "2025-09-09T19:23:30.041Z" }, ] [[package]] name = "six" version = "1.17.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, @@ -842,7 +1329,7 @@ wheels = [ [[package]] name = "sniffio" version = "1.3.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, @@ -851,7 +1338,7 @@ wheels = [ [[package]] name = "sqids" version = "0.5.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/33/5b/98c1b37109210631875092d9e7cb7aef3fda2f03780dd999fe5854afa5f3/sqids-0.5.2.tar.gz", hash = "sha256:5ac08f0c5c9b6814bc2e7c79ee5931e0849d25d95c50e415771b022a44f58af9", size = 18213, upload-time = "2025-05-13T16:36:35.644Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/7c/96/178018f3d5b871042e257e9e1db26c6aeb2a704e72cdc884cd2a8918ac2b/sqids-0.5.2-py3-none-any.whl", hash = "sha256:0089ba823e21fd44290c7225f02fb0b5140c36e41959c04d86d3f6f2513799be", size = 8870, upload-time = "2025-05-13T16:36:34.072Z" }, @@ -859,66 +1346,98 @@ wheels = [ [[package]] name = "sqlalchemy" -version = "2.0.42" -source = { registry = "https://pypi.org/simple/" } +version = "2.0.44" +source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/03/a0af991e3a43174d6b83fca4fb399745abceddd1171bdabae48ce877ff47/sqlalchemy-2.0.42.tar.gz", hash = "sha256:160bedd8a5c28765bd5be4dec2d881e109e33b34922e50a3b881a7681773ac5f", size = 9749972, upload-time = "2025-07-29T12:48:09.323Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/f2/840d7b9496825333f532d2e3976b8eadbf52034178aac53630d09fe6e1ef/sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22", size = 9819830, upload-time = "2025-10-10T14:39:12.935Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/7e/25d8c28b86730c9fb0e09156f601d7a96d1c634043bf8ba36513eb78887b/sqlalchemy-2.0.42-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:941804f55c7d507334da38133268e3f6e5b0340d584ba0f277dd884197f4ae8c", size = 2127905, upload-time = "2025-07-29T13:29:22.249Z" }, - { url = "https://files.pythonhosted.org/packages/e5/a1/9d8c93434d1d983880d976400fcb7895a79576bd94dca61c3b7b90b1ed0d/sqlalchemy-2.0.42-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d3d06a968a760ce2aa6a5889fefcbdd53ca935735e0768e1db046ec08cbf01", size = 2115726, upload-time = "2025-07-29T13:29:23.496Z" }, - { url = "https://files.pythonhosted.org/packages/a2/cc/d33646fcc24c87cc4e30a03556b611a4e7bcfa69a4c935bffb923e3c89f4/sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cf10396a8a700a0f38ccd220d940be529c8f64435c5d5b29375acab9267a6c9", size = 3246007, upload-time = "2025-07-29T13:26:44.166Z" }, - { url = "https://files.pythonhosted.org/packages/67/08/4e6c533d4c7f5e7c4cbb6fe8a2c4e813202a40f05700d4009a44ec6e236d/sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cae6c2b05326d7c2c7c0519f323f90e0fb9e8afa783c6a05bb9ee92a90d0f04", size = 3250919, upload-time = "2025-07-29T13:22:33.74Z" }, - { url = "https://files.pythonhosted.org/packages/5c/82/f680e9a636d217aece1b9a8030d18ad2b59b5e216e0c94e03ad86b344af3/sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f50f7b20677b23cfb35b6afcd8372b2feb348a38e3033f6447ee0704540be894", size = 3180546, upload-time = "2025-07-29T13:26:45.648Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a2/8c8f6325f153894afa3775584c429cc936353fb1db26eddb60a549d0ff4b/sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d88a1c0d66d24e229e3938e1ef16ebdbd2bf4ced93af6eff55225f7465cf350", size = 3216683, upload-time = "2025-07-29T13:22:34.977Z" }, - { url = "https://files.pythonhosted.org/packages/39/44/3a451d7fa4482a8ffdf364e803ddc2cfcafc1c4635fb366f169ecc2c3b11/sqlalchemy-2.0.42-cp313-cp313-win32.whl", hash = "sha256:45c842c94c9ad546c72225a0c0d1ae8ef3f7c212484be3d429715a062970e87f", size = 2093990, upload-time = "2025-07-29T13:16:13.036Z" }, - { url = "https://files.pythonhosted.org/packages/4b/9e/9bce34f67aea0251c8ac104f7bdb2229d58fb2e86a4ad8807999c4bee34b/sqlalchemy-2.0.42-cp313-cp313-win_amd64.whl", hash = "sha256:eb9905f7f1e49fd57a7ed6269bc567fcbbdac9feadff20ad6bd7707266a91577", size = 2120473, upload-time = "2025-07-29T13:16:14.502Z" }, - { url = "https://files.pythonhosted.org/packages/ee/55/ba2546ab09a6adebc521bf3974440dc1d8c06ed342cceb30ed62a8858835/sqlalchemy-2.0.42-py3-none-any.whl", hash = "sha256:defcdff7e661f0043daa381832af65d616e060ddb54d3fe4476f51df7eaa1835", size = 1922072, upload-time = "2025-07-29T13:09:17.061Z" }, + { url = "https://files.pythonhosted.org/packages/62/c4/59c7c9b068e6813c898b771204aad36683c96318ed12d4233e1b18762164/sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250", size = 2139675, upload-time = "2025-10-10T16:03:31.064Z" }, + { url = "https://files.pythonhosted.org/packages/d6/ae/eeb0920537a6f9c5a3708e4a5fc55af25900216bdb4847ec29cfddf3bf3a/sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29", size = 2127726, upload-time = "2025-10-10T16:03:35.934Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d5/2ebbabe0379418eda8041c06b0b551f213576bfe4c2f09d77c06c07c8cc5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44", size = 3327603, upload-time = "2025-10-10T15:35:28.322Z" }, + { url = "https://files.pythonhosted.org/packages/45/e5/5aa65852dadc24b7d8ae75b7efb8d19303ed6ac93482e60c44a585930ea5/sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1", size = 3337842, upload-time = "2025-10-10T15:43:45.431Z" }, + { url = "https://files.pythonhosted.org/packages/41/92/648f1afd3f20b71e880ca797a960f638d39d243e233a7082c93093c22378/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7", size = 3264558, upload-time = "2025-10-10T15:35:29.93Z" }, + { url = "https://files.pythonhosted.org/packages/40/cf/e27d7ee61a10f74b17740918e23cbc5bc62011b48282170dc4c66da8ec0f/sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d", size = 3301570, upload-time = "2025-10-10T15:43:48.407Z" }, + { url = "https://files.pythonhosted.org/packages/3b/3d/3116a9a7b63e780fb402799b6da227435be878b6846b192f076d2f838654/sqlalchemy-2.0.44-cp312-cp312-win32.whl", hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4", size = 2103447, upload-time = "2025-10-10T15:03:21.678Z" }, + { url = "https://files.pythonhosted.org/packages/25/83/24690e9dfc241e6ab062df82cc0df7f4231c79ba98b273fa496fb3dd78ed/sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e", size = 2130912, upload-time = "2025-10-10T15:03:24.656Z" }, + { url = "https://files.pythonhosted.org/packages/9c/5e/6a29fa884d9fb7ddadf6b69490a9d45fded3b38541713010dad16b77d015/sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05", size = 1928718, upload-time = "2025-10-10T15:29:45.32Z" }, ] [[package]] name = "sqlalchemy-utils" -version = "0.41.2" -source = { registry = "https://pypi.org/simple/" } +version = "0.42.0" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4d/bf/abfd5474cdd89ddd36dbbde9c6efba16bfa7f5448913eba946fed14729da/SQLAlchemy-Utils-0.41.2.tar.gz", hash = "sha256:bc599c8c3b3319e53ce6c5c3c471120bd325d0071fb6f38a10e924e3d07b9990", size = 138017, upload-time = "2024-03-24T15:17:28.196Z" } +sdist = { url = "https://files.pythonhosted.org/packages/63/80/4e15fdcfc25a2226122bf316f0ebac86d840ab3fb38b38ca4cabc395865e/sqlalchemy_utils-0.42.0.tar.gz", hash = "sha256:6d1ecd3eed8b941f0faf8a531f5d5cee7cffa2598fcf8163de8c31c7a417a5e0", size = 130531, upload-time = "2025-08-30T18:43:41.904Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/f0/dc4757b83ac1ab853cf222df8535ed73973e0c203d983982ba7b8bc60508/SQLAlchemy_Utils-0.41.2-py3-none-any.whl", hash = "sha256:85cf3842da2bf060760f955f8467b87983fb2e30f1764fd0e24a48307dc8ec6e", size = 93083, upload-time = "2024-03-24T15:17:24.533Z" }, + { url = "https://files.pythonhosted.org/packages/52/86/21e97809b017a4ebc88971eea335130782421851b0ed8dc3ab6126b479f1/sqlalchemy_utils-0.42.0-py3-none-any.whl", hash = "sha256:c8c0b7f00f4734f6f20e9a4d06b39d79d58c8629cba50924fcaeb20e28eb4f48", size = 91744, upload-time = "2025-08-30T18:43:40.199Z" }, ] [[package]] name = "sqlmodel" -version = "0.0.24" -source = { registry = "https://pypi.org/simple/" } +version = "0.0.27" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/86/4b/c2ad0496f5bdc6073d9b4cef52be9c04f2b37a5773441cc6600b1857648b/sqlmodel-0.0.24.tar.gz", hash = "sha256:cc5c7613c1a5533c9c7867e1aab2fd489a76c9e8a061984da11b4e613c182423", size = 116780, upload-time = "2025-03-07T05:43:32.887Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/5a/693d90866233e837d182da76082a6d4c2303f54d3aaaa5c78e1238c5d863/sqlmodel-0.0.27.tar.gz", hash = "sha256:ad1227f2014a03905aef32e21428640848ac09ff793047744a73dfdd077ff620", size = 118053, upload-time = "2025-10-08T16:39:11.938Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/16/91/484cd2d05569892b7fef7f5ceab3bc89fb0f8a8c0cde1030d383dbc5449c/sqlmodel-0.0.24-py3-none-any.whl", hash = "sha256:6778852f09370908985b667d6a3ab92910d0d5ec88adcaf23dbc242715ff7193", size = 28622, upload-time = "2025-03-07T05:43:30.37Z" }, + { url = "https://files.pythonhosted.org/packages/8c/92/c35e036151fe53822893979f8a13e6f235ae8191f4164a79ae60a95d66aa/sqlmodel-0.0.27-py3-none-any.whl", hash = "sha256:667fe10aa8ff5438134668228dc7d7a08306f4c5c4c7e6ad3ad68defa0e7aa49", size = 29131, upload-time = "2025-10-08T16:39:10.917Z" }, +] + +[[package]] +name = "sse-starlette" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/42/6f/22ed6e33f8a9e76ca0a412405f31abb844b779d52c5f96660766edcd737c/sse_starlette-3.0.2.tar.gz", hash = "sha256:ccd60b5765ebb3584d0de2d7a6e4f745672581de4f5005ab31c3a25d10b52b3a", size = 20985, upload-time = "2025-07-27T09:07:44.565Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" }, ] [[package]] name = "starlette" -version = "0.47.2" -source = { registry = "https://pypi.org/simple/" } +version = "0.48.0" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, + { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/04/57/d062573f391d062710d4088fa1369428c38d51460ab6fedff920efef932e/starlette-0.47.2.tar.gz", hash = "sha256:6ae9aa5db235e4846decc1e7b79c4f346adf41e9777aebeb49dfd09bbd7023d8", size = 2583948, upload-time = "2025-07-20T17:31:58.522Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/a5/d6f429d43394057b67a6b5bbe6eae2f77a6bf7459d961fdb224bf206eee6/starlette-0.48.0.tar.gz", hash = "sha256:7e8cee469a8ab2352911528110ce9088fdc6a37d9876926e73da7ce4aa4c7a46", size = 2652949, upload-time = "2025-09-13T08:41:05.699Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f7/1f/b876b1f83aef204198a42dc101613fefccb32258e5428b5f9259677864b4/starlette-0.47.2-py3-none-any.whl", hash = "sha256:c5847e96134e5c5371ee9fac6fdf1a67336d5815e09eb2a01fdb57a351ef915b", size = 72984, upload-time = "2025-07-20T17:31:56.738Z" }, + { url = "https://files.pythonhosted.org/packages/be/72/2db2f49247d0a18b4f1bb9a5a39a0162869acf235f3a96418363947b3d46/starlette-0.48.0-py3-none-any.whl", hash = "sha256:0764ca97b097582558ecb498132ed0c7d942f233f365b86ba37770e026510659", size = 73736, upload-time = "2025-09-13T08:41:03.869Z" }, +] + +[[package]] +name = "tiktoken" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "regex" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/4a/abaec53e93e3ef37224a4dd9e2fc6bb871e7a538c2b6b9d2a6397271daf4/tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6", size = 33437, upload-time = "2024-05-13T18:03:28.793Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/46/4cdda4186ce900608f522da34acf442363346688c71b938a90a52d7b84cc/tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908", size = 960446, upload-time = "2024-05-13T18:02:54.409Z" }, + { url = "https://files.pythonhosted.org/packages/b6/30/09ced367d280072d7a3e21f34263dfbbf6378661e7a0f6414e7c18971083/tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410", size = 906652, upload-time = "2024-05-13T18:02:56.25Z" }, + { url = "https://files.pythonhosted.org/packages/e6/7b/c949e4954441a879a67626963dff69096e3c774758b9f2bb0853f7b4e1e7/tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704", size = 1047904, upload-time = "2024-05-13T18:02:57.707Z" }, + { url = "https://files.pythonhosted.org/packages/50/81/1842a22f15586072280364c2ab1e40835adaf64e42fe80e52aff921ee021/tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350", size = 1079836, upload-time = "2024-05-13T18:02:59.009Z" }, + { url = "https://files.pythonhosted.org/packages/6d/87/51a133a3d5307cf7ae3754249b0faaa91d3414b85c3d36f80b54d6817aa6/tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4", size = 1092472, upload-time = "2024-05-13T18:03:00.597Z" }, + { url = "https://files.pythonhosted.org/packages/a5/1f/c93517dc6d3b2c9e988b8e24f87a8b2d4a4ab28920a3a3f3ea338397ae0c/tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97", size = 1141881, upload-time = "2024-05-13T18:03:02.743Z" }, + { url = "https://files.pythonhosted.org/packages/bf/4b/48ca098cb580c099b5058bf62c4cb5e90ca6130fa43ef4df27088536245b/tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f", size = 799281, upload-time = "2024-05-13T18:03:04.036Z" }, ] [[package]] name = "tqdm" version = "4.67.1" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] @@ -928,39 +1447,58 @@ wheels = [ ] [[package]] -name = "types-python-dateutil" -version = "2.9.0.20250708" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/c9/95/6bdde7607da2e1e99ec1c1672a759d42f26644bbacf939916e086db34870/types_python_dateutil-2.9.0.20250708.tar.gz", hash = "sha256:ccdbd75dab2d6c9696c350579f34cffe2c281e4c5f27a585b2a2438dd1d5c8ab", size = 15834, upload-time = "2025-07-08T03:14:03.382Z" } +name = "traceroot" +version = "0.0.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-exporter-otlp-proto-grpc" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, + { name = "opentelemetry-instrumentation" }, + { name = "opentelemetry-instrumentation-asgi" }, + { name = "opentelemetry-instrumentation-fastapi" }, + { name = "opentelemetry-propagator-aws-xray" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "opentelemetry-sdk-extension-aws" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "opentelemetry-util-http" }, + { name = "pandas" }, + { name = "pyyaml" }, + { name = "watchtower" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/c0/9f047cc761a9f98a2e7a9a8fef4c01ea1eeb7b2383fe1f3ad82d24ac98b3/traceroot-0.0.7.tar.gz", hash = "sha256:7792def0bb466977318f0126756c02e8950a1c208bcec7a8efed1e05e02b189d", size = 25710, upload-time = "2025-10-16T06:17:39.587Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/72/52/43e70a8e57fefb172c22a21000b03ebcc15e47e97f5cb8495b9c2832efb4/types_python_dateutil-2.9.0.20250708-py3-none-any.whl", hash = "sha256:4d6d0cc1cc4d24a2dc3816024e502564094497b713f7befda4d5bc7a8e3fd21f", size = 17724, upload-time = "2025-07-08T03:14:02.593Z" }, + { url = "https://files.pythonhosted.org/packages/45/59/8593afb3615fb0c2e0cf6888dc49d9ae05d365c76ee053f43a36519f889c/traceroot-0.0.7-py3-none-any.whl", hash = "sha256:2a20a8e2dfa6b10e1f96bc98d5b84dc40c14c01d47098f86068393ece99a2862", size = 24026, upload-time = "2025-10-16T06:17:38.573Z" }, ] [[package]] name = "typing-extensions" -version = "4.14.1" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] name = "typing-inspection" -version = "0.4.1" -source = { registry = "https://pypi.org/simple/" } +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, ] [[package]] name = "tzdata" version = "2025.2" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, @@ -969,7 +1507,7 @@ wheels = [ [[package]] name = "urllib3" version = "2.5.0" -source = { registry = "https://pypi.org/simple/" } +source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, @@ -977,22 +1515,73 @@ wheels = [ [[package]] name = "uvicorn" -version = "0.35.0" -source = { registry = "https://pypi.org/simple/" } +version = "0.38.0" +source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8d3061399c6a961ffe5fbb7e2aa63c9234df7259e9cd/uvicorn-0.35.0.tar.gz", hash = "sha256:bc662f087f7cf2ce11a1d7fd70b90c9f98ef2e2831556dd078d131b96cc94a01", size = 78473, upload-time = "2025-06-28T16:15:46.058Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/ce/f06b84e2697fef4688ca63bdb2fdf113ca0a3be33f94488f2cadb690b0cf/uvicorn-0.38.0.tar.gz", hash = "sha256:fd97093bdd120a2609fc0d3afe931d4d4ad688b6e75f0f929fde1bc36fe0e91d", size = 80605, upload-time = "2025-10-18T13:46:44.63Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, + { url = "https://files.pythonhosted.org/packages/ee/d9/d88e73ca598f4f6ff671fb5fde8a32925c2e08a637303a1d12883c7305fa/uvicorn-0.38.0-py3-none-any.whl", hash = "sha256:48c0afd214ceb59340075b4a052ea1ee91c16fbc2a9b1469cca0e54566977b02", size = 68109, upload-time = "2025-10-18T13:46:42.958Z" }, ] [[package]] -name = "win32-setctime" -version = "1.2.0" -source = { registry = "https://pypi.org/simple/" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/705086c9d734d3b663af0e9bb3d4de6578d08f46b1b101c2442fd9aecaa2/win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0", size = 4867, upload-time = "2024-12-07T15:28:28.314Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/07/c6fe3ad3e685340704d314d765b7912993bcb8dc198f0e7a89382d37974b/win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390", size = 4083, upload-time = "2024-12-07T15:28:26.465Z" }, +name = "watchtower" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "boto3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/e1/40e6940383b7202e7c12343ab58c4a0acd5552217d829a4f0ed19cd9cf0b/watchtower-3.4.0.tar.gz", hash = "sha256:7d3c116aff72a73ce8f6fc0addd1d0daa04d3f9d53d87cedca3a5a65a264bf7d", size = 27128, upload-time = "2025-02-25T15:07:05.374Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/ac/7caa56d4cf82e66b5cdc46d7fa6edd0d4bcd407ec33e46f28a8be83cca28/watchtower-3.4.0-py3-none-any.whl", hash = "sha256:5eac65cbf2a7350bb43c3518485230a6135ed7dec7ccb88468828d68ab9fea26", size = 18022, upload-time = "2025-02-25T15:07:02.851Z" }, +] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] + +[[package]] +name = "wrapt" +version = "1.17.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/95/8f/aeb76c5b46e273670962298c23e7ddde79916cb74db802131d49a85e4b7d/wrapt-1.17.3.tar.gz", hash = "sha256:f66eb08feaa410fe4eebd17f2a2c8e2e46d3476e9f8c783daa8e09e0faa666d0", size = 55547, upload-time = "2025-08-12T05:53:21.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/41/cad1aba93e752f1f9268c77270da3c469883d56e2798e7df6240dcb2287b/wrapt-1.17.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ab232e7fdb44cdfbf55fc3afa31bcdb0d8980b9b95c38b6405df2acb672af0e0", size = 53998, upload-time = "2025-08-12T05:51:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/60/f8/096a7cc13097a1869fe44efe68dace40d2a16ecb853141394047f0780b96/wrapt-1.17.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9baa544e6acc91130e926e8c802a17f3b16fbea0fd441b5a60f5cf2cc5c3deba", size = 39020, upload-time = "2025-08-12T05:51:35.906Z" }, + { url = "https://files.pythonhosted.org/packages/33/df/bdf864b8997aab4febb96a9ae5c124f700a5abd9b5e13d2a3214ec4be705/wrapt-1.17.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6b538e31eca1a7ea4605e44f81a48aa24c4632a277431a6ed3f328835901f4fd", size = 39098, upload-time = "2025-08-12T05:51:57.474Z" }, + { url = "https://files.pythonhosted.org/packages/9f/81/5d931d78d0eb732b95dc3ddaeeb71c8bb572fb01356e9133916cd729ecdd/wrapt-1.17.3-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:042ec3bb8f319c147b1301f2393bc19dba6e176b7da446853406d041c36c7828", size = 88036, upload-time = "2025-08-12T05:52:34.784Z" }, + { url = "https://files.pythonhosted.org/packages/ca/38/2e1785df03b3d72d34fc6252d91d9d12dc27a5c89caef3335a1bbb8908ca/wrapt-1.17.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3af60380ba0b7b5aeb329bc4e402acd25bd877e98b3727b0135cb5c2efdaefe9", size = 88156, upload-time = "2025-08-12T05:52:13.599Z" }, + { url = "https://files.pythonhosted.org/packages/b3/8b/48cdb60fe0603e34e05cffda0b2a4adab81fd43718e11111a4b0100fd7c1/wrapt-1.17.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b02e424deef65c9f7326d8c19220a2c9040c51dc165cddb732f16198c168396", size = 87102, upload-time = "2025-08-12T05:52:14.56Z" }, + { url = "https://files.pythonhosted.org/packages/3c/51/d81abca783b58f40a154f1b2c56db1d2d9e0d04fa2d4224e357529f57a57/wrapt-1.17.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:74afa28374a3c3a11b3b5e5fca0ae03bef8450d6aa3ab3a1e2c30e3a75d023dc", size = 87732, upload-time = "2025-08-12T05:52:36.165Z" }, + { url = "https://files.pythonhosted.org/packages/9e/b1/43b286ca1392a006d5336412d41663eeef1ad57485f3e52c767376ba7e5a/wrapt-1.17.3-cp312-cp312-win32.whl", hash = "sha256:4da9f45279fff3543c371d5ababc57a0384f70be244de7759c85a7f989cb4ebe", size = 36705, upload-time = "2025-08-12T05:53:07.123Z" }, + { url = "https://files.pythonhosted.org/packages/28/de/49493f962bd3c586ab4b88066e967aa2e0703d6ef2c43aa28cb83bf7b507/wrapt-1.17.3-cp312-cp312-win_amd64.whl", hash = "sha256:e71d5c6ebac14875668a1e90baf2ea0ef5b7ac7918355850c0908ae82bcb297c", size = 38877, upload-time = "2025-08-12T05:53:05.436Z" }, + { url = "https://files.pythonhosted.org/packages/f1/48/0f7102fe9cb1e8a5a77f80d4f0956d62d97034bbe88d33e94699f99d181d/wrapt-1.17.3-cp312-cp312-win_arm64.whl", hash = "sha256:604d076c55e2fdd4c1c03d06dc1a31b95130010517b5019db15365ec4a405fc6", size = 36885, upload-time = "2025-08-12T05:52:54.367Z" }, + { url = "https://files.pythonhosted.org/packages/1f/f6/a933bd70f98e9cf3e08167fc5cd7aaaca49147e48411c0bd5ae701bb2194/wrapt-1.17.3-py3-none-any.whl", hash = "sha256:7171ae35d2c33d326ac19dd8facb1e82e5fd04ef8c6c0e394d7af55a55051c22", size = 23591, upload-time = "2025-08-12T05:53:20.674Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, ] diff --git a/src/api/http.ts b/src/api/http.ts index 39960be94..1a60855ad 100644 --- a/src/api/http.ts +++ b/src/api/http.ts @@ -170,7 +170,6 @@ async function proxyFetchRequest( ...customHeaders, } - console.debug('url', url, token) if (!url.includes('http://') && !url.includes('https://') && token) { headers['Authorization'] = `Bearer ${token}` } diff --git a/src/assets/chevron_left.svg b/src/assets/gift-white.svg similarity index 100% rename from src/assets/chevron_left.svg rename to src/assets/gift-white.svg diff --git a/src/assets/gift.svg b/src/assets/gift.svg new file mode 100644 index 000000000..7954271a5 --- /dev/null +++ b/src/assets/gift.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/src/assets/wechat_qr_1.jpg b/src/assets/wechat_qr_1.jpg index a3814f7f0..857ac9d9d 100644 Binary files a/src/assets/wechat_qr_1.jpg and b/src/assets/wechat_qr_1.jpg differ diff --git a/src/assets/wechat_qr_2.jpg b/src/assets/wechat_qr_2.jpg index c560e36c9..a3d5ee49c 100644 Binary files a/src/assets/wechat_qr_2.jpg and b/src/assets/wechat_qr_2.jpg differ diff --git a/src/assets/wechat_qr_3.jpg b/src/assets/wechat_qr_3.jpg index d5d31392f..5663db8e0 100644 Binary files a/src/assets/wechat_qr_3.jpg and b/src/assets/wechat_qr_3.jpg differ diff --git a/src/assets/wechat_qr_4.jpg b/src/assets/wechat_qr_4.jpg index 7ac6837ff..f1e72d6bd 100644 Binary files a/src/assets/wechat_qr_4.jpg and b/src/assets/wechat_qr_4.jpg differ diff --git a/src/components/AddWorker/IntegrationList.tsx b/src/components/AddWorker/IntegrationList.tsx index 089a3e87f..778ac5d7d 100644 --- a/src/components/AddWorker/IntegrationList.tsx +++ b/src/components/AddWorker/IntegrationList.tsx @@ -429,10 +429,10 @@ export default function IntegrationList({ "Reddit", "Github", ].includes(item.name) - ? t("setting.coming-soon") + ? t("layout.coming-soon") : isInstalled - ? t("setting.uninstall") - : t("setting.install")} + ? t("layout.uninstall") + : t("layout.install")} )} diff --git a/src/components/AddWorker/ToolSelect.tsx b/src/components/AddWorker/ToolSelect.tsx index d8e048522..97b6fe714 100644 --- a/src/components/AddWorker/ToolSelect.tsx +++ b/src/components/AddWorker/ToolSelect.tsx @@ -9,6 +9,7 @@ import { Badge } from "@/components/ui/badge"; import { CircleAlert, Store, X } from "lucide-react"; import { proxyFetchGet, proxyFetchPost, proxyFetchPut, fetchPost } from "@/api/http"; import { Input } from "../ui/input"; +import { Textarea } from "../ui/textarea"; import { Button } from "../ui/button"; import githubIcon from "@/assets/github.svg"; import { TooltipSimple } from "../ui/tooltip"; @@ -167,13 +168,13 @@ const ToolSelect = forwardRef< toolkit: value.toolkit, desc: value.env_vars && value.env_vars.length > 0 - ? `Environmental variables required: ${value.env_vars.join( + ? `${t("layout.environmental-variables-required")} ${value.env_vars.join( ", " )}` : key.toLowerCase() === 'notion' - ? "Notion workspace integration for reading and managing Notion pages" + ? t("layout.notion-workspace-integration") : key.toLowerCase() === 'google calendar' - ? "Google Calendar integration for managing events and schedules" + ? t("layout.google-calendar-integration") : "", onInstall, }; @@ -184,7 +185,7 @@ const ToolSelect = forwardRef< }; // Refs - const inputRef = useRef(null); + const inputRef = useRef(null); const debounceTimerRef = useRef(null); const containerRef = useRef(null); @@ -445,10 +446,10 @@ const ToolSelect = forwardRef< }; const getInstallButtonText = (itemId: number) => { - if (installedIds.includes(itemId)) return t("setting.installed"); - if (installing[itemId]) return t("setting.installing"); - if (installed[itemId]) return t("setting.installed"); - return t("setting.install"); + if (installedIds.includes(itemId)) return t("layout.installed"); + if (installing[itemId]) return t("layout.installing"); + if (installed[itemId]) return t("layout.installed"); + return t("layout.install"); }; // Effects @@ -606,36 +607,43 @@ const ToolSelect = forwardRef< className="leading-17 text-xs font-bold text-button-secondary-text-default h-6 px-sm py-xs bg-button-secondary-fill-default hover:bg-button-tertiery-text-default rounded-md shadow-sm" disabled={true} > - {t("setting.installed")} + {t("layout.installed")} ); return (
-
+
+
+ {t("workforce.agent-tool")} + + + +
{ inputRef.current?.focus(); setIsOpen(true); }} - className="flex flex-wrap gap-1 justify-start px-[6px] py-1 min-h-[60px] max-h-[120px] overflow-y-auto w-full rounded-sm border border-solid border-input-border-default bg-input-bg-default !shadow-none text-sm leading-normal" + className="flex flex-wrap gap-1 justify-start px-[6px] py-1 min-h-[60px] max-h-[120px] overflow-y-auto w-full rounded-lg border border-solid border-input-border-default bg-input-bg-default" > {renderSelectedItems()} - setKeyword(e.target.value)} onFocus={() => setIsOpen(true)} ref={inputRef} - className="bg-transparent border-none !shadow-none text-sm leading-normal !ring-0 !ring-offset-0 w-auto !h-[20px] p-0" + className="bg-transparent border-none !shadow-none text-sm leading-normal !ring-0 !ring-offset-0 w-auto !h-[20px] p-0 resize-none" />
{/* floating dropdown */} {isOpen && ( -
-
+
+
state.activeTaskId); - const tasks = useChatStore((state) => state.tasks); + const { chatStore, projectStore } = useChatStoreAdapter(); + if (!chatStore) { + return
Loading...
; + } + const activeProjectId = projectStore.activeProjectId; + const activeTaskId = chatStore.activeTaskId; + const tasks = chatStore.tasks; const [showEnvConfig, setShowEnvConfig] = useState(false); const [activeMcp, setActiveMcp] = useState(null); const [envValues, setEnvValues] = useState<{ [key: string]: EnvValue }>({}); @@ -254,7 +259,7 @@ export function AddWorker({ }; setWorkerList([...workerList, worker]); } else { - fetchPost(`/task/${activeTaskId}/add-agent`, { + fetchPost(`/task/${activeProjectId}/add-agent`, { name: workerName, description: workerDescription, tools: localTool, @@ -314,33 +319,19 @@ export function AddWorker({ )} - - - -
- {showEnvConfig && ( - - )} -
- {showEnvConfig - ? t("workforce.configure-mcp-server") - : t("workforce.add-your-agent")} -
- - - -
-
-
+ + {showEnvConfig ? ( // environment configuration interface <> -
+
{getCategoryIcon(activeMcp?.category?.name)}
@@ -389,19 +380,19 @@ export function AddWorker({ ) )}
-
- - - +
+ + {/* hidden but keep rendering ToolSelect component */}
@@ -416,72 +407,64 @@ export function AddWorker({ ) : ( // default add interface <> -
-
-
+ +
+
+
+
{ setWorkerName(e.target.value); // when user starts input, clear error if (nameError) setNameError(""); }} - className={`!border-none !bg-transparent !shadow-none text-xl leading-2xl font-bold !ring-0 !ring-offset-0 ${ - nameError ? "border-red-500" : "" - }`} + state={nameError ? "error" : "default"} + note={nameError || ""} + backIcon={} + onBackIconClick={() => { + // Handle refresh/regenerate logic here + console.log("Refresh agent name"); + }} required /> -
- {nameError && ( -
- {nameError} -
- )}
-
-
- {t("workforce.description-optional")} -
-