Merge branch 'main' into enhance/stealth

This commit is contained in:
Wendong-Fan 2026-01-20 00:25:38 +00:00 committed by GitHub
commit d8666ac8aa
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
46 changed files with 1458 additions and 571 deletions

View file

@@ -20,6 +20,8 @@ jobs:
arch: x64
- os: windows-latest
arch: x64
- os: ubuntu-latest
arch: x64
steps:
- name: Checkout Code
@@ -46,6 +48,13 @@ jobs:
- name: Install Dependencies
run: npm install
# Install libfuse2 for Linux AppImage builds
- name: Install libfuse2 (Linux)
if: runner.os == 'Linux'
run: |
sudo apt-get update
sudo apt-get install -y libfuse2
# Step for macOS builds with signing
- name: Build Release Files (macOS with signing)
if: runner.os == 'macOS'
@@ -78,6 +87,19 @@ jobs:
VITE_STACK_SECRET_SERVER_KEY: ${{ secrets.VITE_STACK_SECRET_SERVER_KEY }}
USE_NPM_INSTALL_BUN: 'true'
# Step for Linux builds
- name: Build Release Files (Linux)
if: runner.os == 'Linux'
timeout-minutes: 90
run: npm run build:linux
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
VITE_BASE_URL: ${{ secrets.VITE_BASE_URL }}
VITE_STACK_PROJECT_ID: ${{ secrets.VITE_STACK_PROJECT_ID }}
VITE_STACK_PUBLISHABLE_CLIENT_KEY: ${{ secrets.VITE_STACK_PUBLISHABLE_CLIENT_KEY }}
VITE_STACK_SECRET_SERVER_KEY: ${{ secrets.VITE_STACK_SECRET_SERVER_KEY }}
USE_NPM_INSTALL_BUN: 'true'
- name: Upload Artifact (macOS - dmg only)
if: runner.os == 'macOS'
uses: actions/upload-artifact@v6
@@ -95,13 +117,22 @@ jobs:
path: |
release/*.exe
retention-days: 5
- name: Upload Artifact (Linux - AppImage only)
if: runner.os == 'Linux'
uses: actions/upload-artifact@v6
with:
name: release-${{ matrix.os }}-${{ matrix.arch }}
path: |
release/*.AppImage
retention-days: 5
merge-release:
needs: build
runs-on: ubuntu-latest
steps:
- name: Create directories
run: |
mkdir -p release/mac-x64 release/mac-arm64 release/win-x64
mkdir -p release/mac-x64 release/mac-arm64 release/win-x64 release/linux-x64
# Download all artifacts with correct names
- name: Download mac-x64 artifact
@@ -122,7 +153,13 @@ jobs:
name: release-windows-latest-x64
path: temp-win-x64
# Move only dmg files for macOS and exe files for Windows
- name: Download linux-x64 artifact
uses: actions/download-artifact@v7
with:
name: release-ubuntu-latest-x64
path: temp-linux-x64
# Move only dmg files for macOS, exe files for Windows, and AppImage for Linux
- name: Move files to clean folders
shell: bash
run: |
@@ -146,3 +183,10 @@ jobs:
else
find temp-win-x64 -name "*.exe" -exec mv {} release/win-x64/ \; || true
fi
# linux-x64 - only move AppImage files
if [ -d "temp-linux-x64/release" ]; then
find temp-linux-x64/release -name "*.AppImage" -exec mv {} release/linux-x64/ \; || true
else
find temp-linux-x64 -name "*.AppImage" -exec mv {} release/linux-x64/ \; || true
fi

View file

@@ -31,6 +31,8 @@ jobs:
arch: x64
- os: windows-latest
arch: x64
- os: ubuntu-latest
arch: x64
steps:
- name: Checkout Code
@@ -54,6 +56,13 @@ jobs:
- name: Install Dependencies
run: npm install
# Install libfuse2 for Linux AppImage builds
- name: Install libfuse2 (Linux)
if: runner.os == 'Linux'
run: |
sudo apt-get update
sudo apt-get install -y libfuse2
# Step for macOS builds with signing
- name: Build Release Files (macOS with signing)
if: runner.os == 'macOS'
@@ -82,6 +91,17 @@ jobs:
VITE_STACK_PUBLISHABLE_CLIENT_KEY: ${{ secrets.VITE_STACK_PUBLISHABLE_CLIENT_KEY }}
VITE_STACK_SECRET_SERVER_KEY: ${{ secrets.VITE_STACK_SECRET_SERVER_KEY }}
# Step for Linux builds
- name: Build Release Files (Linux)
if: runner.os == 'Linux'
run: npm run build:linux
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
VITE_BASE_URL: ${{ secrets.VITE_BASE_URL }}
VITE_STACK_PROJECT_ID: ${{ secrets.VITE_STACK_PROJECT_ID }}
VITE_STACK_PUBLISHABLE_CLIENT_KEY: ${{ secrets.VITE_STACK_PUBLISHABLE_CLIENT_KEY }}
VITE_STACK_SECRET_SERVER_KEY: ${{ secrets.VITE_STACK_SECRET_SERVER_KEY }}
- name: Upload Artifact
uses: actions/upload-artifact@v6
with:
@@ -97,7 +117,7 @@ jobs:
steps:
- name: Create directories
run: |
mkdir -p release/mac-x64 release/mac-arm64 release/win-x64
mkdir -p release/mac-x64 release/mac-arm64 release/win-x64 release/linux-x64
# Download all artifacts with correct names
- name: Download mac-x64 artifact
@@ -118,6 +138,12 @@ jobs:
name: release-windows-latest-x64
path: temp-win-x64
- name: Download linux-x64 artifact
uses: actions/download-artifact@v7
with:
name: release-ubuntu-latest-x64
path: temp-linux-x64
# Move files to final release directory, removing any nested release/ directory
- name: Move files to clean folders
shell: bash
@@ -143,6 +169,13 @@ jobs:
mv temp-win-x64/* release/win-x64/ || true
fi
# linux-x64
if [ -d "temp-linux-x64/release" ]; then
mv temp-linux-x64/release/* release/linux-x64/ || true
else
mv temp-linux-x64/* release/linux-x64/ || true
fi
- name: Rename duplicate files
run: |
mv release/mac-x64/latest-mac.yml release/mac-x64/latest-x64-mac.yml || true
@@ -157,5 +190,6 @@ jobs:
release/mac-x64/*
release/mac-arm64/*
release/win-x64/*
release/linux-x64/*
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

View file

@@ -297,7 +297,7 @@ Please add this signature image to the Signature Areas in the PDF. You could ins
| **Context Engineering** | - Prompt caching<br> - System prompt optimize<br> - Toolkit docstring optimize<br> - Context compression | [**Join Discord →**](https://discord.gg/D2e3rBWD) |
| **Multi-modal Enhancement** | - More accurate image understanding when using browser<br> - Advanced video generation | [**Join Discord →**](https://discord.gg/kyapNCeJ) |
| **Multi-agent system** | - Workforce support fixed workflow<br> - Workforce support multi-round conversation | [**Join Discord →**](https://discord.gg/bFRmPuDB) |
| **Browser Toolkit** | - BrowseCamp integration<br> - Benchmark improvement<br> - Forbid repeated page visiting<br> - Automatic cache button clicking | [**Join Discord →**](https://discord.gg/NF73ze5v) |
| **Browser Toolkit** | - BrowseComp integration<br> - Benchmark improvement<br> - Forbid repeated page visiting<br> - Automatic cache button clicking | [**Join Discord →**](https://discord.gg/NF73ze5v) |
| **Document Toolkit** | - Support dynamic file editing | [**Join Discord →**](https://discord.gg/4yAWJxYr) |
| **Terminal Toolkit** | - Benchmark improvement<br> - Terminal-Bench integration | [**Join Discord →**](https://discord.gg/FjQfnsrV) |
| **Environment & RL** | - Environment design<br> - Data-generation<br> - RL framework integration (VERL, TRL, OpenRLHF) | [**Join Discord →**](https://discord.gg/MaVZXEn8) |

View file

@@ -284,7 +284,7 @@ Eigent 完全开源。您可以下载、检查和修改代码,确保透明度
| **上下文工程** | - 提示缓存<br> - 系统提示优化<br> - 工具包文档优化<br> - 上下文压缩 | [**加入 Discord →**](https://discord.gg/D2e3rBWD) |
| **多模态增强** | - 使用浏览器时更准确的图像理解<br> - 高级视频生成 | [**加入 Discord →**](https://discord.gg/kyapNCeJ) |
| **多智能体系统** | - 工作流支持固定流程<br> - 工作流支持多轮对话 | [**加入 Discord →**](https://discord.gg/bFRmPuDB) |
| **浏览器工具包** | - BrowseCamp 集成<br> - 基准测试改进<br> - 禁止重复访问页面<br> - 自动缓存按钮点击 | [**加入 Discord →**](https://discord.gg/NF73ze5v) |
| **浏览器工具包** | - BrowseComp 集成<br> - 基准测试改进<br> - 禁止重复访问页面<br> - 自动缓存按钮点击 | [**加入 Discord →**](https://discord.gg/NF73ze5v) |
| **文档工具包** | - 支持动态文件编辑 | [**加入 Discord →**](https://discord.gg/4yAWJxYr) |
| **终端工具包** | - 基准测试改进<br> - Terminal-Bench 集成 | [**加入 Discord →**](https://discord.gg/FjQfnsrV) |
| **环境与强化学习** | - 环境设计<br> - 数据生成<br> - 强化学习框架集成VERL, TRL, OpenRLHF | [**加入 Discord →**](https://discord.gg/MaVZXEn8) |

View file

@@ -295,7 +295,7 @@ Documentsディレクトリにmydocsというフォルダがあります。ス
| **コンテキストエンジニアリング** | - プロンプトキャッシング<br> - システムプロンプト最適化<br> - ツールキットdocstring最適化<br> - コンテキスト圧縮 | [**Discordに参加 →**](https://discord.gg/D2e3rBWD) |
| **マルチモーダル強化** | - ブラウザ使用時のより正確な画像理解<br> - 高度な動画生成 | [**Discordに参加 →**](https://discord.gg/kyapNCeJ) |
| **マルチエージェントシステム** | - 固定ワークフローをサポートするワークフォース<br> - マルチラウンド変換をサポートするワークフォース | [**Discordに参加 →**](https://discord.gg/bFRmPuDB) |
| **ブラウザツールキット** | - BrowseCamp統合<br> - ベンチマーク改善<br> - 繰り返しページ訪問の禁止<br> - 自動キャッシュボタンクリック | [**Discordに参加 →**](https://discord.gg/NF73ze5v) |
| **ブラウザツールキット** | - BrowseComp統合<br> - ベンチマーク改善<br> - 繰り返しページ訪問の禁止<br> - 自動キャッシュボタンクリック | [**Discordに参加 →**](https://discord.gg/NF73ze5v) |
| **ドキュメントツールキット** | - 動的ファイル編集のサポート | [**Discordに参加 →**](https://discord.gg/4yAWJxYr) |
| **ターミナルツールキット** | - ベンチマーク改善<br> - Terminal-Bench統合 | [**Discordに参加 →**](https://discord.gg/FjQfnsrV) |
| **環境 & RL** | - 環境設計<br> - データ生成<br> - RLフレームワーク統合VERL、TRL、OpenRLHF | [**Discordに参加 →**](https://discord.gg/MaVZXEn8) |

View file

@@ -88,7 +88,7 @@ Construído sobre o aclamado projeto open source da [CAMEL-AI][camel-site], noss
A forma recomendada de executar o Eigent — totalmente independente, com controle completo sobre seus dados, sem necessidade de conta em nuvem.
👉 **[Guia Completo de Implantação Local](./server/README_EN.md)**
👉 **[Guia Completo de Implantação Local](./server/README_PT-BR.md)**
Esta configuração inclui:
- Servidor backend local com API completa
@@ -292,7 +292,7 @@ Por favor, adicione esta imagem de assinatura às áreas de assinatura no PDF. V
| **Engenharia de Contexto** | - Cache de prompts<br> - Otimização de prompt do sistema<br> - Otimização de docstrings do toolkit<br> - Compressão de contexto | [**Entrar no Discord →**](https://discord.gg/D2e3rBWD) |
| **Aprimoramento Multimodal** | - Compreensão de imagens mais precisa ao usar o navegador<br> - Geração avançada de vídeo | [**Entrar no Discord →**](https://discord.gg/kyapNCeJ) |
| **Sistema Multiagente** | - Suporte do Workforce a fluxos fixos<br> - Suporte do Workforce a conversas em múltiplas rodadas | [**Entrar no Discord →**](https://discord.gg/bFRmPuDB) |
| **Toolkit de Navegador** | - Integração com BrowseCamp<br> - Melhoria de benchmark<br> - Proibir visitas repetidas a páginas<br> - Clique automático em botões de cache | [**Entrar no Discord →**](https://discord.gg/NF73ze5v) |
| **Toolkit de Navegador** | - Integração com BrowseComp<br> - Melhoria de benchmark<br> - Proibir visitas repetidas a páginas<br> - Clique automático em botões de cache | [**Entrar no Discord →**](https://discord.gg/NF73ze5v) |
| **Toolkit de Documentos** | - Suporte à edição dinâmica de arquivos | [**Entrar no Discord →**](https://discord.gg/4yAWJxYr) |
| **Toolkit de Terminal** | - Melhoria de benchmark<br> - Integração com Terminal-Bench | [**Entrar no Discord →**](https://discord.gg/FjQfnsrV) |
| **Ambiente & RL** | - Design de ambiente<br> - Geração de dados<br> - Integração de frameworks de RL (VERL, TRL, OpenRLHF) | [**Entrar no Discord →**](https://discord.gg/MaVZXEn8) |

View file

@@ -23,6 +23,8 @@ from app.service.task import (
get_or_create_task_lock,
get_task_lock,
set_current_task_id,
delete_task_lock,
task_locks,
)
from app.component.environment import set_user_env_path
from app.utils.workforce import Workforce
@@ -34,18 +36,50 @@ router = APIRouter()
# Create traceroot logger for chat controller
chat_logger = traceroot.get_logger("chat_controller")
# SSE timeout configuration (10 minutes in seconds)
SSE_TIMEOUT_SECONDS = 10 * 60
# SSE timeout configuration (30 minutes in seconds)
SSE_TIMEOUT_SECONDS = 30 * 60
async def timeout_stream_wrapper(stream_generator, timeout_seconds: int = SSE_TIMEOUT_SECONDS):
async def _cleanup_task_lock_safe(task_lock, reason: str) -> bool:
"""Safely cleanup task lock with existence check.
Args:
task_lock: The task lock to cleanup
reason: Reason for cleanup (for logging)
Returns:
True if cleanup was performed, False otherwise
"""
Wraps a stream generator with timeout handling.
if not task_lock:
return False
# Check if task_lock still exists before attempting cleanup
if task_lock.id not in task_locks:
chat_logger.debug(f"[{reason}] Task lock already removed, skipping cleanup",
extra={"task_id": task_lock.id})
return False
try:
task_lock.status = Status.done
await delete_task_lock(task_lock.id)
chat_logger.info(f"[{reason}] Task lock cleanup completed",
extra={"task_id": task_lock.id})
return True
except Exception as e:
chat_logger.error(f"[{reason}] Failed to cleanup task lock",
extra={"task_id": task_lock.id, "error": str(e)}, exc_info=True)
return False
async def timeout_stream_wrapper(stream_generator, timeout_seconds: int = SSE_TIMEOUT_SECONDS, task_lock=None):
"""Wraps a stream generator with timeout handling.
Closes the SSE connection if no data is received within the timeout period.
Triggers cleanup if timeout occurs to prevent resource leaks.
"""
last_data_time = time.time()
generator = stream_generator.__aiter__()
cleanup_triggered = False
try:
while True:
@@ -57,18 +91,24 @@ async def timeout_stream_wrapper(stream_generator, timeout_seconds: int = SSE_TI
last_data_time = time.time()
yield data
except asyncio.TimeoutError:
chat_logger.warning(f"SSE timeout: No data received for {timeout_seconds} seconds, closing connection")
# yield sse_json("error", {"message": "Connection timeout: No data received for 10 minutes"})
# TODO: Temporary change: suppress error signal to frontend on timeout. Needs proper fix later.
chat_logger.warning("SSE timeout: No data received, closing connection",
extra={"timeout_seconds": timeout_seconds})
yield sse_json("error", {"message": f"Connection timeout: No data received for {timeout_seconds // 60} minutes"})
cleanup_triggered = await _cleanup_task_lock_safe(task_lock, "TIMEOUT")
break
except StopAsyncIteration:
break
except asyncio.CancelledError:
chat_logger.info("Stream cancelled")
chat_logger.info("[STREAM-CANCELLED] Stream cancelled, triggering cleanup")
if not cleanup_triggered:
await _cleanup_task_lock_safe(task_lock, "CANCELLED")
raise
except Exception as e:
chat_logger.error(f"Error in stream wrapper: {e}", exc_info=True)
chat_logger.error("[STREAM-ERROR] Unexpected error in stream wrapper",
extra={"error": str(e)}, exc_info=True)
if not cleanup_triggered:
await _cleanup_task_lock_safe(task_lock, "ERROR")
raise
@@ -76,8 +116,10 @@ async def timeout_stream_wrapper(stream_generator, timeout_seconds: int = SSE_TI
@traceroot.trace()
async def post(data: Chat, request: Request):
chat_logger.info(
"Starting new chat session", extra={"project_id": data.project_id, "task_id": data.task_id, "user": data.email}
"Starting new chat session",
extra={"project_id": data.project_id, "task_id": data.task_id, "user": data.email}
)
task_lock = get_or_create_task_lock(data.project_id)
# Set user-specific environment path for this thread
@@ -93,9 +135,9 @@ async def post(data: Chat, request: Request):
# Set user-specific search engine configuration if provided
if data.search_config:
for key, value in data.search_config.items():
if value: # Only set non-empty values
if value:
os.environ[key] = value
chat_logger.info(f"Set search config: {key}", extra={"project_id": data.project_id})
chat_logger.debug(f"Set search config: {key}", extra={"project_id": data.project_id})
email_sanitized = re.sub(r'[\\/*?:"<>|\s]', "_", data.email.split("@")[0]).strip(".")
camel_log = (
@@ -120,11 +162,11 @@ async def post(data: Chat, request: Request):
await task_lock.put_queue(ActionImproveData(data=data.question, new_task_id=data.task_id))
chat_logger.info(
"Chat session initialized, starting streaming response",
"Chat session initialized",
extra={"project_id": data.project_id, "task_id": data.task_id, "log_dir": str(camel_log)},
)
return StreamingResponse(
timeout_stream_wrapper(step_solve(data, request, task_lock)), media_type="text/event-stream"
timeout_stream_wrapper(step_solve(data, request, task_lock), task_lock=task_lock), media_type="text/event-stream"
)
@@ -315,5 +357,5 @@ def skip_task(project_id: str):
return Response(status_code=201)
except Exception as e:
chat_logger.error(f"[STOP-BUTTON] Error skipping task for project_id: {project_id}: {e}")
chat_logger.error(f"[STOP-BUTTON] Error skipping task for project_id: {project_id}: {e}")
raise UserException(code.error, f"Failed to skip task: {str(e)}")

View file

@@ -36,6 +36,7 @@ McpServers = dict[Literal["mcpServers"], dict[str, dict]]
PLATFORM_MAPPING = {
"Z.ai": "openai-compatible-model",
"ModelArk": "openai-compatible-model",
}
class Chat(BaseModel):

View file

@@ -14,6 +14,7 @@ from app.service.task import (
ActionImproveData,
ActionInstallMcpData,
ActionNewAgent,
ActionTimeoutData,
TaskLock,
delete_task_lock,
set_current_task_id,
@@ -39,6 +40,7 @@ from app.utils.agent import (
social_medium_agent,
task_summary_agent,
question_confirm_agent,
set_main_event_loop,
)
from app.service.task import Action, Agents
from app.utils.server.sync_step import sync_step
@@ -236,15 +238,9 @@ def build_context_for_workforce(task_lock: TaskLock, options: Chat) -> str:
@sync_step
@traceroot.trace()
async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
# if True:
# import faulthandler
# faulthandler.enable()
# for second in [5, 10, 20, 30, 60, 120, 240]:
# faulthandler.dump_traceback_later(second)
start_event_loop = True
# Initialize task_lock attributes
if not hasattr(task_lock, 'conversation_history'):
task_lock.conversation_history = []
if not hasattr(task_lock, 'last_task_result'):
@@ -257,15 +253,15 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
# Create or reuse persistent question_agent
if task_lock.question_agent is None:
task_lock.question_agent = question_confirm_agent(options)
logger.info(f"Created new persistent question_agent for project {options.project_id}")
else:
logger.info(f"Reusing existing question_agent with {len(task_lock.conversation_history)} history entries")
logger.debug(f"Reusing existing question_agent with {len(task_lock.conversation_history)} history entries")
question_agent = task_lock.question_agent
# Other variables
camel_task = None
workforce = None
mcp = None
last_completed_task_result = "" # Track the last completed task result
summary_task_content = "" # Track task summary
loop_iteration = 0
@@ -338,14 +334,12 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
})
continue
# Simplified logic: attachments mean workforce, otherwise let agent decide
# Determine task complexity: attachments mean workforce, otherwise let agent decide
is_complex_task: bool
if len(options.attaches) > 0:
# Questions with attachments always need workforce
is_complex_task = True
logger.info(f"[NEW-QUESTION] Has attachments, treating as complex task")
else:
logger.info(f"[NEW-QUESTION] Calling question_confirm to determine complexity")
is_complex_task = await question_confirm(question_agent, question, task_lock)
logger.info(f"[NEW-QUESTION] question_confirm result: is_complex={is_complex_task}")
@@ -385,56 +379,35 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
except Exception as e:
logger.error(f"Error cleaning up folder: {e}")
else:
logger.info(f"[NEW-QUESTION] 🔧 Complex task, creating workforce and decomposing")
logger.info(f"[NEW-QUESTION] Complex task, creating workforce and decomposing")
# Update the sync_step with new task_id
if hasattr(item, 'new_task_id') and item.new_task_id:
set_current_task_id(options.project_id, item.new_task_id)
# Reset summary generation flag for new tasks to ensure proper summaries
task_lock.summary_generated = False
logger.info("[NEW-QUESTION] Reset summary_generated flag for new task", extra={"project_id": options.project_id, "new_task_id": item.new_task_id})
logger.info(f"[NEW-QUESTION] Sending 'confirmed' SSE to frontend")
yield sse_json("confirmed", {"question": question})
logger.info(f"[NEW-QUESTION] Building context for coordinator")
context_for_coordinator = build_context_for_workforce(task_lock, options)
# Check if workforce exists - if so, reuse it (agents are preserved)
# Otherwise create new workforce
# Check if workforce exists - if so, reuse it; otherwise create new workforce
if workforce is not None:
logger.info(f"[NEW-QUESTION] 🔄 Workforce exists (id={id(workforce)}), state={workforce._state.name}, _running={workforce._running}")
logger.info(f"[NEW-QUESTION] ✅ Reusing existing workforce with preserved agents")
# Workforce is already stopped from skip_task, ready for new decomposition
logger.debug(f"[NEW-QUESTION] Reusing existing workforce (id={id(workforce)})")
else:
logger.info(f"[NEW-QUESTION] 🏭 Creating NEW workforce instance (workforce=None)")
logger.info(f"[NEW-QUESTION] Creating NEW workforce instance")
(workforce, mcp) = await construct_workforce(options)
logger.info(f"[NEW-QUESTION] ✅ NEW Workforce instance created, id={id(workforce)}")
for new_agent in options.new_agents:
workforce.add_single_agent_worker(
format_agent_description(new_agent), await new_agent_model(new_agent, options)
)
task_lock.status = Status.confirmed
# If camel_task already exists (from previous paused task), add new question as subtask
# Otherwise, create a new camel_task
if camel_task is not None:
logger.info(f"[NEW-QUESTION] 🔄 camel_task exists (id={camel_task.id}), adding new question as context")
# Update the task content with new question
clean_task_content = question + options.summary_prompt
logger.info(f"[NEW-QUESTION] Updating existing camel_task content with new question")
# We keep the existing task structure but update content for new decomposition
camel_task = Task(content=clean_task_content, id=options.task_id)
if len(options.attaches) > 0:
camel_task.additional_info = {Path(file_path).name: file_path for file_path in options.attaches}
else:
clean_task_content = question + options.summary_prompt
logger.info(f"[NEW-QUESTION] Creating NEW camel_task with id={options.task_id}")
camel_task = Task(content=clean_task_content, id=options.task_id)
if len(options.attaches) > 0:
camel_task.additional_info = {Path(file_path).name: file_path for file_path in options.attaches}
# Create camel_task for the question
clean_task_content = question + options.summary_prompt
camel_task = Task(content=clean_task_content, id=options.task_id)
if len(options.attaches) > 0:
camel_task.additional_info = {Path(file_path).name: file_path for file_path in options.attaches}
# Stream decomposition in background so queue items (decompose_text) are processed immediately
logger.info(f"[NEW-QUESTION] 🧩 Starting task decomposition via workforce.eigent_make_sub_tasks")
# Stream decomposition in background
stream_state = {"subtasks": [], "seen_ids": set(), "last_content": ""}
state_holder: dict[str, Any] = {"sub_tasks": [], "summary_task": ""}
@@ -446,8 +419,6 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
def on_stream_text(chunk):
try:
# With task_agent using stream_accumulate=True, chunk.msg.content is accumulated content
# We need to calculate the delta to send only new content to frontend
accumulated_content = chunk.msg.content if hasattr(chunk, 'msg') and chunk.msg else str(chunk)
last_content = stream_state["last_content"]
@@ -485,52 +456,45 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
on_stream_batch,
on_stream_text,
)
if stream_state["subtasks"]:
sub_tasks = stream_state["subtasks"]
state_holder["sub_tasks"] = sub_tasks
logger.info(f"[NEW-QUESTION] ✅ Task decomposed into {len(sub_tasks)} subtasks")
logger.info(f"Task decomposed into {len(sub_tasks)} subtasks")
try:
setattr(task_lock, "decompose_sub_tasks", sub_tasks)
except Exception:
pass
logger.info(f"[NEW-QUESTION] Generating task summary")
# Generate task summary
summary_task_agent = task_summary_agent(options)
try:
summary_task_content = await asyncio.wait_for(
summary_task(summary_task_agent, camel_task), timeout=10
)
task_lock.summary_generated = True
logger.info("[NEW-QUESTION] ✅ Summary generated successfully", extra={"project_id": options.project_id})
except asyncio.TimeoutError:
logger.warning("summary_task timeout", extra={"project_id": options.project_id, "task_id": options.task_id})
task_lock.summary_generated = True
fallback_name = "Task"
content_preview = camel_task.content if hasattr(camel_task, "content") else ""
if content_preview is None:
content_preview = ""
summary_task_content = (
(content_preview[:80] + "...") if len(content_preview) > 80 else content_preview
)
summary_task_content = f"{fallback_name}|{summary_task_content}"
summary_task_content = (content_preview[:80] + "...") if len(content_preview) > 80 else content_preview
summary_task_content = f"Task|{summary_task_content}"
except Exception:
task_lock.summary_generated = True
fallback_name = "Task"
content_preview = camel_task.content if hasattr(camel_task, "content") else ""
if content_preview is None:
content_preview = ""
summary_task_content = (
(content_preview[:80] + "...") if len(content_preview) > 80 else content_preview
)
summary_task_content = f"{fallback_name}|{summary_task_content}"
summary_task_content = (content_preview[:80] + "...") if len(content_preview) > 80 else content_preview
summary_task_content = f"Task|{summary_task_content}"
state_holder["summary_task"] = summary_task_content
try:
setattr(task_lock, "summary_task_content", summary_task_content)
except Exception:
pass
logger.info(f"[NEW-QUESTION] 📤 Sending to_sub_tasks SSE to frontend (task card)")
logger.info(f"[NEW-QUESTION] to_sub_tasks data: task_id={camel_task.id}, summary={summary_task_content[:50]}..., subtasks_count={len(camel_task.subtasks)}")
payload = {
"project_id": options.project_id,
"task_id": options.task_id,
@@ -540,7 +504,6 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
"summary_task": summary_task_content,
}
await task_lock.put_queue(ActionDecomposeProgressData(data=payload))
logger.info(f"[NEW-QUESTION] ✅ to_sub_tasks SSE sent")
except Exception as e:
logger.error(f"Error in background decomposition: {e}", exc_info=True)
@@ -780,7 +743,6 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
logger.info(f"[LIFECYCLE] Multi-turn: building context for workforce")
context_for_multi_turn = build_context_for_workforce(task_lock, options)
logger.info(f"[LIFECYCLE] Multi-turn: calling workforce.handle_decompose_append_task for new task decomposition")
stream_state = {"subtasks": [], "seen_ids": set(), "last_content": ""}
def on_stream_batch(new_tasks: list[Task], is_final: bool = False):
@@ -791,8 +753,6 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
def on_stream_text(chunk):
try:
# With task_agent using stream_accumulate=True, chunk.msg.content is accumulated content
# We need to calculate the delta to send only new content to frontend
accumulated_content = chunk.msg.content if hasattr(chunk, 'msg') and chunk.msg else str(chunk)
last_content = stream_state["last_content"]
@@ -906,6 +866,10 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
elif item.action == Action.search_mcp:
yield sse_json("search_mcp", item.data)
elif item.action == Action.install_mcp:
if mcp is None:
logger.error(f"Cannot install MCP: mcp agent not initialized for project {options.project_id}")
yield sse_json("error", {"message": "MCP agent not initialized. Please start a complex task first."})
continue
task = asyncio.create_task(install_mcp(mcp, item))
task_lock.add_background_task(task)
elif item.action == Action.terminal:
@@ -936,6 +900,28 @@ async def step_solve(options: Chat, request: Request, task_lock: TaskLock):
format_agent_description(item), await new_agent_model(item, options)
)
workforce.resume()
elif item.action == Action.timeout:
logger.info("=" * 80)
logger.info(f"⏰ [LIFECYCLE] TIMEOUT action received for project {options.project_id}, task {options.task_id}")
logger.info(f"[LIFECYCLE] Timeout data: {item.data}")
logger.info("=" * 80)
# Send timeout error to frontend
timeout_message = item.data.get("message", "Task execution timeout")
in_flight = item.data.get("in_flight_tasks", 0)
pending = item.data.get("pending_tasks", 0)
timeout_seconds = item.data.get("timeout_seconds", 0)
yield sse_json("error", {
"message": timeout_message,
"type": "timeout",
"details": {
"in_flight_tasks": in_flight,
"pending_tasks": pending,
"timeout_seconds": timeout_seconds,
}
})
elif item.action == Action.end:
logger.info("=" * 80)
logger.info(f"🏁 [LIFECYCLE] END action received for project {options.project_id}, task {options.task_id}")
@@ -1277,89 +1263,136 @@ async def get_task_result_with_optional_summary(task: Task, options: Chat) -> st
@traceroot.trace()
async def construct_workforce(options: Chat) -> tuple[Workforce, ListenChatAgent]:
logger.info("Constructing workforce", extra={"project_id": options.project_id, "task_id": options.task_id})
"""Construct a workforce with all required agents.
This function creates all agents in PARALLEL to minimize startup time.
Sync functions are run in thread pool, async functions are awaited concurrently.
"""
logger.debug("construct_workforce started", extra={"project_id": options.project_id, "task_id": options.task_id})
# Store main event loop reference for thread-safe async task scheduling
# This allows agent_model() to schedule tasks when called from worker threads
set_main_event_loop(asyncio.get_running_loop())
working_directory = get_working_directory(options)
logger.debug("Working directory set", extra={"working_directory": working_directory})
[coordinator_agent, task_agent] = [
agent_model(
key,
prompt,
options,
[
*(
ToolkitMessageIntegration(
message_handler=HumanToolkit(options.project_id, key).send_message_to_user
).register_toolkits(NoteTakingToolkit(options.project_id, working_directory=working_directory))
).get_tools()
],
)
for key, prompt in {
Agents.coordinator_agent: f"""
# ========================================================================
# Define agent creation functions
# ========================================================================
def _create_coordinator_and_task_agents() -> list[ListenChatAgent]:
"""Create coordinator and task agents (sync, runs in thread pool)."""
return [
agent_model(
key,
prompt,
options,
[
*(
ToolkitMessageIntegration(
message_handler=HumanToolkit(options.project_id, key).send_message_to_user
).register_toolkits(NoteTakingToolkit(options.project_id, working_directory=working_directory))
).get_tools()
],
)
for key, prompt in {
Agents.coordinator_agent: f"""
You are a helpful coordinator.
- You are now working in system {platform.system()} with architecture
{platform.machine()} at working directory `{working_directory}`. All local file operations must occur here, but you can access files from any place in the file system. For all file system operations, you MUST use absolute paths to ensure precision and avoid ambiguity.
The current date is {datetime.date.today()}. For any date-related tasks, you MUST use this as the current date.
- If a task assigned to another agent fails, you should re-assign it to the
`Developer_Agent`. The `Developer_Agent` is a powerful agent with terminal
access and can resolve a wide range of issues.
- If a task assigned to another agent fails, you should re-assign it to the
`Developer_Agent`. The `Developer_Agent` is a powerful agent with terminal
access and can resolve a wide range of issues.
""",
Agents.task_agent: f"""
Agents.task_agent: f"""
You are a helpful task planner.
- You are now working in system {platform.system()} with architecture
{platform.machine()} at working directory `{working_directory}`. All local file operations must occur here, but you can access files from any place in the file system. For all file system operations, you MUST use absolute paths to ensure precision and avoid ambiguity.
The current date is {datetime.date.today()}. For any date-related tasks, you MUST use this as the current date.
""",
}.items()
]
new_worker_agent = agent_model(
Agents.new_worker_agent,
f"""
}.items()
]
def _create_new_worker_agent() -> ListenChatAgent:
    """Build the generic worker agent (sync; intended to run in a thread pool).

    The worker gets the human-interaction tools plus note-taking tools that
    are wrapped to report progress back to the user.
    """
    system_prompt = f"""
You are a helpful assistant.
- You are now working in system {platform.system()} with architecture
{platform.machine()} at working directory `{working_directory}`. All local file operations must occur here, but you can access files from any place in the file system. For all file system operations, you MUST use absolute paths to ensure precision and avoid ambiguity.
The current date is {datetime.date.today()}. For any date-related tasks, you MUST use this as the current date.
"""
    # Note-taking toolkit, wrapped so each tool call can notify the user.
    messaged_notes = ToolkitMessageIntegration(
        message_handler=HumanToolkit(options.project_id, Agents.new_worker_agent).send_message_to_user
    ).register_toolkits(
        NoteTakingToolkit(options.project_id, working_directory=working_directory)
    )
    worker_tools = [
        *HumanToolkit.get_can_use_tools(options.project_id, Agents.new_worker_agent),
        *messaged_notes.get_tools(),
    ]
    return agent_model(
        Agents.new_worker_agent,
        system_prompt,
        options,
        worker_tools,
    )
# msg_toolkit = AgentCommunicationToolkit(max_message_history=100)
options,
[
*HumanToolkit.get_can_use_tools(options.project_id, Agents.new_worker_agent),
*(
ToolkitMessageIntegration(
message_handler=HumanToolkit(options.project_id, Agents.new_worker_agent).send_message_to_user
).register_toolkits(NoteTakingToolkit(options.project_id, working_directory=working_directory))
).get_tools(),
],
)
searcher = browser_agent(options)
developer = await developer_agent(options)
documenter = await document_agent(options)
multi_modaler = multi_modal_agent(options)
# ========================================================================
# Execute all agent creations in PARALLEL
# ========================================================================
# msg_toolkit.register_agent("Worker", new_worker_agent)
# msg_toolkit.register_agent("Browser_Agent", searcher)
# msg_toolkit.register_agent("Developer_Agent", developer)
# msg_toolkit.register_agent("Document_Agent", documenter)
# msg_toolkit.register_agent("Multi_Modal_Agent", multi_modaler)
try:
# asyncio.gather runs all coroutines concurrently
# asyncio.to_thread runs sync functions in thread pool without blocking event loop
results = await asyncio.gather(
asyncio.to_thread(_create_coordinator_and_task_agents),
asyncio.to_thread(_create_new_worker_agent),
asyncio.to_thread(browser_agent, options),
developer_agent(options),
document_agent(options),
asyncio.to_thread(multi_modal_agent, options),
mcp_agent(options),
)
except Exception as e:
logger.error(f"Failed to create agents in parallel: {e}", exc_info=True)
raise
finally:
# Always clear event loop reference after parallel agent creation completes
# This prevents stale references and potential cross-request interference
set_main_event_loop(None)
# Unpack results
(
coord_task_agents,
new_worker_agent,
searcher,
developer,
documenter,
multi_modaler,
mcp,
) = results
coordinator_agent, task_agent = coord_task_agents
# ========================================================================
# Create Workforce instance and add workers (must be sequential)
# ========================================================================
# Convert string model_platform to enum for comparison
try:
model_platform_enum = ModelPlatformType(options.model_platform.lower())
except (ValueError, AttributeError):
# If conversion fails, default to non-OpenAI behavior
model_platform_enum = None
workforce = Workforce(
options.project_id,
"A workforce",
graceful_shutdown_timeout=3, # 30 seconds for debugging
graceful_shutdown_timeout=3,
share_memory=False,
coordinator_agent=coordinator_agent,
task_agent=task_agent,
new_worker_agent=new_worker_agent,
use_structured_output_handler=False if model_platform_enum == ModelPlatformType.OPENAI else True,
)
workforce.add_single_agent_worker(
"Developer Agent: A master-level coding assistant with a powerful "
"terminal. It can write and execute code, manage files, automate "
@ -1387,18 +1420,7 @@ The current date is {datetime.date.today()}. For any date-related tasks, you MUS
"generate new images from text prompts.",
multi_modaler,
)
# workforce.add_single_agent_worker(
# "Social Media Agent: A social media management assistant for "
# "handling tasks related to WhatsApp, Twitter, LinkedIn, Reddit, "
# "Notion, Slack, and other social platforms.",
# await social_medium_agent(options),
# )
mcp = await mcp_agent(options)
# workforce.add_single_agent_worker(
# "MCP Agent: A Model Context Protocol agent that provides access "
# "to external tools and services through MCP integrations.",
# mcp,
# )
return workforce, mcp

View file

@ -45,6 +45,7 @@ class Action(str, Enum):
add_task = "add_task" # user -> backend
remove_task = "remove_task" # user -> backend
skip_task = "skip_task" # user -> backend
timeout = "timeout" # backend -> user (task timeout error)
class ActionImproveData(BaseModel):
@ -173,6 +174,11 @@ class ActionEndData(BaseModel):
action: Literal[Action.end] = Action.end
class ActionTimeoutData(BaseModel):
action: Literal[Action.timeout] = Action.timeout
data: dict[Literal["message", "in_flight_tasks", "pending_tasks", "timeout_seconds"], str | int]
class ActionSupplementData(BaseModel):
action: Literal[Action.supplement] = Action.supplement
data: SupplementChat
@ -233,6 +239,7 @@ ActionData = (
| ActionTerminalData
| ActionStopData
| ActionEndData
| ActionTimeoutData
| ActionSupplementData
| ActionTakeControl
| ActionNewAgent
@ -270,6 +277,8 @@ class TaskLock:
last_accessed: datetime
background_tasks: set[asyncio.Task]
"""Track all background tasks for cleanup"""
registered_toolkits: list[Any]
"""Track toolkits for cleanup (e.g., TerminalToolkit venvs)"""
# Context management fields
conversation_history: List[Dict[str, Any]]
@ -290,6 +299,7 @@ class TaskLock:
self.created_at = datetime.now()
self.last_accessed = datetime.now()
self.background_tasks = set()
self.registered_toolkits = []
# Initialize context management fields
self.conversation_history = []
@ -339,8 +349,42 @@ class TaskLock:
except asyncio.CancelledError:
pass
self.background_tasks.clear()
# Clean up registered toolkits (e.g., remove TerminalToolkit venvs)
for toolkit in self.registered_toolkits:
try:
if hasattr(toolkit, 'cleanup'):
toolkit.cleanup()
logger.info("Toolkit cleanup completed", extra={"task_id": self.id, "toolkit": type(toolkit).__name__})
except Exception as e:
logger.warning(f"Failed to cleanup toolkit: {e}", extra={"task_id": self.id, "toolkit": type(toolkit).__name__})
self.registered_toolkits.clear()
logger.info("Task lock cleanup completed", extra={"task_id": self.id})
def register_toolkit(self, toolkit: Any) -> None:
    """Track a toolkit whose resources must be released when the task ends.

    Used for toolkits that create external resources (e.g. TerminalToolkit
    virtual environments) so they can be cleaned up on task completion.
    Registering the same toolkit instance twice is a no-op.
    """
    # Identity (not equality) check: the same instance must only be
    # cleaned up once.
    for existing in self.registered_toolkits:
        if existing is toolkit:
            logger.debug(
                "Toolkit already registered, skipping",
                extra={"task_id": self.id, "toolkit": type(toolkit).__name__},
            )
            return
    self.registered_toolkits.append(toolkit)
    logger.debug(
        "Toolkit registered for cleanup",
        extra={
            "task_id": self.id,
            "toolkit": type(toolkit).__name__,
            "total_registered": len(self.registered_toolkits),
        },
    )
def add_conversation(self, role: str, content: str | dict):
"""Add a conversation entry to history"""
logger.debug("Adding conversation entry", extra={"task_id": self.id, "role": role, "content_length": len(str(content))})

View file

@ -1,18 +1,79 @@
import asyncio
import contextvars
import json
import os
import platform
from threading import Event
from threading import Event, Lock
import traceback
from typing import Any, Callable, Dict, List, Tuple
import uuid
from utils import traceroot_wrapper as traceroot
# Thread-safe reference to main event loop using contextvars
# This ensures each request has its own event loop reference, avoiding race conditions
_main_event_loop_var: contextvars.ContextVar[asyncio.AbstractEventLoop | None] = contextvars.ContextVar(
'_main_event_loop', default=None
)
# Global fallback for main event loop reference
# Used when contextvars don't propagate to worker threads (e.g., asyncio.to_thread)
_GLOBAL_MAIN_LOOP: asyncio.AbstractEventLoop | None = None
_GLOBAL_MAIN_LOOP_LOCK = Lock()
def set_main_event_loop(loop: asyncio.AbstractEventLoop | None):
    """Record ``loop`` as the main event loop for cross-thread scheduling.

    The reference is stored in two places: a lock-protected module global
    (fallback for thread-pool workers, where contextvars may not propagate)
    and a ContextVar (per-request isolation). Call this from the main async
    context before spawning threads that schedule async tasks; pass ``None``
    to clear both references.
    """
    global _GLOBAL_MAIN_LOOP
    with _GLOBAL_MAIN_LOOP_LOCK:
        _GLOBAL_MAIN_LOOP = loop
    _main_event_loop_var.set(loop)
def _schedule_async_task(coro):
    """Schedule an async coroutine as a task, thread-safe.

    Handles scheduling from both the main event loop thread and from worker
    threads (e.g. when using asyncio.to_thread).

    Args:
        coro: Coroutine to schedule. If no running or registered event loop
            is available, the coroutine is dropped and an error is logged.
    """
    try:
        # Succeeds only when called from inside an event loop thread.
        loop = asyncio.get_running_loop()
    except RuntimeError:
        loop = None

    if loop is not None:
        # The event loop keeps only a weak reference to tasks, so an
        # otherwise-unreferenced task can be garbage-collected before it
        # runs (documented asyncio pitfall). Hold a strong reference until
        # the task completes.
        pending = getattr(_schedule_async_task, "_pending_tasks", None)
        if pending is None:
            pending = set()
            _schedule_async_task._pending_tasks = pending
        task = loop.create_task(coro)
        pending.add(task)
        task.add_done_callback(pending.discard)
        return

    # Worker thread: prefer the per-request ContextVar reference; fall back
    # to the global reference when the context did not propagate here or
    # the context's loop is no longer running.
    main_loop = _main_event_loop_var.get()
    if main_loop is None or not main_loop.is_running():
        with _GLOBAL_MAIN_LOOP_LOCK:
            main_loop = _GLOBAL_MAIN_LOOP

    if main_loop is not None and main_loop.is_running():
        # run_coroutine_threadsafe returns a concurrent.futures.Future kept
        # alive by the target loop; no extra reference is needed.
        asyncio.run_coroutine_threadsafe(coro, main_loop)
    else:
        # This should not happen in normal operation - log error and skip
        traceroot.get_logger("agent").error(
            "No event loop available for async task scheduling, task skipped. "
            "Ensure set_main_event_loop() is called before parallel agent creation."
        )
from camel.agents import ChatAgent
from camel.agents.chat_agent import StreamingChatAgentResponse, AsyncStreamingChatAgentResponse
from camel.agents.chat_agent import (
StreamingChatAgentResponse,
AsyncStreamingChatAgentResponse,
)
from camel.agents._types import ToolCallRequest
from camel.memories import AgentMemory
from camel.messages import BaseMessage
from camel.models import BaseModelBackend, ModelFactory, ModelManager, OpenAIAudioModels, ModelProcessingError
from camel.models import (
BaseModelBackend,
ModelFactory,
ModelManager,
OpenAIAudioModels,
ModelProcessingError,
)
from camel.responses import ChatAgentResponse
from camel.terminators import ResponseTerminator
from camel.toolkits import FunctionTool, RegisteredAgentToolkit
@ -79,25 +140,29 @@ class ListenChatAgent(ChatAgent):
api_task_id: str,
agent_name: str,
system_message: BaseMessage | str | None = None,
model: BaseModelBackend
| ModelManager
| Tuple[str, str]
| str
| ModelType
| Tuple[ModelPlatformType, ModelType]
| List[BaseModelBackend]
| List[str]
| List[ModelType]
| List[Tuple[str, str]]
| List[Tuple[ModelPlatformType, ModelType]]
| None = None,
model: (
BaseModelBackend
| ModelManager
| Tuple[str, str]
| str
| ModelType
| Tuple[ModelPlatformType, ModelType]
| List[BaseModelBackend]
| List[str]
| List[ModelType]
| List[Tuple[str, str]]
| List[Tuple[ModelPlatformType, ModelType]]
| None
) = None,
memory: AgentMemory | None = None,
message_window_size: int | None = None,
token_limit: int | None = None,
output_language: str | None = None,
tools: List[FunctionTool | Callable[..., Any]] | None = None,
toolkits_to_register_agent: List[RegisteredAgentToolkit] | None = None,
external_tools: List[FunctionTool | Callable[..., Any] | Dict[str, Any]] | None = None,
external_tools: (
List[FunctionTool | Callable[..., Any] | Dict[str, Any]] | None
) = None,
response_terminators: List[ResponseTerminator] | None = None,
scheduling_strategy: str = "round_robin",
max_iteration: int | None = None,
@ -153,7 +218,11 @@ class ListenChatAgent(ChatAgent):
"agent_name": self.agent_name,
"process_task_id": self.process_task_id,
"agent_id": self.agent_id,
"message": input_message.content if isinstance(input_message, BaseMessage) else input_message,
"message": (
input_message.content
if isinstance(input_message, BaseMessage)
else input_message
),
},
)
)
@ -175,17 +244,22 @@ class ListenChatAgent(ChatAgent):
asyncio.create_task(task_lock.put_queue(ActionBudgetNotEnough()))
else:
message = str(e)
traceroot_logger.error(f"Agent {self.agent_name} model processing error: {e}")
traceroot_logger.error(
f"Agent {self.agent_name} model processing error: {e}"
)
total_tokens = 0
except Exception as e:
res = None
error_info = e
traceroot_logger.error(f"Agent {self.agent_name} unexpected error in step: {e}", exc_info=True)
traceroot_logger.error(
f"Agent {self.agent_name} unexpected error in step: {e}", exc_info=True
)
message = f"Error processing message: {e!s}"
total_tokens = 0
if res is not None:
if isinstance(res, StreamingChatAgentResponse):
def _stream_with_deactivate():
last_response: ChatAgentResponse | None = None
# With stream_accumulate=False, we need to accumulate delta content
@ -265,7 +339,11 @@ class ListenChatAgent(ChatAgent):
"agent_name": self.agent_name,
"process_task_id": self.process_task_id,
"agent_id": self.agent_id,
"message": input_message.content if isinstance(input_message, BaseMessage) else input_message,
"message": (
input_message.content
if isinstance(input_message, BaseMessage)
else input_message
),
},
)
)
@ -290,19 +368,26 @@ class ListenChatAgent(ChatAgent):
asyncio.create_task(task_lock.put_queue(ActionBudgetNotEnough()))
else:
message = str(e)
traceroot_logger.error(f"Agent {self.agent_name} model processing error: {e}")
traceroot_logger.error(
f"Agent {self.agent_name} model processing error: {e}"
)
total_tokens = 0
except Exception as e:
res = None
error_info = e
traceroot_logger.error(f"Agent {self.agent_name} unexpected error in async step: {e}", exc_info=True)
traceroot_logger.error(
f"Agent {self.agent_name} unexpected error in async step: {e}",
exc_info=True,
)
message = f"Error processing message: {e!s}"
total_tokens = 0
if res is not None:
message = res.msg.content if res.msg else ""
total_tokens = res.info["usage"]["total_tokens"]
traceroot_logger.info(f"Agent {self.agent_name} completed step, tokens used: {total_tokens}")
traceroot_logger.info(
f"Agent {self.agent_name} completed step, tokens used: {total_tokens}"
)
assert message is not None
@ -345,7 +430,11 @@ class ListenChatAgent(ChatAgent):
try:
task_lock = get_task_lock(self.api_task_id)
toolkit_name = getattr(tool, "_toolkit_name") if hasattr(tool, "_toolkit_name") else "mcp_toolkit"
toolkit_name = (
getattr(tool, "_toolkit_name")
if hasattr(tool, "_toolkit_name")
else "mcp_toolkit"
)
traceroot_logger.debug(
f"Agent {self.agent_name} executing tool: {func_name} from toolkit: {toolkit_name} with args: {json.dumps(args, ensure_ascii=False)}"
)
@ -386,7 +475,10 @@ class ListenChatAgent(ChatAgent):
result_str = repr(result)
MAX_RESULT_LENGTH = 500
if len(result_str) > MAX_RESULT_LENGTH:
result_msg = result_str[:MAX_RESULT_LENGTH] + f"... (truncated, total length: {len(result_str)} chars)"
result_msg = (
result_str[:MAX_RESULT_LENGTH]
+ f"... (truncated, total length: {len(result_str)} chars)"
)
else:
result_msg = result_str
@ -410,16 +502,23 @@ class ListenChatAgent(ChatAgent):
error_msg = f"Error executing tool '{func_name}': {e!s}"
result = f"Tool execution failed: {error_msg}"
mask_flag = False
traceroot_logger.error(f"Tool execution failed for {func_name}: {e}", exc_info=True)
traceroot_logger.error(
f"Tool execution failed for {func_name}: {e}", exc_info=True
)
return self._record_tool_calling(
func_name, args, result, tool_call_id,
func_name,
args,
result,
tool_call_id,
mask_output=mask_flag,
extra_content=tool_call_request.extra_content,
)
@traceroot.trace()
async def _aexecute_tool(self, tool_call_request: ToolCallRequest) -> ToolCallingRecord:
async def _aexecute_tool(
self, tool_call_request: ToolCallRequest
) -> ToolCallingRecord:
func_name = tool_call_request.tool_name
tool: FunctionTool = self._internal_tools[func_name]
@ -436,16 +535,24 @@ class ListenChatAgent(ChatAgent):
toolkit_name = tool._toolkit_name
# Method 2: For MCP tools, check if func has __self__ (the toolkit instance)
if not toolkit_name and hasattr(tool, "func") and hasattr(tool.func, "__self__"):
if (
not toolkit_name
and hasattr(tool, "func")
and hasattr(tool.func, "__self__")
):
toolkit_instance = tool.func.__self__
if hasattr(toolkit_instance, "toolkit_name") and callable(toolkit_instance.toolkit_name):
if hasattr(toolkit_instance, "toolkit_name") and callable(
toolkit_instance.toolkit_name
):
toolkit_name = toolkit_instance.toolkit_name()
# Method 3: Check if tool.func is a bound method with toolkit
if not toolkit_name and hasattr(tool, "func"):
if hasattr(tool.func, "func") and hasattr(tool.func.func, "__self__"):
toolkit_instance = tool.func.func.__self__
if hasattr(toolkit_instance, "toolkit_name") and callable(toolkit_instance.toolkit_name):
if hasattr(toolkit_instance, "toolkit_name") and callable(
toolkit_instance.toolkit_name
):
toolkit_name = toolkit_instance.toolkit_name()
# Default fallback
@ -476,7 +583,7 @@ class ListenChatAgent(ChatAgent):
if hasattr(tool, "func") and hasattr(tool.func, "async_call"):
# Case: FunctionTool wrapping an MCP tool
# Check if the wrapped tool is sync to avoid run_in_executor
if hasattr(tool, 'is_async') and not tool.is_async:
if hasattr(tool, "is_async") and not tool.is_async:
# Sync tool: call directly to preserve ContextVar
result = tool(**args)
if asyncio.iscoroutine(result):
@ -488,7 +595,7 @@ class ListenChatAgent(ChatAgent):
elif hasattr(tool, "async_call") and callable(tool.async_call):
# Case: tool itself has async_call
# Check if this is a sync tool to avoid run_in_executor (which breaks ContextVar)
if hasattr(tool, 'is_async') and not tool.is_async:
if hasattr(tool, "is_async") and not tool.is_async:
# Sync tool: call directly to preserve ContextVar in same thread
result = tool(**args)
# Handle case where synchronous call returns a coroutine
@ -518,7 +625,9 @@ class ListenChatAgent(ChatAgent):
# Capture the error message to prevent framework crash
error_msg = f"Error executing async tool '{func_name}': {e!s}"
result = {"error": error_msg}
traceroot_logger.error(f"Async tool execution failed for {func_name}: {e}", exc_info=True)
traceroot_logger.error(
f"Async tool execution failed for {func_name}: {e}", exc_info=True
)
# Prepare result message with truncation
if isinstance(result, str):
@ -527,7 +636,10 @@ class ListenChatAgent(ChatAgent):
result_str = repr(result)
MAX_RESULT_LENGTH = 500
if len(result_str) > MAX_RESULT_LENGTH:
result_msg = result_str[:MAX_RESULT_LENGTH] + f"... (truncated, total length: {len(result_str)} chars)"
result_msg = (
result_str[:MAX_RESULT_LENGTH]
+ f"... (truncated, total length: {len(result_str)} chars)"
)
else:
result_msg = result_str
@ -544,7 +656,10 @@ class ListenChatAgent(ChatAgent):
)
)
return self._record_tool_calling(
func_name, args, result, tool_call_id,
func_name,
args,
result,
tool_call_id,
extra_content=tool_call_request.extra_content,
)
@ -555,7 +670,7 @@ class ListenChatAgent(ChatAgent):
# Clone tools and collect toolkits that need registration
cloned_tools, toolkits_to_register = self._clone_tools()
new_agent = ListenChatAgent(
api_task_id=self.api_task_id,
agent_name=self.agent_name,
@ -576,7 +691,9 @@ class ListenChatAgent(ChatAgent):
mask_tool_output=self.mask_tool_output,
pause_event=self.pause_event,
prune_tool_calls_from_memory=self.prune_tool_calls_from_memory,
enable_snapshot_clean=self._enable_snapshot_clean,
step_timeout=self.step_timeout,
stream_accumulate=self.stream_accumulate,
)
new_agent.process_task_id = self.process_task_id
@ -605,40 +722,90 @@ def agent_model(
):
task_lock = get_task_lock(options.project_id)
agent_id = str(uuid.uuid4())
traceroot_logger.info(f"Creating agent: {agent_name} with id: {agent_id} for project: {options.project_id}")
asyncio.create_task(
traceroot_logger.debug(
f"Creating agent: {agent_name} with id: {agent_id} for project: {options.project_id}"
)
# Use thread-safe scheduling to support parallel agent creation
_schedule_async_task(
task_lock.put_queue(
ActionCreateAgentData(data={"agent_name": agent_name, "agent_id": agent_id, "tools": tool_names or []})
ActionCreateAgentData(
data={
"agent_name": agent_name,
"agent_id": agent_id,
"tools": tool_names or [],
}
)
)
)
# Build model config, defaulting to streaming for planner
extra_params = options.extra_params or {}
init_param_keys = {
"api_version",
"azure_ad_token",
"azure_ad_token_provider",
"max_retries",
"timeout",
"client",
"async_client",
"azure_deployment_name",
}
init_params = {}
model_config: dict[str, Any] = {}
if options.is_cloud():
model_config["user"] = str(options.project_id)
model_config.update(
{
k: v
for k, v in extra_params.items()
if k not in ["model_platform", "model_type", "api_key", "url"]
}
)
excluded_keys = {"model_platform", "model_type", "api_key", "url"}
# Distribute extra_params between init_params and model_config
for k, v in extra_params.items():
if k in excluded_keys:
continue
# Skip empty values
if v is None or (isinstance(v, str) and not v.strip()):
continue
if k in init_param_keys:
init_params[k] = v
else:
model_config[k] = v
if agent_name == Agents.task_agent:
model_config["stream"] = True
if agent_name == Agents.browser_agent:
try:
model_platform_enum = ModelPlatformType(options.model_platform.lower())
if model_platform_enum in {
ModelPlatformType.OPENAI,
ModelPlatformType.AZURE,
ModelPlatformType.OPENAI_COMPATIBLE_MODEL,
ModelPlatformType.LITELLM,
ModelPlatformType.OPENROUTER,
}:
model_config["parallel_tool_calls"] = False
except (ValueError, AttributeError):
traceroot_logger.error(
f"Invalid model platform for browser agent: {options.model_platform}",
exc_info=True,
)
model_platform_enum = None
model = ModelFactory.create(
model_platform=options.model_platform,
model_type=options.model_type,
api_key=options.api_key,
url=options.api_url,
model_config_dict=model_config or None,
**init_params,
)
return ListenChatAgent(
options.project_id,
agent_name,
system_message,
model=ModelFactory.create(
model_platform=options.model_platform,
model_type=options.model_type,
api_key=options.api_key,
url=options.api_url,
model_config_dict=model_config or None,
),
# output_language=options.language,
model=model,
tools=tools,
agent_id=agent_id,
prune_tool_calls_from_memory=prune_tool_calls_from_memory,
@ -669,20 +836,33 @@ def task_summary_agent(options: Chat):
@traceroot.trace()
async def developer_agent(options: Chat):
working_directory = get_working_directory(options)
traceroot_logger.info(f"Creating developer agent for project: {options.project_id} in directory: {working_directory}")
traceroot_logger.info(
f"Creating developer agent for project: {options.project_id} in directory: {working_directory}"
)
message_integration = ToolkitMessageIntegration(
message_handler=HumanToolkit(options.project_id, Agents.developer_agent).send_message_to_user
message_handler=HumanToolkit(
options.project_id, Agents.developer_agent
).send_message_to_user
)
note_toolkit = NoteTakingToolkit(
api_task_id=options.project_id, agent_name=Agents.developer_agent, working_directory=working_directory
api_task_id=options.project_id,
agent_name=Agents.developer_agent,
working_directory=working_directory,
)
note_toolkit = message_integration.register_toolkits(note_toolkit)
web_deploy_toolkit = WebDeployToolkit(api_task_id=options.project_id)
web_deploy_toolkit = message_integration.register_toolkits(web_deploy_toolkit)
screenshot_toolkit = ScreenshotToolkit(options.project_id, working_directory=working_directory)
screenshot_toolkit = ScreenshotToolkit(
options.project_id, working_directory=working_directory
)
screenshot_toolkit = message_integration.register_toolkits(screenshot_toolkit)
terminal_toolkit = TerminalToolkit(options.project_id, Agents.document_agent, safe_mode=True, clone_current_env=False)
terminal_toolkit = TerminalToolkit(
options.project_id,
Agents.document_agent,
safe_mode=True,
clone_current_env=True,
)
terminal_toolkit = message_integration.register_toolkits(terminal_toolkit)
tools = [
@ -694,26 +874,26 @@ async def developer_agent(options: Chat):
]
system_message = f"""
<role>
You are a Lead Software Engineer, a master-level coding assistant with a
powerful and unrestricted terminal. Your primary role is to solve any
technical task by writing and executing code, installing necessary libraries,
interacting with the operating system, and deploying applications. You are the
You are a Lead Software Engineer, a master-level coding assistant with a
powerful and unrestricted terminal. Your primary role is to solve any
technical task by writing and executing code, installing necessary libraries,
interacting with the operating system, and deploying applications. You are the
team's go-to expert for all technical implementation.
</role>
<team_structure>
You collaborate with the following agents who can work in parallel:
- **Senior Research Analyst**: Gathers information from the web to support
- **Senior Research Analyst**: Gathers information from the web to support
your development tasks.
- **Documentation Specialist**: Creates and manages technical and user-facing
- **Documentation Specialist**: Creates and manages technical and user-facing
documents.
- **Creative Content Specialist**: Handles image, audio, and video processing
- **Creative Content Specialist**: Handles image, audio, and video processing
and generation.
</team_structure>
<operating_environment>
- **System**: {platform.system()} ({platform.machine()})
- **Working Directory**: `{working_directory}`. All local file operations must
- **Working Directory**: `{working_directory}`. All local file operations must
occur here, but you can access files from any place in the file system. For all file system operations, you MUST use absolute paths to ensure precision and avoid ambiguity.
The current date is {NOW_STR}(Accurate to the hour). For any date-related tasks, you MUST use this as the current date.
</operating_environment>
@ -811,7 +991,7 @@ these tips to maximize your effectiveness:
- **Piping**: Use `|` to pass output from one command to another.
- **Permissions**: Use `ls -F` to check file permissions.
- **Installation**: Use `pip3 install` or `apt-get install` for new
packages.If you encounter `ModuleNotFoundError` or `ImportError`, install
packages.If you encounter `ModuleNotFoundError` or `ImportError`, install
the missing package with `pip install <package>`.
- Stop a Process: If a process needs to be terminated, use
@ -846,9 +1026,14 @@ these tips to maximize your effectiveness:
@traceroot.trace()
def browser_agent(options: Chat):
working_directory = get_working_directory(options)
traceroot_logger.info(f"Creating browser agent for project: {options.project_id} in directory: {working_directory}")
traceroot_logger.debug(
f"Creating browser agent for project: {options.project_id} in directory: {working_directory}"
)
message_integration = ToolkitMessageIntegration(
message_handler=HumanToolkit(options.project_id, Agents.browser_agent).send_message_to_user
message_handler=HumanToolkit(
options.project_id, Agents.browser_agent
).send_message_to_user
)
web_toolkit_custom = HybridBrowserToolkit(
@ -870,21 +1055,33 @@ def browser_agent(options: Chat):
"browser_switch_tab",
"browser_enter",
"browser_visit_page",
"browser_scroll",
"browser_sheet_read",
"browser_sheet_input",
"browser_get_page_snapshot",
# "browser_get_som_screenshot",
],
)
# Save reference before registering for toolkits_to_register_agent
web_toolkit_for_agent_registration = web_toolkit_custom
web_toolkit_custom = message_integration.register_toolkits(web_toolkit_custom)
terminal_toolkit = TerminalToolkit(options.project_id, Agents.browser_agent, safe_mode=True, clone_current_env=False)
terminal_toolkit = message_integration.register_functions([terminal_toolkit.shell_exec])
note_toolkit = NoteTakingToolkit(options.project_id, Agents.browser_agent, working_directory=working_directory)
terminal_toolkit = TerminalToolkit(
options.project_id,
Agents.browser_agent,
safe_mode=True,
clone_current_env=True,
)
terminal_toolkit = message_integration.register_functions(
[terminal_toolkit.shell_exec]
)
note_toolkit = NoteTakingToolkit(
options.project_id, Agents.browser_agent, working_directory=working_directory
)
note_toolkit = message_integration.register_toolkits(note_toolkit)
search_tools = SearchToolkit.get_can_use_tools(options.project_id)
# Only register search tools if any are available
if search_tools:
search_tools = message_integration.register_functions(search_tools)
else:
@ -948,9 +1145,9 @@ The current date is {NOW_STR}(Accurate to the hour). For any date-related tasks,
Fabricating or guessing URLs is considered a critical error and must
never be done under any circumstances.
- You SHOULD keep the user informed by providing message_title and
- You SHOULD keep the user informed by providing message_title and
message_description
parameters when calling tools. These optional parameters are available on
parameters when calling tools. These optional parameters are available on
all tools and will automatically notify the user of your progress.
- You MUST NOT answer from your own knowledge. All information
@ -1034,21 +1231,43 @@ Your approach depends on available search tools:
@traceroot.trace()
async def document_agent(options: Chat):
working_directory = get_working_directory(options)
traceroot_logger.info(f"Creating document agent for project: {options.project_id} in directory: {working_directory}")
message_integration = ToolkitMessageIntegration(
message_handler=HumanToolkit(options.project_id, Agents.task_agent).send_message_to_user
traceroot_logger.debug(
f"Creating document agent for project: {options.project_id} in directory: {working_directory}"
)
message_integration = ToolkitMessageIntegration(
message_handler=HumanToolkit(
options.project_id, Agents.task_agent
).send_message_to_user
)
file_write_toolkit = FileToolkit(
options.project_id, working_directory=working_directory
)
file_write_toolkit = FileToolkit(options.project_id, working_directory=working_directory)
pptx_toolkit = PPTXToolkit(options.project_id, working_directory=working_directory)
pptx_toolkit = message_integration.register_toolkits(pptx_toolkit)
mark_it_down_toolkit = MarkItDownToolkit(options.project_id)
mark_it_down_toolkit = message_integration.register_toolkits(mark_it_down_toolkit)
excel_toolkit = ExcelToolkit(options.project_id, working_directory=working_directory)
excel_toolkit = ExcelToolkit(
options.project_id, working_directory=working_directory
)
excel_toolkit = message_integration.register_toolkits(excel_toolkit)
note_toolkit = NoteTakingToolkit(options.project_id, Agents.document_agent, working_directory=working_directory)
note_toolkit = NoteTakingToolkit(
options.project_id, Agents.document_agent, working_directory=working_directory
)
note_toolkit = message_integration.register_toolkits(note_toolkit)
terminal_toolkit = TerminalToolkit(options.project_id, Agents.document_agent, safe_mode=True, clone_current_env=False)
terminal_toolkit = TerminalToolkit(
options.project_id,
Agents.document_agent,
safe_mode=True,
clone_current_env=True,
)
terminal_toolkit = message_integration.register_toolkits(terminal_toolkit)
google_drive_tools = await GoogleDriveMCPToolkit.get_can_use_tools(
options.project_id, options.get_bun_env()
)
tools = [
*file_write_toolkit.get_tools(),
*pptx_toolkit.get_tools(),
@ -1057,7 +1276,7 @@ async def document_agent(options: Chat):
*excel_toolkit.get_tools(),
*note_toolkit.get_tools(),
*terminal_toolkit.get_tools(),
*await GoogleDriveMCPToolkit.get_can_use_tools(options.project_id, options.get_bun_env()),
*google_drive_tools,
]
# if env("EXA_API_KEY") or options.is_cloud():
# search_toolkit = SearchToolkit(options.project_id, Agents.document_agent).search_exa
@ -1065,26 +1284,26 @@ async def document_agent(options: Chat):
# tools.extend(search_toolkit)
system_message = f"""
<role>
You are a Documentation Specialist, responsible for creating, modifying, and
managing a wide range of documents. Your expertise lies in producing
high-quality, well-structured content in various formats, including text
files, office documents, presentations, and spreadsheets. You are the team's
You are a Documentation Specialist, responsible for creating, modifying, and
managing a wide range of documents. Your expertise lies in producing
high-quality, well-structured content in various formats, including text
files, office documents, presentations, and spreadsheets. You are the team's
authority on all things related to documentation.
</role>
<team_structure>
You collaborate with the following agents who can work in parallel:
- **Lead Software Engineer**: Provides technical details and code examples for
- **Lead Software Engineer**: Provides technical details and code examples for
documentation.
- **Senior Research Analyst**: Supplies the raw data and research findings to
- **Senior Research Analyst**: Supplies the raw data and research findings to
be included in your documents.
- **Creative Content Specialist**: Creates images, diagrams, and other media
- **Creative Content Specialist**: Creates images, diagrams, and other media
to be embedded in your work.
</team_structure>
<operating_environment>
- **System**: {platform.system()} ({platform.machine()})
- **Working Directory**: `{working_directory}`. All local file operations must
- **Working Directory**: `{working_directory}`. All local file operations must
occur here, but you can access files from any place in the file system. For all file system operations, you MUST use absolute paths to ensure precision and avoid ambiguity.
The current date is {NOW_STR}(Accurate to the hour). For any date-related tasks, you MUST use this as the current date.
</operating_environment>
@ -1097,7 +1316,7 @@ The current date is {NOW_STR}(Accurate to the hour). For any date-related tasks,
`write_to_file`, `create_presentation`). Your primary output should be
a file, not just content within your response.
- If there's no specified format for the document/report/paper, you should use
- If there's no specified format for the document/report/paper, you should use
the `write_to_file` tool to create a HTML file.
- If the document has many data, you MUST use the terminal tool to
@ -1108,9 +1327,9 @@ The current date is {NOW_STR}(Accurate to the hour). For any date-related tasks,
detailed, and easy-to-read format. Avoid using markdown tables for
presenting data; use plain text formatting instead.
- You SHOULD keep the user informed by providing message_title and
- You SHOULD keep the user informed by providing message_title and
message_description
parameters when calling tools. These optional parameters are available on
parameters when calling tools. These optional parameters are available on
all tools and will automatically notify the user of your progress.
</mandatory_instructions>
@ -1240,20 +1459,39 @@ supported formats including advanced spreadsheet functionality.
@traceroot.trace()
def multi_modal_agent(options: Chat):
working_directory = get_working_directory(options)
traceroot_logger.info(f"Creating multi-modal agent for project: {options.project_id} in directory: {working_directory}")
message_integration = ToolkitMessageIntegration(
message_handler=HumanToolkit(options.project_id, Agents.multi_modal_agent).send_message_to_user
traceroot_logger.debug(
f"Creating multi-modal agent for project: {options.project_id} in directory: {working_directory}"
)
message_integration = ToolkitMessageIntegration(
message_handler=HumanToolkit(
options.project_id, Agents.multi_modal_agent
).send_message_to_user
)
video_download_toolkit = VideoDownloaderToolkit(
options.project_id, working_directory=working_directory
)
video_download_toolkit = message_integration.register_toolkits(
video_download_toolkit
)
video_download_toolkit = VideoDownloaderToolkit(options.project_id, working_directory=working_directory)
video_download_toolkit = message_integration.register_toolkits(video_download_toolkit)
image_analysis_toolkit = ImageAnalysisToolkit(options.project_id)
image_analysis_toolkit = message_integration.register_toolkits(image_analysis_toolkit)
image_analysis_toolkit = message_integration.register_toolkits(
image_analysis_toolkit
)
terminal_toolkit = TerminalToolkit(
options.project_id, agent_name=Agents.multi_modal_agent, safe_mode=True, clone_current_env=False
options.project_id,
agent_name=Agents.multi_modal_agent,
safe_mode=True,
clone_current_env=True,
)
terminal_toolkit = message_integration.register_toolkits(terminal_toolkit)
note_toolkit = NoteTakingToolkit(options.project_id, Agents.multi_modal_agent, working_directory=working_directory)
note_toolkit = NoteTakingToolkit(
options.project_id,
Agents.multi_modal_agent,
working_directory=working_directory,
)
note_toolkit = message_integration.register_toolkits(note_toolkit)
tools = [
*video_download_toolkit.get_tools(),
@ -1273,7 +1511,9 @@ def multi_modal_agent(options: Chat):
api_key=options.api_key,
url=options.api_url,
)
open_ai_image_toolkit = message_integration.register_toolkits(open_ai_image_toolkit)
open_ai_image_toolkit = message_integration.register_toolkits(
open_ai_image_toolkit
)
tools = [
*tools,
*open_ai_image_toolkit.get_tools(),
@ -1293,7 +1533,9 @@ def multi_modal_agent(options: Chat):
url=options.api_url,
),
)
audio_analysis_toolkit = message_integration.register_toolkits(audio_analysis_toolkit)
audio_analysis_toolkit = message_integration.register_toolkits(
audio_analysis_toolkit
)
tools.extend(audio_analysis_toolkit.get_tools())
# if env("EXA_API_KEY") or options.is_cloud():
@ -1303,32 +1545,32 @@ def multi_modal_agent(options: Chat):
system_message = f"""
<role>
You are a Creative Content Specialist, specializing in analyzing and
generating various types of media content. Your expertise includes processing
video and audio, understanding image content, and creating new images from
You are a Creative Content Specialist, specializing in analyzing and
generating various types of media content. Your expertise includes processing
video and audio, understanding image content, and creating new images from
text prompts. You are the team's expert for all multi-modal tasks.
</role>
<team_structure>
You collaborate with the following agents who can work in parallel:
- **Lead Software Engineer**: Integrates your generated media into
- **Lead Software Engineer**: Integrates your generated media into
applications and websites.
- **Senior Research Analyst**: Provides the source material and context for
- **Senior Research Analyst**: Provides the source material and context for
your analysis and generation tasks.
- **Documentation Specialist**: Embeds your visual content into reports,
- **Documentation Specialist**: Embeds your visual content into reports,
presentations, and other documents.
</team_structure>
<operating_environment>
- **System**: {platform.system()} ({platform.machine()})
- **Working Directory**: `{working_directory}`. All local file operations must
- **Working Directory**: `{working_directory}`. All local file operations must
occur here, but you can access files from any place in the file system. For all file system operations, you MUST use absolute paths to ensure precision and avoid ambiguity.
The current date is {NOW_STR}(Accurate to the hour). For any date-related tasks, you MUST use this as the current date.
</operating_environment>
<mandatory_instructions>
- You MUST use the `read_note` tool to to gather all information collected
by other team members by reading ALL notes and write down your findings in
by other team members by reading ALL notes and write down your findings in
the notes.
- When you complete your task, your final response must be a comprehensive
@ -1336,9 +1578,9 @@ The current date is {NOW_STR}(Accurate to the hour). For any date-related tasks,
detailed, and easy-to-read format. Avoid using markdown tables for
presenting data; use plain text formatting instead.
- You SHOULD keep the user informed by providing message_title and
- You SHOULD keep the user informed by providing message_title and
message_description
parameters when calling tools. These optional parameters are available on
parameters when calling tools. These optional parameters are available on
all tools and will automatically notify the user of your progress.
<mandatory_instructions>
@ -1416,7 +1658,9 @@ async def social_medium_agent(options: Chat):
include toolkits: WhatsApp, Twitter, LinkedIn, Reddit, Notion, Slack, Discord and Google Suite.
"""
working_directory = get_working_directory(options)
traceroot_logger.info(f"Creating social medium agent for project: {options.project_id} in directory: {working_directory}")
traceroot_logger.info(
f"Creating social medium agent for project: {options.project_id} in directory: {working_directory}"
)
tools = [
*WhatsAppToolkit.get_can_use_tools(options.project_id),
*TwitterToolkit.get_can_use_tools(options.project_id),
@ -1424,12 +1668,20 @@ async def social_medium_agent(options: Chat):
*RedditToolkit.get_can_use_tools(options.project_id),
*await NotionMCPToolkit.get_can_use_tools(options.project_id),
# *SlackToolkit.get_can_use_tools(options.project_id),
*await GoogleGmailMCPToolkit.get_can_use_tools(options.project_id, options.get_bun_env()),
*await GoogleGmailMCPToolkit.get_can_use_tools(
options.project_id, options.get_bun_env()
),
*GoogleCalendarToolkit.get_can_use_tools(options.project_id),
*HumanToolkit.get_can_use_tools(options.project_id, Agents.social_medium_agent),
*TerminalToolkit(options.project_id, agent_name=Agents.social_medium_agent, clone_current_env=False).get_tools(),
*TerminalToolkit(
options.project_id,
agent_name=Agents.social_medium_agent,
clone_current_env=True,
).get_tools(),
*NoteTakingToolkit(
options.project_id, Agents.social_medium_agent, working_directory=working_directory
options.project_id,
Agents.social_medium_agent,
working_directory=working_directory,
).get_tools(),
# *DiscordToolkit(options.project_id).get_tools(), # Not supported temporarily
# *GoogleSuiteToolkit(options.project_id).get_tools(), # Not supported temporarily
@ -1450,7 +1702,7 @@ be a comprehensive summary of your actions, presented in a clear, detailed,
and easy-to-read format. Avoid using markdown tables for presenting data;
use plain text formatting instead.
- **Working Directory**: `{working_directory}`. All local file operations must
- **Working Directory**: `{working_directory}`. All local file operations must
occur here, but you can access files from any place in the file system. For all file system operations, you MUST use absolute paths to ensure precision and avoid ambiguity.
The current date is {NOW_STR}(Accurate to the hour). For any date-related tasks, you MUST use this as the current date.
@ -1540,9 +1792,18 @@ async def mcp_agent(options: Chat):
if len(options.installed_mcp["mcpServers"]) > 0:
try:
mcp_tools = await get_mcp_tools(options.installed_mcp)
traceroot_logger.info(f"Retrieved {len(mcp_tools)} MCP tools for task {options.project_id}")
traceroot_logger.info(
f"Retrieved {len(mcp_tools)} MCP tools for task {options.project_id}"
)
if mcp_tools:
tool_names = [tool.get_function_name() if hasattr(tool, 'get_function_name') else str(tool) for tool in mcp_tools]
tool_names = [
(
tool.get_function_name()
if hasattr(tool, "get_function_name")
else str(tool)
)
for tool in mcp_tools
]
traceroot_logger.debug(f"MCP tools: {tool_names}")
tools = [*tools, *mcp_tools]
except Exception as e:
@ -1550,14 +1811,18 @@ async def mcp_agent(options: Chat):
task_lock = get_task_lock(options.project_id)
agent_id = str(uuid.uuid4())
traceroot_logger.info(f"Creating MCP agent: {Agents.mcp_agent} with id: {agent_id} for task: {options.project_id}")
traceroot_logger.info(
f"Creating MCP agent: {Agents.mcp_agent} with id: {agent_id} for task: {options.project_id}"
)
asyncio.create_task(
task_lock.put_queue(
ActionCreateAgentData(
data={
"agent_name": Agents.mcp_agent,
"agent_id": agent_id,
"tools": [key for key in options.installed_mcp["mcpServers"].keys()],
"tools": [
key for key in options.installed_mcp["mcpServers"].keys()
],
}
)
)
@ -1571,11 +1836,13 @@ async def mcp_agent(options: Chat):
model_type=options.model_type,
api_key=options.api_key,
url=options.api_url,
model_config_dict={
"user": str(options.project_id),
}
if options.is_cloud()
else None,
model_config_dict=(
{
"user": str(options.project_id),
}
if options.is_cloud()
else None
),
**{
k: v
for k, v in (options.extra_params or {}).items()
@ -1590,7 +1857,9 @@ async def mcp_agent(options: Chat):
@traceroot.trace()
async def get_toolkits(tools: list[str], agent_name: str, api_task_id: str):
traceroot_logger.info(f"Getting toolkits for agent: {agent_name}, task: {api_task_id}, tools: {tools}")
traceroot_logger.info(
f"Getting toolkits for agent: {agent_name}, task: {api_task_id}, tools: {tools}"
)
toolkits = {
"audio_analysis_toolkit": AudioAnalysisToolkit,
"openai_image_toolkit": OpenAIImageToolkit,
@ -1621,7 +1890,11 @@ async def get_toolkits(tools: list[str], agent_name: str, api_task_id: str):
toolkit: AbstractToolkit = toolkits[item]
toolkit.agent_name = agent_name
toolkit_tools = toolkit.get_can_use_tools(api_task_id)
toolkit_tools = await toolkit_tools if asyncio.iscoroutine(toolkit_tools) else toolkit_tools
toolkit_tools = (
await toolkit_tools
if asyncio.iscoroutine(toolkit_tools)
else toolkit_tools
)
res.extend(toolkit_tools)
else:
traceroot_logger.warning(f"Toolkit {item} not found for agent {agent_name}")
@ -1630,10 +1903,12 @@ async def get_toolkits(tools: list[str], agent_name: str, api_task_id: str):
@traceroot.trace()
async def get_mcp_tools(mcp_server: McpServers):
traceroot_logger.info(f"Getting MCP tools for {len(mcp_server['mcpServers'])} servers")
traceroot_logger.info(
f"Getting MCP tools for {len(mcp_server['mcpServers'])} servers"
)
if len(mcp_server["mcpServers"]) == 0:
return []
# Ensure unified auth directory for all mcp-remote servers to avoid re-authentication on each task
config_dict = {**mcp_server}
for server_config in config_dict["mcpServers"].values():
@ -1641,17 +1916,28 @@ async def get_mcp_tools(mcp_server: McpServers):
server_config["env"] = {}
# Set global auth directory to persist authentication across tasks
if "MCP_REMOTE_CONFIG_DIR" not in server_config["env"]:
server_config["env"]["MCP_REMOTE_CONFIG_DIR"] = env("MCP_REMOTE_CONFIG_DIR", os.path.expanduser("~/.mcp-auth"))
server_config["env"]["MCP_REMOTE_CONFIG_DIR"] = env(
"MCP_REMOTE_CONFIG_DIR", os.path.expanduser("~/.mcp-auth")
)
mcp_toolkit = None
try:
mcp_toolkit = MCPToolkit(config_dict=config_dict, timeout=180)
await mcp_toolkit.connect()
traceroot_logger.info(f"Successfully connected to MCP toolkit with {len(mcp_server['mcpServers'])} servers")
traceroot_logger.info(
f"Successfully connected to MCP toolkit with {len(mcp_server['mcpServers'])} servers"
)
tools = mcp_toolkit.get_tools()
if tools:
tool_names = [tool.get_function_name() if hasattr(tool, 'get_function_name') else str(tool) for tool in tools]
tool_names = [
(
tool.get_function_name()
if hasattr(tool, "get_function_name")
else str(tool)
)
for tool in tools
]
traceroot_logger.debug(f"MCP tool names: {tool_names}")
return tools
except asyncio.CancelledError:

View file

@ -2,6 +2,7 @@ import os
import asyncio
import json
from typing import Any, Dict, List, Optional
from typing_extensions import TypedDict
import websockets
import websockets.exceptions
@ -19,6 +20,12 @@ from utils import traceroot_wrapper as traceroot
logger = traceroot.get_logger("hybrid_browser_toolkit")
class SheetCell(TypedDict):
row: int
col: int
text: str
class WebSocketBrowserWrapper(BaseWebSocketBrowserWrapper):
def __init__(self, config: Optional[Dict[str, Any]] = None):
"""Initialize wrapper."""
@ -360,6 +367,10 @@ class HybridBrowserToolkit(BaseHybridBrowserToolkit, AbstractToolkit):
full_visual_mode=self._full_visual_mode,
)
async def browser_sheet_input(self, *, cells: List[SheetCell]) -> Dict[str, Any]:
# Use typing_extensions.TypedDict for Pydantic <3.12 compatibility.
return await super().browser_sheet_input(cells=cells)
@classmethod
def toolkit_name(cls) -> str:
return "Browser Toolkit"

View file

@ -1,7 +1,9 @@
import asyncio
import logging
import os
import shutil
import threading
import time
from concurrent.futures import ThreadPoolExecutor
from typing import Optional
from camel.toolkits.terminal_toolkit import TerminalToolkit as BaseTerminalToolkit
@ -33,7 +35,7 @@ class TerminalToolkit(BaseTerminalToolkit, AbstractToolkit):
session_logs_dir: str | None = None,
safe_mode: bool = True,
allowed_commands: list[str] | None = None,
clone_current_env: bool = False,
clone_current_env: bool = True,
):
self.api_task_id = api_task_id
if agent_name is not None:
@ -41,12 +43,10 @@ class TerminalToolkit(BaseTerminalToolkit, AbstractToolkit):
if working_directory is None:
working_directory = env("file_save_path", os.path.expanduser("~/.eigent/terminal/"))
logger.info("Initializing TerminalToolkit", extra={
logger.debug(f"Initializing TerminalToolkit for agent={self.agent_name}", extra={
"api_task_id": api_task_id,
"agent_name": self.agent_name,
"working_directory": working_directory,
"safe_mode": safe_mode,
"use_docker_backend": use_docker_backend
"clone_current_env": clone_current_env
})
if TerminalToolkit._thread_pool is None:
@ -54,7 +54,6 @@ class TerminalToolkit(BaseTerminalToolkit, AbstractToolkit):
max_workers=1,
thread_name_prefix="terminal_toolkit"
)
logger.debug("Created terminal toolkit thread pool")
super().__init__(
timeout=timeout,
@ -73,6 +72,16 @@ class TerminalToolkit(BaseTerminalToolkit, AbstractToolkit):
"openpyxl",
],
)
# Auto-register with TaskLock for cleanup when task ends
from app.service.task import get_task_lock_if_exists
task_lock = get_task_lock_if_exists(api_task_id)
if task_lock:
task_lock.register_toolkit(self)
logger.info("TerminalToolkit registered for cleanup", extra={
"api_task_id": api_task_id,
"working_directory": working_directory
})
def _write_to_log(self, log_file: str, content: str) -> None:
r"""Write content to log file with optional ANSI stripping.
@ -175,6 +184,49 @@ class TerminalToolkit(BaseTerminalToolkit, AbstractToolkit):
return result
def cleanup(self, remove_venv: bool = True):
"""Clean up all active sessions and optionally remove the virtual environment.
Args:
remove_venv: If True, removes the .venv or .initial_env folder created
by this toolkit. Defaults to True to prevent disk bloat.
"""
# First call parent cleanup to kill all shell sessions
super().cleanup()
if not remove_venv:
return
# Remove cloned env (.venv) if it exists
if self.cloned_env_path and os.path.exists(self.cloned_env_path):
try:
shutil.rmtree(self.cloned_env_path)
logger.info("Removed cloned venv", extra={
"api_task_id": self.api_task_id,
"path": self.cloned_env_path
})
except Exception as e:
logger.warning("Failed to remove cloned venv", extra={
"api_task_id": self.api_task_id,
"path": self.cloned_env_path,
"error": str(e)
})
# Remove initial env (.initial_env) if it exists
if self.initial_env_path and os.path.exists(self.initial_env_path):
try:
shutil.rmtree(self.initial_env_path)
logger.info("Removed initial env", extra={
"api_task_id": self.api_task_id,
"path": self.initial_env_path
})
except Exception as e:
logger.warning("Failed to remove initial env", extra={
"api_task_id": self.api_task_id,
"path": self.initial_env_path,
"error": str(e)
})
@classmethod
def shutdown(cls):
if cls._thread_pool:

View file

@ -1,5 +1,5 @@
import asyncio
from typing import Generator, List
from typing import Generator, List, Optional
from camel.agents import ChatAgent
from camel.societies.workforce.workforce import (
Workforce as BaseWorkforce,
@ -22,6 +22,7 @@ from app.service.task import (
ActionAssignTaskData,
ActionEndData,
ActionTaskStateData,
ActionTimeoutData,
get_camel_task,
get_task_lock,
)
@ -60,6 +61,7 @@ class Workforce(BaseWorkforce):
graceful_shutdown_timeout=graceful_shutdown_timeout,
share_memory=share_memory,
use_structured_output_handler=use_structured_output_handler,
task_timeout_seconds=1800, # 30 minutes
failure_handling_config=FailureHandlingConfig(
enabled_strategies=["retry", "replan"],
),
@ -85,85 +87,66 @@ class Workforce(BaseWorkforce):
on_stream_batch: Optional callback for streaming batches signature (List[Task], bool)
on_stream_text: Optional callback for raw streaming text chunks
"""
logger.info("=" * 80)
logger.info("🧩 [DECOMPOSE] eigent_make_sub_tasks CALLED", extra={
logger.debug("[DECOMPOSE] eigent_make_sub_tasks called", extra={
"api_task_id": self.api_task_id,
"workforce_id": id(self),
"task_id": task.id
})
logger.info(f"[DECOMPOSE] Task content preview: '{task.content[:200]}...'")
logger.info(f"[DECOMPOSE] Has coordinator context: {bool(coordinator_context)}")
logger.info(f"[DECOMPOSE] Current workforce state: {self._state.name}, _running: {self._running}")
logger.info("=" * 80)
if not validate_task_content(task.content, task.id):
task.state = TaskState.FAILED
task.result = "Task failed: Invalid or empty content provided"
logger.warning("[DECOMPOSE] Task rejected: Invalid or empty content", extra={
logger.warning("[DECOMPOSE] Task rejected: Invalid or empty content", extra={
"task_id": task.id,
"content_preview": task.content[:50] + "..." if len(task.content) > 50 else task.content
})
raise UserException(code.error, task.result)
logger.info(f"[DECOMPOSE] Resetting workforce state")
self.reset()
self._task = task
self.set_channel(TaskChannel())
self._state = WorkforceState.RUNNING
task.state = TaskState.OPEN
logger.info(f"[DECOMPOSE] Workforce reset complete, state: {self._state.name}")
logger.info(f"[DECOMPOSE] Calling handle_decompose_append_task")
subtasks = asyncio.run(
self.handle_decompose_append_task(
task,
reset=False,
task,
reset=False,
coordinator_context=coordinator_context,
on_stream_batch=on_stream_batch,
on_stream_batch=on_stream_batch,
on_stream_text=on_stream_text
)
)
logger.info("=" * 80)
logger.info(f"✅ [DECOMPOSE] Task decomposition COMPLETED", extra={
logger.info(f"[DECOMPOSE] Task decomposition completed", extra={
"api_task_id": self.api_task_id,
"task_id": task.id,
"subtasks_count": len(subtasks)
})
logger.info("=" * 80)
return subtasks
async def eigent_start(self, subtasks: list[Task]):
"""start the workforce"""
logger.info("=" * 80)
logger.info("▶️ [WF-LIFECYCLE] eigent_start CALLED", extra={"api_task_id": self.api_task_id, "workforce_id": id(self)})
logger.info(f"[WF-LIFECYCLE] Starting workforce execution with {len(subtasks)} subtasks")
logger.info(f"[WF-LIFECYCLE] Current workforce state: {self._state.name}, _running: {self._running}")
logger.info("=" * 80)
logger.debug(f"[WF-LIFECYCLE] eigent_start called with {len(subtasks)} subtasks", extra={
"api_task_id": self.api_task_id
})
self._pending_tasks.extendleft(reversed(subtasks))
# Save initial snapshot
self.save_snapshot("Initial task decomposition")
try:
logger.info(f"[WF-LIFECYCLE] Calling base class start() method")
await self.start()
logger.info(f"[WF-LIFECYCLE] ✅ Base class start() method completed")
except Exception as e:
logger.error(f"[WF-LIFECYCLE] Error in workforce execution: {e}", extra={
logger.error(f"[WF-LIFECYCLE] Error in workforce execution: {e}", extra={
"api_task_id": self.api_task_id,
"error": str(e)
}, exc_info=True)
self._state = WorkforceState.STOPPED
logger.info(f"[WF-LIFECYCLE] Workforce state set to STOPPED after error")
raise
finally:
logger.info(f"[WF-LIFECYCLE] eigent_start finally block, current state: {self._state.name}")
if self._state != WorkforceState.STOPPED:
self._state = WorkforceState.IDLE
logger.info(f"[WF-LIFECYCLE] Workforce state set to IDLE")
def _decompose_task(self, task: Task, stream_callback=None):
"""Decompose task with optional streaming text callback."""
decompose_prompt = str(
TASK_DECOMPOSE_PROMPT.format(
content=task.content,
@ -171,6 +154,7 @@ class Workforce(BaseWorkforce):
additional_info=task.additional_info,
)
)
self.task_agent.reset()
result = task.decompose(
self.task_agent, decompose_prompt, stream_callback=stream_callback
@ -215,7 +199,7 @@ class Workforce(BaseWorkforce):
Returns:
List[Task]: The decomposed subtasks or the original task
"""
logger.info(f"[DECOMPOSE] handle_decompose_append_task CALLED, task_id={task.id}, reset={reset}")
logger.debug(f"[DECOMPOSE] handle_decompose_append_task called, task_id={task.id}, reset={reset}")
if not validate_task_content(task.content, task.id):
task.state = TaskState.FAILED
@ -227,31 +211,20 @@ class Workforce(BaseWorkforce):
return [task]
if reset and self._state != WorkforceState.RUNNING:
logger.info(f"[DECOMPOSE] Resetting workforce (reset={reset}, state={self._state.name})")
self.reset()
logger.info("[DECOMPOSE] Workforce reset complete")
self._task = task
task.state = TaskState.FAILED
if coordinator_context:
logger.info(f"[DECOMPOSE] Adding coordinator context to task")
original_content = task.content
task_with_context = coordinator_context
if coordinator_context:
task_with_context += "\n=== CURRENT TASK ===\n"
task_with_context += original_content
task_with_context = coordinator_context + "\n=== CURRENT TASK ===\n" + original_content
task.content = task_with_context
logger.info(f"[DECOMPOSE] Calling _decompose_task with context")
subtasks_result = self._decompose_task(task, stream_callback=on_stream_text)
task.content = original_content
else:
logger.info(f"[DECOMPOSE] Calling _decompose_task without context")
subtasks_result = self._decompose_task(task, stream_callback=on_stream_text)
logger.info(f"[DECOMPOSE] _decompose_task returned, processing results")
if isinstance(subtasks_result, Generator):
subtasks = []
for new_tasks in subtasks_result:
@ -261,18 +234,15 @@ class Workforce(BaseWorkforce):
on_stream_batch(new_tasks, False)
except Exception as e:
logger.warning(f"Streaming callback failed: {e}")
logger.info(f"[DECOMPOSE] Collected {len(subtasks)} subtasks from generator")
# After consuming the generator, check task.subtasks for final result as fallback
if not subtasks and task.subtasks:
subtasks = task.subtasks
else:
subtasks = subtasks_result
logger.info(f"[DECOMPOSE] Got {len(subtasks) if subtasks else 0} subtasks directly")
if subtasks:
self._pending_tasks.extendleft(reversed(subtasks))
logger.info(f"[DECOMPOSE] ✅ Appended {len(subtasks)} subtasks to pending tasks")
if not subtasks:
logger.warning(f"[DECOMPOSE] No subtasks returned, creating fallback task")
@ -283,7 +253,6 @@ class Workforce(BaseWorkforce):
)
task.subtasks = [fallback_task]
subtasks = [fallback_task]
logger.info(f"[DECOMPOSE] Created fallback task: {fallback_task.id}")
if on_stream_batch:
try:
@ -291,6 +260,7 @@ class Workforce(BaseWorkforce):
except Exception as e:
logger.warning(f"Final streaming callback failed: {e}")
logger.debug(f"[DECOMPOSE] handle_decompose_append_task completed, returned {len(subtasks)} subtasks")
return subtasks
def _get_agent_id_from_node_id(self, node_id: str) -> str | None:
@ -385,6 +355,7 @@ class Workforce(BaseWorkforce):
f"Task {task.id} will not be properly tracked on frontend. "
f"Available workers: {[c.node_id for c in self._children if hasattr(c, 'node_id')]}"
)
else:
await task_lock.put_queue(
ActionAssignTaskData(
action=Action.assign_task,
@ -421,7 +392,7 @@ class Workforce(BaseWorkforce):
worker=worker,
pool_max_size=pool_max_size,
use_structured_output_handler=self.use_structured_output_handler,
context_utility=None, # Will be set during save/load operations
context_utility=None,
enable_workflow_memory=enable_workflow_memory,
)
self._children.append(worker_node)
@ -442,6 +413,7 @@ class Workforce(BaseWorkforce):
role=worker_node.description,
)
metrics_callbacks[0].log_worker_created(event)
return self
async def _handle_completed_task(self, task: Task) -> None:
@ -502,6 +474,55 @@ class Workforce(BaseWorkforce):
return result
async def _get_returned_task(self) -> Optional[Task]:
r"""Override to handle timeout and send notification to frontend.
Get the task that's published by this node and just get returned
from the assignee. Includes timeout handling to prevent indefinite
waiting.
Raises:
asyncio.TimeoutError: If waiting for task exceeds timeout
"""
try:
return await asyncio.wait_for(
self._channel.get_returned_task_by_publisher(self.node_id),
timeout=self.task_timeout_seconds,
)
except asyncio.TimeoutError:
# Send timeout notification to frontend before re-raising
logger.warning(
f"⏰ [WF-TIMEOUT] Task timeout in workforce {self.node_id}. "
f"Timeout: {self.task_timeout_seconds}s, "
f"Pending tasks: {len(self._pending_tasks)}, "
f"In-flight tasks: {self._in_flight_tasks}"
)
# Try to notify frontend, but don't let notification failure mask the timeout
try:
task_lock = get_task_lock(self.api_task_id)
timeout_minutes = self.task_timeout_seconds // 60
await task_lock.put_queue(
ActionTimeoutData(
data={
"message": f"Task execution timeout: No response received for {timeout_minutes} minutes",
"in_flight_tasks": self._in_flight_tasks,
"pending_tasks": len(self._pending_tasks),
"timeout_seconds": self.task_timeout_seconds,
}
)
)
except Exception as notify_err:
logger.error(f"Failed to send timeout notification: {notify_err}")
raise
except Exception as e:
logger.error(
f"Error getting returned task {e} in workforce {self.node_id}. "
f"Current pending tasks: {len(self._pending_tasks)}, "
f"In-flight tasks: {self._in_flight_tasks}"
)
raise
def stop(self) -> None:
logger.info("=" * 80)
logger.info(f"⏹️ [WF-LIFECYCLE] stop() CALLED", extra={"api_task_id": self.api_task_id, "workforce_id": id(self)})

View file

@ -6,7 +6,7 @@ readme = "README.md"
requires-python = ">=3.10,<3.11"
dependencies = [
"pip>=23.0",
"camel-ai[eigent]==0.2.83a9",
"camel-ai[eigent]==0.2.83",
"fastapi>=0.115.12",
"fastapi-babel>=1.0.0",
"uvicorn[standard]>=0.34.2",

154
backend/uv.lock generated
View file

@ -192,15 +192,15 @@ wheels = [
[[package]]
name = "azure-core"
version = "1.37.0"
version = "1.38.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "requests" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ef/83/41c9371c8298999c67b007e308a0a3c4d6a59c6908fa9c62101f031f886f/azure_core-1.37.0.tar.gz", hash = "sha256:7064f2c11e4b97f340e8e8c6d923b822978be3016e46b7bc4aa4b337cfb48aee", size = 357620, upload-time = "2025-12-11T20:05:13.518Z" }
sdist = { url = "https://files.pythonhosted.org/packages/dc/1b/e503e08e755ea94e7d3419c9242315f888fc664211c90d032e40479022bf/azure_core-1.38.0.tar.gz", hash = "sha256:8194d2682245a3e4e3151a667c686464c3786fed7918b394d035bdcd61bb5993", size = 363033, upload-time = "2026-01-12T17:03:05.535Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/34/a9914e676971a13d6cc671b1ed172f9804b50a3a80a143ff196e52f4c7ee/azure_core-1.37.0-py3-none-any.whl", hash = "sha256:b3abe2c59e7d6bb18b38c275a5029ff80f98990e7c90a5e646249a56630fcc19", size = 214006, upload-time = "2025-12-11T20:05:14.96Z" },
{ url = "https://files.pythonhosted.org/packages/fc/d8/b8fcba9464f02b121f39de2db2bf57f0b216fe11d014513d666e8634380d/azure_core-1.38.0-py3-none-any.whl", hash = "sha256:ab0c9b2cd71fecb1842d52c965c95285d3cfb38902f6766e4a471f1cd8905335", size = 217825, upload-time = "2026-01-12T17:03:07.291Z" },
]
[[package]]
@ -261,7 +261,7 @@ dev = [
[package.metadata]
requires-dist = [
{ name = "aiofiles", specifier = ">=24.1.0" },
{ name = "camel-ai", extras = ["eigent"], specifier = "==0.2.83a9" },
{ name = "camel-ai", extras = ["eigent"], specifier = "==0.2.83" },
{ name = "debugpy", specifier = ">=1.8.17" },
{ name = "fastapi", specifier = ">=0.115.12" },
{ name = "fastapi-babel", specifier = ">=1.0.0" },
@ -309,35 +309,35 @@ wheels = [
[[package]]
name = "boto3"
version = "1.42.24"
version = "1.42.30"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore" },
{ name = "jmespath" },
{ name = "s3transfer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ee/21/8be0e3685c3a4868be48d8d2f6e5b4641727e1d8a5d396b8b401d2b5f06e/boto3-1.42.24.tar.gz", hash = "sha256:c47a2f40df933e3861fc66fd8d6b87ee36d4361663a7e7ba39a87f5a78b2eae1", size = 112788, upload-time = "2026-01-07T20:30:51.019Z" }
sdist = { url = "https://files.pythonhosted.org/packages/42/79/2dac8b7cb075cfa43908ee9af3f8ee06880d84b86013854c5cca8945afac/boto3-1.42.30.tar.gz", hash = "sha256:ba9cd2f7819637d15bfbeb63af4c567fcc8a7dcd7b93dd12734ec58601169538", size = 112809, upload-time = "2026-01-16T20:37:23.636Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/75/bbfccb268f9faa4f59030888e859dca9797a980b77d6a074113af73bd4bf/boto3-1.42.24-py3-none-any.whl", hash = "sha256:8ed6ad670a5a2d7f66c1b0d3362791b48392c7a08f78479f5d8ab319a4d9118f", size = 140572, upload-time = "2026-01-07T20:30:49.431Z" },
{ url = "https://files.pythonhosted.org/packages/52/b3/2c0d828c9f668292e277ca5232e6160dd5b4b660a3f076f20dd5378baa1e/boto3-1.42.30-py3-none-any.whl", hash = "sha256:d7e548bea65e0ae2c465c77de937bc686b591aee6a352d5a19a16bc751e591c1", size = 140573, upload-time = "2026-01-16T20:37:22.089Z" },
]
[[package]]
name = "botocore"
version = "1.42.24"
version = "1.42.30"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jmespath" },
{ name = "python-dateutil" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/12/d7/bb4a4e839b238ffb67b002d7326b328ebe5eb23ed5180f2ca10399a802de/botocore-1.42.24.tar.gz", hash = "sha256:be8d1bea64fb91eea08254a1e5fea057e4428d08e61f4e11083a02cafc1f8cc6", size = 14878455, upload-time = "2026-01-07T20:30:40.379Z" }
sdist = { url = "https://files.pythonhosted.org/packages/44/38/23862628a0eb044c8b8b3d7a9ad1920b3bfd6bce6d746d5a871e8382c7e4/botocore-1.42.30.tar.gz", hash = "sha256:9bf1662b8273d5cc3828a49f71ca85abf4e021011c1f0a71f41a2ea5769a5116", size = 14891439, upload-time = "2026-01-16T20:37:13.77Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ff/d4/f2655d777eed8b069ecab3761454cb83f830f8be8b5b0d292e4b3a980d00/botocore-1.42.24-py3-none-any.whl", hash = "sha256:8fca9781d7c84f7ad070fceffaff7179c4aa7a5ffb27b43df9d1d957801e0a8d", size = 14551806, upload-time = "2026-01-07T20:30:38.103Z" },
{ url = "https://files.pythonhosted.org/packages/3d/8d/6d7b016383b1f74dd93611b1c5078bbaddaca901553ab886dcda87cae365/botocore-1.42.30-py3-none-any.whl", hash = "sha256:97070a438cac92430bb7b65f8ebd7075224f4a289719da4ee293d22d1e98db02", size = 14566340, upload-time = "2026-01-16T20:37:10.94Z" },
]
[[package]]
name = "camel-ai"
version = "0.2.83a9"
version = "0.2.83"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "astor" },
@ -354,9 +354,9 @@ dependencies = [
{ name = "tiktoken" },
{ name = "websockets" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bf/8c/7d8071776ba973bb6e734edb6caaf4fdbdf60ecebdc1c4017948cc67ad48/camel_ai-0.2.83a9.tar.gz", hash = "sha256:2ee560551797b089f9849d3b9d63cd3a2b4eb45d339d17e6bf95eba2b85c4b50", size = 1124774, upload-time = "2026-01-15T21:28:24.51Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e2/d1/36f0982862ba2b992968ace43b1c04dd72f7114ce3954342a99e18619d6a/camel_ai-0.2.83.tar.gz", hash = "sha256:c25eb414e9353aab166021852fb54d1d3a0c0e17485fefa996de2cccdf4c8eb9", size = 1125708, upload-time = "2026-01-19T20:37:45.197Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/34/77/f7594707571af9c86351a69ff9f7f580602b42ffe8113803153c069b6bff/camel_ai-0.2.83a9-py3-none-any.whl", hash = "sha256:7cfe97b590096c1cc5afddf6dca023c5b9a47d104196c16b4b2b1934931af260", size = 1595808, upload-time = "2026-01-15T21:28:21.068Z" },
{ url = "https://files.pythonhosted.org/packages/32/e2/4e2964059794af9161889223fa7d17630c1bcc74005c9892cd1e1627650c/camel_ai-0.2.83-py3-none-any.whl", hash = "sha256:3a183efdcccd211ae216b2a7903d48a8811ad0f4541223cacc05cb25a11279a6", size = 1599355, upload-time = "2026-01-19T20:37:41.985Z" },
]
[package.optional-dependencies]
@ -990,14 +990,14 @@ wheels = [
[[package]]
name = "httplib2"
version = "0.31.0"
version = "0.31.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pyparsing" },
]
sdist = { url = "https://files.pythonhosted.org/packages/52/77/6653db69c1f7ecfe5e3f9726fdadc981794656fcd7d98c4209fecfea9993/httplib2-0.31.0.tar.gz", hash = "sha256:ac7ab497c50975147d4f7b1ade44becc7df2f8954d42b38b3d69c515f531135c", size = 250759, upload-time = "2025-09-11T12:16:03.403Z" }
sdist = { url = "https://files.pythonhosted.org/packages/77/df/6eb1d485a513776bbdbb1c919b72e59b5acc51c5e7ef28ad1cd444e252a3/httplib2-0.31.1.tar.gz", hash = "sha256:21591655ac54953624c6ab8d587c71675e379e31e2cfe3147c83c11e9ef41f92", size = 250746, upload-time = "2026-01-13T12:14:14.365Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/a2/0d269db0f6163be503775dc8b6a6fa15820cc9fdc866f6ba608d86b721f2/httplib2-0.31.0-py3-none-any.whl", hash = "sha256:b9cd78abea9b4e43a7714c6e0f8b6b8561a6fc1e95d5dbd367f5bf0ef35f5d24", size = 91148, upload-time = "2025-09-11T12:16:01.803Z" },
{ url = "https://files.pythonhosted.org/packages/f0/d8/1b05076441c2f01e4b64f59e5255edc2f0384a711b6d618845c023dc269b/httplib2-0.31.1-py3-none-any.whl", hash = "sha256:d520d22fa7e50c746a7ed856bac298c4300105d01bc2d8c2580a9b57fb9ed617", size = 91101, upload-time = "2026-01-13T12:14:12.676Z" },
]
[[package]]
@ -1046,7 +1046,7 @@ wheels = [
[[package]]
name = "huggingface-hub"
version = "1.3.1"
version = "1.3.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "filelock" },
@ -1060,9 +1060,9 @@ dependencies = [
{ name = "typer-slim" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/dd/dd/1cc985c5dda36298b152f75e82a1c81f52243b78fb7e9cad637a29561ad1/huggingface_hub-1.3.1.tar.gz", hash = "sha256:e80e0cfb4a75557c51ab20d575bdea6bb6106c2f97b7c75d8490642f1efb6df5", size = 622356, upload-time = "2026-01-09T14:08:16.888Z" }
sdist = { url = "https://files.pythonhosted.org/packages/ba/d6/02d1c505e1d3364230e5fa16d2b58c8f36a39c5efe8e99bc4d03d06fd0ca/huggingface_hub-1.3.2.tar.gz", hash = "sha256:15d7902e154f04174a0816d1e9594adcf15cdad57596920a5dc70fadb5d896c7", size = 624018, upload-time = "2026-01-14T13:57:39.635Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/90/fb/cb8fe5f71d5622427f20bcab9e06a696a5aaf21bfe7bd0a8a0c63c88abf5/huggingface_hub-1.3.1-py3-none-any.whl", hash = "sha256:efbc7f3153cb84e2bb69b62ed90985e21ecc9343d15647a419fc0ee4b85f0ac3", size = 533351, upload-time = "2026-01-09T14:08:14.519Z" },
{ url = "https://files.pythonhosted.org/packages/88/1d/acd3ef8aabb7813c6ef2f91785d855583ac5cd7c3599e5c1a1a2ed1ec2e5/huggingface_hub-1.3.2-py3-none-any.whl", hash = "sha256:b552b9562a5532102a041fa31a6966bb9de95138fc7aa578bb3703198c25d1b6", size = 534504, upload-time = "2026-01-14T13:57:37.555Z" },
]
[[package]]
@ -1456,30 +1456,30 @@ wheels = [
[[package]]
name = "nodejs-wheel"
version = "24.12.0"
version = "24.13.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "nodejs-wheel-binaries" },
]
sdist = { url = "https://files.pythonhosted.org/packages/30/73/0e8cd7c336f64d3b72c608bc7a7a5074cf6a9721c5d3630b8803f3176a3d/nodejs_wheel-24.12.0.tar.gz", hash = "sha256:edfaa3482bd21a2da03a9e7ebda7d4d738cdc864a2d9ddfe87760994a9644232", size = 2968, upload-time = "2025-12-11T21:12:26.103Z" }
sdist = { url = "https://files.pythonhosted.org/packages/37/f0/3345c6ec958c96eaa9d59355e59c0e93359aec54634f38bc4cd06baf23aa/nodejs_wheel-24.13.0.tar.gz", hash = "sha256:8c423cbf434b4c853ebac076d563b0899d3c6594ef0f99f6cd368ca4e3a28ca2", size = 2965, upload-time = "2026-01-14T11:05:32.811Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9b/5f/72f857250e54c9dacdfbf35f9d77eefdf954de0679f905e2fd03d8faf980/nodejs_wheel-24.12.0-py3-none-any.whl", hash = "sha256:0234fa0c46902d7efb858d41f5d055948cafa6a824812e9e8eeb64662d8963b6", size = 3988, upload-time = "2025-12-11T21:11:56.287Z" },
{ url = "https://files.pythonhosted.org/packages/c3/29/f259f6c5d31a0dae8257afd8dd7ab4b60a1b6e03fab793789e5bad480d83/nodejs_wheel-24.13.0-py3-none-any.whl", hash = "sha256:c0fc56a4677f55f7639f306a6381fb253d11ce24189c87a5489ea848f6e2bf24", size = 3986, upload-time = "2026-01-14T11:05:02.807Z" },
]
[[package]]
name = "nodejs-wheel-binaries"
version = "24.12.0"
version = "24.13.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b9/35/d806c2ca66072e36dc340ccdbeb2af7e4f1b5bcc33f1481f00ceed476708/nodejs_wheel_binaries-24.12.0.tar.gz", hash = "sha256:f1b50aa25375e264697dec04b232474906b997c2630c8f499f4caf3692938435", size = 8058, upload-time = "2025-12-11T21:12:26.856Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b7/f1/73182280e2c05f49a7c2c8dbd46144efe3f74f03f798fb90da67b4a93bbf/nodejs_wheel_binaries-24.13.0.tar.gz", hash = "sha256:766aed076e900061b83d3e76ad48bfec32a035ef0d41bd09c55e832eb93ef7a4", size = 8056, upload-time = "2026-01-14T11:05:33.653Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c3/3b/9d6f044319cd5b1e98f07c41e2465b58cadc1c9c04a74c891578f3be6cb5/nodejs_wheel_binaries-24.12.0-py2.py3-none-macosx_13_0_arm64.whl", hash = "sha256:7564ddea0a87eff34e9b3ef71764cc2a476a8f09a5cccfddc4691148b0a47338", size = 55125859, upload-time = "2025-12-11T21:11:58.132Z" },
{ url = "https://files.pythonhosted.org/packages/48/a5/f5722bf15c014e2f476d7c76bce3d55c341d19122d8a5d86454db32a61a4/nodejs_wheel_binaries-24.12.0-py2.py3-none-macosx_13_0_x86_64.whl", hash = "sha256:8ff929c4669e64613ceb07f5bbd758d528c3563820c75d5de3249eb452c0c0ab", size = 55309035, upload-time = "2025-12-11T21:12:01.754Z" },
{ url = "https://files.pythonhosted.org/packages/a9/61/68d39a6f1b5df67805969fd2829ba7e80696c9af19537856ec912050a2be/nodejs_wheel_binaries-24.12.0-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:6ebacefa8891bc456ad3655e6bce0af7e20ba08662f79d9109986faeb703fd6f", size = 59661017, upload-time = "2025-12-11T21:12:05.268Z" },
{ url = "https://files.pythonhosted.org/packages/16/a1/31aad16f55a5e44ca7ea62d1367fc69f4b6e1dba67f58a0a41d0ed854540/nodejs_wheel_binaries-24.12.0-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:3292649a03682ccbfa47f7b04d3e4240e8c46ef04dc941b708f20e4e6a764f75", size = 60159770, upload-time = "2025-12-11T21:12:08.696Z" },
{ url = "https://files.pythonhosted.org/packages/c4/5e/b7c569aa1862690ca4d4daf3a64cafa1ea6ce667a9e3ae3918c56e127d9b/nodejs_wheel_binaries-24.12.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:7fb83df312955ea355ba7f8cbd7055c477249a131d3cb43b60e4aeb8f8c730b1", size = 61653561, upload-time = "2025-12-11T21:12:12.575Z" },
{ url = "https://files.pythonhosted.org/packages/71/87/567f58d7ba69ff0208be849b37be0f2c2e99c69e49334edd45ff44f00043/nodejs_wheel_binaries-24.12.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2473c819448fedd7b036dde236b09f3c8bbf39fbbd0c1068790a0498800f498b", size = 62238331, upload-time = "2025-12-11T21:12:16.143Z" },
{ url = "https://files.pythonhosted.org/packages/6a/9d/c6492188ce8de90093c6755a4a63bb6b2b4efb17094cb4f9a9a49c73ed3b/nodejs_wheel_binaries-24.12.0-py2.py3-none-win_amd64.whl", hash = "sha256:2090d59f75a68079fabc9b86b14df8238b9aecb9577966dc142ce2a23a32e9bb", size = 41342076, upload-time = "2025-12-11T21:12:20.618Z" },
{ url = "https://files.pythonhosted.org/packages/df/af/cd3290a647df567645353feed451ef4feaf5844496ced69c4dcb84295ff4/nodejs_wheel_binaries-24.12.0-py2.py3-none-win_arm64.whl", hash = "sha256:d0c2273b667dd7e3f55e369c0085957b702144b1b04bfceb7ce2411e58333757", size = 39048104, upload-time = "2025-12-11T21:12:23.495Z" },
{ url = "https://files.pythonhosted.org/packages/c4/dc/4d7548aa74a5b446d093f03aff4fb236b570959d793f21c9c42ab6ad870a/nodejs_wheel_binaries-24.13.0-py2.py3-none-macosx_13_0_arm64.whl", hash = "sha256:356654baa37bfd894e447e7e00268db403ea1d223863963459a0fbcaaa1d9d48", size = 55133268, upload-time = "2026-01-14T11:05:05.335Z" },
{ url = "https://files.pythonhosted.org/packages/24/8a/8a4454d28339487240dd2232f42f1090e4a58544c581792d427f6239798c/nodejs_wheel_binaries-24.13.0-py2.py3-none-macosx_13_0_x86_64.whl", hash = "sha256:92fdef7376120e575f8b397789bafcb13bbd22a1b4d21b060d200b14910f22a5", size = 55314800, upload-time = "2026-01-14T11:05:09.121Z" },
{ url = "https://files.pythonhosted.org/packages/e7/fb/46c600fcc748bd13bc536a735f11532a003b14f5c4dfd6865f5911672175/nodejs_wheel_binaries-24.13.0-py2.py3-none-manylinux_2_28_aarch64.whl", hash = "sha256:3f619ac140e039ecd25f2f71d6e83ad1414017a24608531851b7c31dc140cdfd", size = 59666320, upload-time = "2026-01-14T11:05:12.369Z" },
{ url = "https://files.pythonhosted.org/packages/85/47/d48f11fc5d1541ace5d806c62a45738a1db9ce33e85a06fe4cd3d9ce83f6/nodejs_wheel_binaries-24.13.0-py2.py3-none-manylinux_2_28_x86_64.whl", hash = "sha256:dfb31ebc2c129538192ddb5bedd3d63d6de5d271437cd39ea26bf3fe229ba430", size = 60162447, upload-time = "2026-01-14T11:05:16.003Z" },
{ url = "https://files.pythonhosted.org/packages/b1/74/d285c579ae8157c925b577dde429543963b845e69cd006549e062d1cf5b6/nodejs_wheel_binaries-24.13.0-py2.py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fdd720d7b378d5bb9b2710457bbc880d4c4d1270a94f13fbe257198ac707f358", size = 61659994, upload-time = "2026-01-14T11:05:19.68Z" },
{ url = "https://files.pythonhosted.org/packages/ba/97/88b4254a2ff93ed2eaed725f77b7d3d2d8d7973bf134359ce786db894faf/nodejs_wheel_binaries-24.13.0-py2.py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:9ad6383613f3485a75b054647a09f1cd56d12380d7459184eebcf4a5d403f35c", size = 62244373, upload-time = "2026-01-14T11:05:23.987Z" },
{ url = "https://files.pythonhosted.org/packages/4e/c3/0e13a3da78f08cb58650971a6957ac7bfef84164b405176e53ab1e3584e2/nodejs_wheel_binaries-24.13.0-py2.py3-none-win_amd64.whl", hash = "sha256:605be4763e3ef427a3385a55da5a1bcf0a659aa2716eebbf23f332926d7e5f23", size = 41345528, upload-time = "2026-01-14T11:05:27.67Z" },
{ url = "https://files.pythonhosted.org/packages/a3/f1/0578d65b4e3dc572967fd702221ea1f42e1e60accfb6b0dd8d8f15410139/nodejs_wheel_binaries-24.13.0-py2.py3-none-win_arm64.whl", hash = "sha256:2e3431d869d6b2dbeef1d469ad0090babbdcc8baaa72c01dd3cc2c6121c96af5", size = 39054688, upload-time = "2026-01-14T11:05:30.739Z" },
]
[[package]]
@ -1933,26 +1933,26 @@ wheels = [
[[package]]
name = "pyarrow"
version = "22.0.0"
version = "23.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/30/53/04a7fdc63e6056116c9ddc8b43bc28c12cdd181b85cbeadb79278475f3ae/pyarrow-22.0.0.tar.gz", hash = "sha256:3d600dc583260d845c7d8a6db540339dd883081925da2bd1c5cb808f720b3cd9", size = 1151151, upload-time = "2025-10-24T12:30:00.762Z" }
sdist = { url = "https://files.pythonhosted.org/packages/01/33/ffd9c3eb087fa41dd79c3cf20c4c0ae3cdb877c4f8e1107a446006344924/pyarrow-23.0.0.tar.gz", hash = "sha256:180e3150e7edfcd182d3d9afba72f7cf19839a497cc76555a8dce998a8f67615", size = 1167185, upload-time = "2026-01-18T16:19:42.218Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d9/9b/cb3f7e0a345353def531ca879053e9ef6b9f38ed91aebcf68b09ba54dec0/pyarrow-22.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:77718810bd3066158db1e95a63c160ad7ce08c6b0710bc656055033e39cdad88", size = 34223968, upload-time = "2025-10-24T10:03:31.21Z" },
{ url = "https://files.pythonhosted.org/packages/6c/41/3184b8192a120306270c5307f105b70320fdaa592c99843c5ef78aaefdcf/pyarrow-22.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:44d2d26cda26d18f7af7db71453b7b783788322d756e81730acb98f24eb90ace", size = 35942085, upload-time = "2025-10-24T10:03:38.146Z" },
{ url = "https://files.pythonhosted.org/packages/d9/3d/a1eab2f6f08001f9fb714b8ed5cfb045e2fe3e3e3c0c221f2c9ed1e6d67d/pyarrow-22.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:b9d71701ce97c95480fecb0039ec5bb889e75f110da72005743451339262f4ce", size = 44964613, upload-time = "2025-10-24T10:03:46.516Z" },
{ url = "https://files.pythonhosted.org/packages/46/46/a1d9c24baf21cfd9ce994ac820a24608decf2710521b29223d4334985127/pyarrow-22.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:710624ab925dc2b05a6229d47f6f0dac1c1155e6ed559be7109f684eba048a48", size = 47627059, upload-time = "2025-10-24T10:03:55.353Z" },
{ url = "https://files.pythonhosted.org/packages/3a/4c/f711acb13075c1391fd54bc17e078587672c575f8de2a6e62509af026dcf/pyarrow-22.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f963ba8c3b0199f9d6b794c90ec77545e05eadc83973897a4523c9e8d84e9340", size = 47947043, upload-time = "2025-10-24T10:04:05.408Z" },
{ url = "https://files.pythonhosted.org/packages/4e/70/1f3180dd7c2eab35c2aca2b29ace6c519f827dcd4cfeb8e0dca41612cf7a/pyarrow-22.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd0d42297ace400d8febe55f13fdf46e86754842b860c978dfec16f081e5c653", size = 50206505, upload-time = "2025-10-24T10:04:15.786Z" },
{ url = "https://files.pythonhosted.org/packages/80/07/fea6578112c8c60ffde55883a571e4c4c6bc7049f119d6b09333b5cc6f73/pyarrow-22.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:00626d9dc0f5ef3a75fe63fd68b9c7c8302d2b5bbc7f74ecaedba83447a24f84", size = 28101641, upload-time = "2025-10-24T10:04:22.57Z" },
{ url = "https://files.pythonhosted.org/packages/ae/2f/23e042a5aa99bcb15e794e14030e8d065e00827e846e53a66faec73c7cd6/pyarrow-23.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:cbdc2bf5947aa4d462adcf8453cf04aee2f7932653cb67a27acd96e5e8528a67", size = 34281861, upload-time = "2026-01-18T16:13:34.332Z" },
{ url = "https://files.pythonhosted.org/packages/8b/65/1651933f504b335ec9cd8f99463718421eb08d883ed84f0abd2835a16cad/pyarrow-23.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:4d38c836930ce15cd31dce20114b21ba082da231c884bdc0a7b53e1477fe7f07", size = 35825067, upload-time = "2026-01-18T16:13:42.549Z" },
{ url = "https://files.pythonhosted.org/packages/84/ec/d6fceaec050c893f4e35c0556b77d4cc9973fcc24b0a358a5781b1234582/pyarrow-23.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:4222ff8f76919ecf6c716175a0e5fddb5599faeed4c56d9ea41a2c42be4998b2", size = 44458539, upload-time = "2026-01-18T16:13:52.975Z" },
{ url = "https://files.pythonhosted.org/packages/fd/d9/369f134d652b21db62fe3ec1c5c2357e695f79eb67394b8a93f3a2b2cffa/pyarrow-23.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:87f06159cbe38125852657716889296c83c37b4d09a5e58f3d10245fd1f69795", size = 47535889, upload-time = "2026-01-18T16:14:03.693Z" },
{ url = "https://files.pythonhosted.org/packages/a3/95/f37b6a252fdbf247a67a78fb3f61a529fe0600e304c4d07741763d3522b1/pyarrow-23.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1675c374570d8b91ea6d4edd4608fa55951acd44e0c31bd146e091b4005de24f", size = 48157777, upload-time = "2026-01-18T16:14:12.483Z" },
{ url = "https://files.pythonhosted.org/packages/ab/ab/fb94923108c9c6415dab677cf1f066d3307798eafc03f9a65ab4abc61056/pyarrow-23.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:247374428fde4f668f138b04031a7e7077ba5fa0b5b1722fdf89a017bf0b7ee0", size = 50580441, upload-time = "2026-01-18T16:14:20.187Z" },
{ url = "https://files.pythonhosted.org/packages/ae/78/897ba6337b517fc8e914891e1bd918da1c4eb8e936a553e95862e67b80f6/pyarrow-23.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:de53b1bd3b88a2ee93c9af412c903e57e738c083be4f6392288294513cd8b2c1", size = 27530028, upload-time = "2026-01-18T16:14:27.353Z" },
]
[[package]]
name = "pyasn1"
version = "0.6.1"
version = "0.6.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ba/e9/01f1a64245b89f039897cb0130016d79f77d52669aae6ee7b159a6c4c018/pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034", size = 145322, upload-time = "2024-09-10T22:41:42.55Z" }
sdist = { url = "https://files.pythonhosted.org/packages/fe/b6/6e630dff89739fcd427e3f72b3d905ce0acb85a45d4ec3e2678718a3487f/pyasn1-0.6.2.tar.gz", hash = "sha256:9b59a2b25ba7e4f8197db7686c09fb33e658b98339fadb826e9512629017833b", size = 146586, upload-time = "2026-01-16T18:04:18.534Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c8/f1/d6a797abb14f6283c0ddff96bbdd46937f64122b8c925cab503dd37f8214/pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629", size = 83135, upload-time = "2024-09-11T16:00:36.122Z" },
{ url = "https://files.pythonhosted.org/packages/44/b5/a96872e5184f354da9c84ae119971a0a4c221fe9b27a4d94bd43f2596727/pyasn1-0.6.2-py3-none-any.whl", hash = "sha256:1eb26d860996a18e9b6ed05e7aae0e9fc21619fcee6af91cca9bad4fbea224bf", size = 83371, upload-time = "2026-01-16T18:04:17.174Z" },
]
[[package]]
@ -2051,14 +2051,14 @@ wheels = [
[[package]]
name = "pydash"
version = "8.0.5"
version = "8.0.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2f/24/91c037f47e434172c2112d65c00c84d475a6715425e3315ba2cbb7a87e66/pydash-8.0.5.tar.gz", hash = "sha256:7cc44ebfe5d362f4f5f06c74c8684143c5ac481376b059ff02570705523f9e2e", size = 164861, upload-time = "2025-01-17T16:08:50.562Z" }
sdist = { url = "https://files.pythonhosted.org/packages/75/c1/1c55272f49d761cec38ddb80be9817935b9c91ebd6a8988e10f532868d56/pydash-8.0.6.tar.gz", hash = "sha256:b2821547e9723f69cf3a986be4db64de41730be149b2641947ecd12e1e11025a", size = 164338, upload-time = "2026-01-17T16:42:56.576Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/86/e74c978800131c657fc5145f2c1c63e0cea01a49b6216f729cf77a2e1edf/pydash-8.0.5-py3-none-any.whl", hash = "sha256:b2625f8981862e19911daa07f80ed47b315ce20d9b5eb57aaf97aaf570c3892f", size = 102077, upload-time = "2025-01-17T16:08:47.91Z" },
{ url = "https://files.pythonhosted.org/packages/a5/b7/cc5e7974699db40014d58c7dd7c4ad4ffc244d36930dc9ec7d06ee67d7a9/pydash-8.0.6-py3-none-any.whl", hash = "sha256:ee70a81a5b292c007f28f03a4ee8e75c1f5d7576df5457b836ec7ab2839cc5d0", size = 101561, upload-time = "2026-01-17T16:42:55.448Z" },
]
[[package]]
@ -2277,38 +2277,40 @@ wheels = [
[[package]]
name = "regex"
version = "2025.11.3"
version = "2026.1.15"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" }
sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8a/d6/d788d52da01280a30a3f6268aef2aa71043bff359c618fea4c5b536654d5/regex-2025.11.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2b441a4ae2c8049106e8b39973bfbddfb25a179dda2bdb99b0eeb60c40a6a3af", size = 488087, upload-time = "2025-11-03T21:30:47.317Z" },
{ url = "https://files.pythonhosted.org/packages/69/39/abec3bd688ec9bbea3562de0fd764ff802976185f5ff22807bf0a2697992/regex-2025.11.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2fa2eed3f76677777345d2f81ee89f5de2f5745910e805f7af7386a920fa7313", size = 290544, upload-time = "2025-11-03T21:30:49.912Z" },
{ url = "https://files.pythonhosted.org/packages/39/b3/9a231475d5653e60002508f41205c61684bb2ffbf2401351ae2186897fc4/regex-2025.11.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8b4a27eebd684319bdf473d39f1d79eed36bf2cd34bd4465cdb4618d82b3d56", size = 288408, upload-time = "2025-11-03T21:30:51.344Z" },
{ url = "https://files.pythonhosted.org/packages/c3/c5/1929a0491bd5ac2d1539a866768b88965fa8c405f3e16a8cef84313098d6/regex-2025.11.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5cf77eac15bd264986c4a2c63353212c095b40f3affb2bc6b4ef80c4776c1a28", size = 781584, upload-time = "2025-11-03T21:30:52.596Z" },
{ url = "https://files.pythonhosted.org/packages/ce/fd/16aa16cf5d497ef727ec966f74164fbe75d6516d3d58ac9aa989bc9cdaad/regex-2025.11.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b7f9ee819f94c6abfa56ec7b1dbab586f41ebbdc0a57e6524bd5e7f487a878c7", size = 850733, upload-time = "2025-11-03T21:30:53.825Z" },
{ url = "https://files.pythonhosted.org/packages/e6/49/3294b988855a221cb6565189edf5dc43239957427df2d81d4a6b15244f64/regex-2025.11.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:838441333bc90b829406d4a03cb4b8bf7656231b84358628b0406d803931ef32", size = 898691, upload-time = "2025-11-03T21:30:55.575Z" },
{ url = "https://files.pythonhosted.org/packages/14/62/b56d29e70b03666193369bdbdedfdc23946dbe9f81dd78ce262c74d988ab/regex-2025.11.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cfe6d3f0c9e3b7e8c0c694b24d25e677776f5ca26dce46fd6b0489f9c8339391", size = 791662, upload-time = "2025-11-03T21:30:57.262Z" },
{ url = "https://files.pythonhosted.org/packages/15/fc/e4c31d061eced63fbf1ce9d853975f912c61a7d406ea14eda2dd355f48e7/regex-2025.11.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2ab815eb8a96379a27c3b6157fcb127c8f59c36f043c1678110cea492868f1d5", size = 782587, upload-time = "2025-11-03T21:30:58.788Z" },
{ url = "https://files.pythonhosted.org/packages/b2/bb/5e30c7394bcf63f0537121c23e796be67b55a8847c3956ae6068f4c70702/regex-2025.11.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:728a9d2d173a65b62bdc380b7932dd8e74ed4295279a8fe1021204ce210803e7", size = 774709, upload-time = "2025-11-03T21:31:00.081Z" },
{ url = "https://files.pythonhosted.org/packages/c5/c4/fce773710af81b0cb37cb4ff0947e75d5d17dee304b93d940b87a67fc2f4/regex-2025.11.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:509dc827f89c15c66a0c216331260d777dd6c81e9a4e4f830e662b0bb296c313", size = 845773, upload-time = "2025-11-03T21:31:01.583Z" },
{ url = "https://files.pythonhosted.org/packages/7b/5e/9466a7ec4b8ec282077095c6eb50a12a389d2e036581134d4919e8ca518c/regex-2025.11.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:849202cd789e5f3cf5dcc7822c34b502181b4824a65ff20ce82da5524e45e8e9", size = 836164, upload-time = "2025-11-03T21:31:03.244Z" },
{ url = "https://files.pythonhosted.org/packages/95/18/82980a60e8ed1594eb3c89eb814fb276ef51b9af7caeab1340bfd8564af6/regex-2025.11.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b6f78f98741dcc89607c16b1e9426ee46ce4bf31ac5e6b0d40e81c89f3481ea5", size = 779832, upload-time = "2025-11-03T21:31:04.876Z" },
{ url = "https://files.pythonhosted.org/packages/03/cc/90ab0fdbe6dce064a42015433f9152710139fb04a8b81b4fb57a1cb63ffa/regex-2025.11.3-cp310-cp310-win32.whl", hash = "sha256:149eb0bba95231fb4f6d37c8f760ec9fa6fabf65bab555e128dde5f2475193ec", size = 265802, upload-time = "2025-11-03T21:31:06.581Z" },
{ url = "https://files.pythonhosted.org/packages/34/9d/e9e8493a85f3b1ddc4a5014465f5c2b78c3ea1cbf238dcfde78956378041/regex-2025.11.3-cp310-cp310-win_amd64.whl", hash = "sha256:ee3a83ce492074c35a74cc76cf8235d49e77b757193a5365ff86e3f2f93db9fd", size = 277722, upload-time = "2025-11-03T21:31:08.144Z" },
{ url = "https://files.pythonhosted.org/packages/15/c4/b54b24f553966564506dbf873a3e080aef47b356a3b39b5d5aba992b50db/regex-2025.11.3-cp310-cp310-win_arm64.whl", hash = "sha256:38af559ad934a7b35147716655d4a2f79fcef2d695ddfe06a06ba40ae631fa7e", size = 270289, upload-time = "2025-11-03T21:31:10.267Z" },
{ url = "https://files.pythonhosted.org/packages/ea/d2/e6ee96b7dff201a83f650241c52db8e5bd080967cb93211f57aa448dc9d6/regex-2026.1.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4e3dd93c8f9abe8aa4b6c652016da9a3afa190df5ad822907efe6b206c09896e", size = 488166, upload-time = "2026-01-14T23:13:46.408Z" },
{ url = "https://files.pythonhosted.org/packages/23/8a/819e9ce14c9f87af026d0690901b3931f3101160833e5d4c8061fa3a1b67/regex-2026.1.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:97499ff7862e868b1977107873dd1a06e151467129159a6ffd07b66706ba3a9f", size = 290632, upload-time = "2026-01-14T23:13:48.688Z" },
{ url = "https://files.pythonhosted.org/packages/d5/c3/23dfe15af25d1d45b07dfd4caa6003ad710dcdcb4c4b279909bdfe7a2de8/regex-2026.1.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0bda75ebcac38d884240914c6c43d8ab5fb82e74cde6da94b43b17c411aa4c2b", size = 288500, upload-time = "2026-01-14T23:13:50.503Z" },
{ url = "https://files.pythonhosted.org/packages/c6/31/1adc33e2f717df30d2f4d973f8776d2ba6ecf939301efab29fca57505c95/regex-2026.1.15-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7dcc02368585334f5bc81fc73a2a6a0bbade60e7d83da21cead622faf408f32c", size = 781670, upload-time = "2026-01-14T23:13:52.453Z" },
{ url = "https://files.pythonhosted.org/packages/23/ce/21a8a22d13bc4adcb927c27b840c948f15fc973e21ed2346c1bd0eae22dc/regex-2026.1.15-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:693b465171707bbe882a7a05de5e866f33c76aa449750bee94a8d90463533cc9", size = 850820, upload-time = "2026-01-14T23:13:54.894Z" },
{ url = "https://files.pythonhosted.org/packages/6c/4f/3eeacdf587a4705a44484cd0b30e9230a0e602811fb3e2cc32268c70d509/regex-2026.1.15-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b0d190e6f013ea938623a58706d1469a62103fb2a241ce2873a9906e0386582c", size = 898777, upload-time = "2026-01-14T23:13:56.908Z" },
{ url = "https://files.pythonhosted.org/packages/79/a9/1898a077e2965c35fc22796488141a22676eed2d73701e37c73ad7c0b459/regex-2026.1.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ff818702440a5878a81886f127b80127f5d50563753a28211482867f8318106", size = 791750, upload-time = "2026-01-14T23:13:58.527Z" },
{ url = "https://files.pythonhosted.org/packages/4c/84/e31f9d149a178889b3817212827f5e0e8c827a049ff31b4b381e76b26e2d/regex-2026.1.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:f052d1be37ef35a54e394de66136e30fa1191fab64f71fc06ac7bc98c9a84618", size = 782674, upload-time = "2026-01-14T23:13:59.874Z" },
{ url = "https://files.pythonhosted.org/packages/d2/ff/adf60063db24532add6a1676943754a5654dcac8237af024ede38244fd12/regex-2026.1.15-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6bfc31a37fd1592f0c4fc4bfc674b5c42e52efe45b4b7a6a14f334cca4bcebe4", size = 767906, upload-time = "2026-01-14T23:14:01.298Z" },
{ url = "https://files.pythonhosted.org/packages/af/3e/e6a216cee1e2780fec11afe7fc47b6f3925d7264e8149c607ac389fd9b1a/regex-2026.1.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3d6ce5ae80066b319ae3bc62fd55a557c9491baa5efd0d355f0de08c4ba54e79", size = 774798, upload-time = "2026-01-14T23:14:02.715Z" },
{ url = "https://files.pythonhosted.org/packages/0f/98/23a4a8378a9208514ed3efc7e7850c27fa01e00ed8557c958df0335edc4a/regex-2026.1.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1704d204bd42b6bb80167df0e4554f35c255b579ba99616def38f69e14a5ccb9", size = 845861, upload-time = "2026-01-14T23:14:04.824Z" },
{ url = "https://files.pythonhosted.org/packages/f8/57/d7605a9d53bd07421a8785d349cd29677fe660e13674fa4c6cbd624ae354/regex-2026.1.15-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:e3174a5ed4171570dc8318afada56373aa9289eb6dc0d96cceb48e7358b0e220", size = 755648, upload-time = "2026-01-14T23:14:06.371Z" },
{ url = "https://files.pythonhosted.org/packages/6f/76/6f2e24aa192da1e299cc1101674a60579d3912391867ce0b946ba83e2194/regex-2026.1.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:87adf5bd6d72e3e17c9cb59ac4096b1faaf84b7eb3037a5ffa61c4b4370f0f13", size = 836250, upload-time = "2026-01-14T23:14:08.343Z" },
{ url = "https://files.pythonhosted.org/packages/11/3a/1f2a1d29453299a7858eab7759045fc3d9d1b429b088dec2dc85b6fa16a2/regex-2026.1.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e85dc94595f4d766bd7d872a9de5ede1ca8d3063f3bdf1e2c725f5eb411159e3", size = 779919, upload-time = "2026-01-14T23:14:09.954Z" },
{ url = "https://files.pythonhosted.org/packages/c0/67/eab9bc955c9dcc58e9b222c801e39cff7ca0b04261792a2149166ce7e792/regex-2026.1.15-cp310-cp310-win32.whl", hash = "sha256:21ca32c28c30d5d65fc9886ff576fc9b59bbca08933e844fa2363e530f4c8218", size = 265888, upload-time = "2026-01-14T23:14:11.35Z" },
{ url = "https://files.pythonhosted.org/packages/1d/62/31d16ae24e1f8803bddb0885508acecaec997fcdcde9c243787103119ae4/regex-2026.1.15-cp310-cp310-win_amd64.whl", hash = "sha256:3038a62fc7d6e5547b8915a3d927a0fbeef84cdbe0b1deb8c99bbd4a8961b52a", size = 277830, upload-time = "2026-01-14T23:14:12.908Z" },
{ url = "https://files.pythonhosted.org/packages/e5/36/5d9972bccd6417ecd5a8be319cebfd80b296875e7f116c37fb2a2deecebf/regex-2026.1.15-cp310-cp310-win_arm64.whl", hash = "sha256:505831646c945e3e63552cc1b1b9b514f0e93232972a2d5bedbcc32f15bc82e3", size = 270376, upload-time = "2026-01-14T23:14:14.782Z" },
]
[[package]]
name = "reportlab"
version = "4.4.7"
version = "4.4.9"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "charset-normalizer" },
{ name = "pillow" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f8/a7/4600cb1cfc975a06552e8927844ddcb8fd90217e9a6068f5c7aa76c3f221/reportlab-4.4.7.tar.gz", hash = "sha256:41e8287af965e5996764933f3e75e7f363c3b6f252ba172f9429e81658d7b170", size = 3714000, upload-time = "2025-12-21T11:50:11.336Z" }
sdist = { url = "https://files.pythonhosted.org/packages/1a/39/42cf24aee570a80e1903221ae3a92a2e34c324794a392eb036cbb6dc3839/reportlab-4.4.9.tar.gz", hash = "sha256:7cf487764294ee791a4781f5a157bebce262a666ae4bbb87786760a9676c9378", size = 3911246, upload-time = "2026-01-15T10:07:56.08Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e7/bf/a29507386366ab17306b187ad247dd78e4599be9032cb5f44c940f547fc0/reportlab-4.4.7-py3-none-any.whl", hash = "sha256:8fa05cbf468e0e76745caf2029a4770276edb3c8e86a0b71e0398926baf50673", size = 1954263, upload-time = "2025-12-21T11:50:08.93Z" },
{ url = "https://files.pythonhosted.org/packages/17/77/546e50edfaba6a0e58e8ec5fdc4446510227cec9e8f40172b60941d5a633/reportlab-4.4.9-py3-none-any.whl", hash = "sha256:68e2d103ae8041a37714e8896ec9b79a1c1e911d68c3bd2ea17546568cf17bfd", size = 1954401, upload-time = "2026-01-15T09:27:59.133Z" },
]
[[package]]
@ -2453,11 +2455,11 @@ wheels = [
[[package]]
name = "soupsieve"
version = "2.8.1"
version = "2.8.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/89/23/adf3796d740536d63a6fbda113d07e60c734b6ed5d3058d1e47fc0495e47/soupsieve-2.8.1.tar.gz", hash = "sha256:4cf733bc50fa805f5df4b8ef4740fc0e0fa6218cf3006269afd3f9d6d80fd350", size = 117856, upload-time = "2025-12-18T13:50:34.655Z" }
sdist = { url = "https://files.pythonhosted.org/packages/93/f2/21d6ca70c3cf35d01ae9e01be534bf6b6b103c157a728082a5028350c310/soupsieve-2.8.2.tar.gz", hash = "sha256:78a66b0fdee2ab40b7199dc3e747ee6c6e231899feeaae0b9b98a353afd48fd8", size = 118601, upload-time = "2026-01-18T16:21:31.09Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/48/f3/b67d6ea49ca9154453b6d70b34ea22f3996b9fa55da105a79d8732227adc/soupsieve-2.8.1-py3-none-any.whl", hash = "sha256:a11fe2a6f3d76ab3cf2de04eb339c1be5b506a8a47f2ceb6d139803177f85434", size = 36710, upload-time = "2025-12-18T13:50:33.267Z" },
{ url = "https://files.pythonhosted.org/packages/a6/9a/b4450ccce353e2430621b3bb571899ffe1033d5cd72c9e065110f95b1a63/soupsieve-2.8.2-py3-none-any.whl", hash = "sha256:0f4c2f6b5a5fb97a641cf69c0bd163670a0e45e6d6c01a2107f93a6a6f93c51a", size = 37016, upload-time = "2026-01-18T16:21:29.7Z" },
]
[[package]]
@ -2474,15 +2476,15 @@ wheels = [
[[package]]
name = "sse-starlette"
version = "3.1.2"
version = "3.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "starlette" },
]
sdist = { url = "https://files.pythonhosted.org/packages/da/34/f5df66cb383efdbf4f2db23cabb27f51b1dcb737efaf8a558f6f1d195134/sse_starlette-3.1.2.tar.gz", hash = "sha256:55eff034207a83a0eb86de9a68099bd0157838f0b8b999a1b742005c71e33618", size = 26303, upload-time = "2025-12-31T08:02:20.023Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/95/8c4b76eec9ae574474e5d2997557cebf764bcd3586458956c30631ae08f4/sse_starlette-3.1.2-py3-none-any.whl", hash = "sha256:cd800dd349f4521b317b9391d3796fa97b71748a4da9b9e00aafab32dda375c8", size = 12484, upload-time = "2025-12-31T08:02:18.894Z" },
{ url = "https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" },
]
[[package]]
@ -2540,11 +2542,11 @@ wheels = [
[[package]]
name = "tomli"
version = "2.3.0"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/52/ed/3f73f72945444548f33eba9a87fc7a6e969915e7b1acc8260b30e1f76a2f/tomli-2.3.0.tar.gz", hash = "sha256:64be704a875d2a59753d80ee8a533c3fe183e3f06807ff7dc2232938ccb01549", size = 17392, upload-time = "2025-10-08T22:01:47.119Z" }
sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/77/b8/0135fadc89e73be292b473cb820b4f5a08197779206b33191e801feeae40/tomli-2.3.0-py3-none-any.whl", hash = "sha256:e95b1af3c5b07d9e643909b5abbec77cd9f1217e6d0bca72b0234736b9fb1f1b", size = 14408, upload-time = "2025-10-08T22:01:46.04Z" },
{ url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" },
]
[[package]]

95
docs/core/models/byok.md Normal file
View file

@ -0,0 +1,95 @@
---
title: "Bring Your Own Key (BYOK)"
description: "Configure your own API keys to use various LLM providers with Eigent."
---
## What is BYOK?
**Bring Your Own Key (BYOK)** allows you to use your own API keys from various AI model providers with Eigent. Instead of relying on a shared service, you connect directly to providers like OpenAI, Anthropic, or Google using your personal API credentials. This gives you:
- **Full control** over your API usage and billing
- **Direct access** to the latest models from each provider
- **Privacy** - your requests go directly to the provider
---
## OpenAI Configuration (Example)
### Step 1: Get Your API Key
1. Visit the [OpenAI API Keys page](https://platform.openai.com/api-keys)
2. Click **"Create new secret key"**
3. Copy the generated key (you won't be able to see it again)
### Step 2: Configure in Eigent
1. Launch Eigent and go to **Settings** > **Models**
2. Find the **OpenAI** card in the Custom Model section
3. Fill in the following fields:
| Field | Value | Example |
|-------|-------|---------|
| **API Key** | Your OpenAI secret key | `sk-proj-xxxx...` |
| **API Host** | OpenAI API endpoint | `https://api.openai.com/v1` |
| **Model Type** | The model you want to use | `gpt-4o`, `gpt-4o-mini` |
4. Click **Save** to validate and store your configuration
5. Click **Set as Default** to use this provider for your agents
---
## Configuration Fields
| Field | Description | Required |
|-------|-------------|----------|
| **API Key** | Your authentication key from the provider | Yes |
| **API Host** | The API endpoint URL | Yes (pre-filled for most providers) |
| **Model Type** | The specific model variant to use | Yes |
| **External Config** | Provider-specific settings (e.g., Azure deployment name) | Only for certain providers |
### Azure-Specific Fields
| Field | Description | Example |
|-------|-------------|---------|
| **API Version** | Azure OpenAI API version | `2024-02-15-preview` |
| **Deployment Name** | Your Azure deployment name | `my-gpt4-deployment` |
---
## Common Errors
When saving your configuration, Eigent validates your API key and model. Here are the errors you may encounter:
| Error | Cause | Solution |
|-------|-------|----------|
| **Invalid key. Validation failed.** | API key is incorrect, expired, or malformed | Double-check your API key. Regenerate a new key if needed. |
| **Invalid model name. Validation failed.** | The specified model does not exist or is not available for your account | Verify the model name is correct. Check if you have access to that model. |
| **You exceeded your current quota** | API quota exhausted or billing issue | Check your provider's billing dashboard. Add credits or upgrade your plan. |
---
## Supported Providers
Eigent supports the following BYOK providers:
| Provider | Default API Host | Official Documentation |
|----------|------------------|------------------------|
| **OpenAI** | `https://api.openai.com/v1` | [OpenAI API Docs](https://platform.openai.com/docs/api-reference) |
| **Anthropic** | `https://api.anthropic.com/v1/` | [Anthropic API Docs](https://docs.anthropic.com/en/api/getting-started) |
| **Google Gemini** | `https://generativelanguage.googleapis.com/v1beta/openai/` | [Gemini API Docs](https://ai.google.dev/gemini-api/docs) |
| **OpenRouter** | `https://openrouter.ai/api/v1` | [OpenRouter Docs](https://openrouter.ai/docs) |
| **Qwen (Alibaba)** | `https://dashscope.aliyuncs.com/compatible-mode/v1` | [Qwen API Docs](https://help.aliyun.com/zh/dashscope/developer-reference/api-details) |
| **DeepSeek** | `https://api.deepseek.com` | [DeepSeek API Docs](https://platform.deepseek.com/api-docs) |
| **Minimax** | `https://api.minimax.io/v1` | [Minimax API Docs](https://platform.minimaxi.com/document/Announcement) |
| **Z.ai** | `https://api.z.ai/api/coding/paas/v4/` | [Z.ai Platform](https://z.ai) |
| **Azure OpenAI** | *(user-provided)* | [Azure OpenAI Docs](https://learn.microsoft.com/en-us/azure/ai-services/openai/reference) |
| **AWS Bedrock** | *(user-provided)* | [AWS Bedrock Docs](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html) |
| **OpenAI Compatible** | *(user-provided)* | For custom endpoints (e.g., xAI, local servers) |
---
## Tips
- **Keep your API key secure** - Never share or expose your API key publicly
- **Monitor usage** - Check your provider's dashboard regularly to track costs
- **Use appropriate models** - Different models have different capabilities and pricing

View file

@ -5,44 +5,50 @@ description: "This guide walks you through setting up your Google Gemini API key
### Prerequisites
- **Get your API Key:** If you haven't already, generate a key at [Google AI Studio](https://aistudio.google.com/).
- **Get your API Key:** If you haven't already, generate a key at
[Google AI Studio](https://aistudio.google.com/).
- **Copy the Key:** Keep your API key ready to paste.
### Configuration Steps
**1. Access Application Settings**
#### 1. Access Application Settings
- Launch Eigent and navigate to the **Home Page**.
- Click on the **Settings** tab (usually located in the sidebar or top navigation).
- Click on the **Settings** tab (usually located in the sidebar or top
navigation).
![Gemini 1 Pn](/docs/images/gemini_1.png)
![Gemini 1 Pn](/docs/images/model_setting.png)
**2. Locate Model Configuration**
#### 2. Locate Model Configuration
- In the Settings menu, find and select the **Models** section.
- Scroll down to the **Custom Model** area.
- Look for the **Gemini Config** card.
-
![Gemini 2 Pn](/docs/images/gemini_2.png)
![Gemini 2 Pn](/docs/images/gemini_1.png)
**3. Enter API Details** Click on the Gemini Config card and fill in the following fields:
#### 3. Enter API Details
Click on the Gemini Config card and fill in the following fields:
- **API Key:** Paste the key you generated from Google AI Studio.
- **API Host:** Enter the appropriate API endpoint host (e.g., `generativelanguage.googleapis.com`).
- **API Host:** Enter the appropriate API endpoint host (for example,
  `https://generativelanguage.googleapis.com/v1beta/openai/`).
- **Model Type:** Enter the specific model version you wish to use.
- _Example:_ `gemini-3-pro-preview`
- **Save:** Click the **Save** button to apply your changes.
![Gemini 3 Pn](/docs/images/gemini_3.png)
![Gemini 3 Pn](/docs/images/gemini_2.png)
**4. Set as Default & Verify**
#### 4. Set as Default & Verify
- Once saved, the **"Set as Default"** button on the Gemini Config card will be selected/active.
- Once saved, the **"Set as Default"** button on the Gemini Config card will be
selected/active.
- **You are ready to go.** Your Eigent agents can now utilize the Gemini model.
![Gemini 4 Pn](/docs/images/gemini_4.png)
![Gemini 4 Pn](/docs/images/gemini_3.png)
---
> **Video Tutorial:** Prefer a visual guide? **<u>Watch the full configuration video here</u>**
> **Video Tutorial:** Prefer a visual guide? Watch the full configuration video
> here.

View file

@ -0,0 +1,50 @@
---
title: "MiniMax"
description: "This guide walks you through setting up your MiniMax API key within Eigent to enable the MiniMax model for your AI workforce."
---
### Prerequisites
- **Get your API Key:** If you haven't already, generate a key at
[Minimax Platform](https://www.minimax.io/).
- **Copy the Key:** Keep your API key ready to paste.
### Configuration Steps
#### 1. Access Application Settings
- Launch Eigent and navigate to the **Home Page**.
- Click on the **Settings** tab (usually located in the sidebar or top
navigation).
![Minimax 1 Pn](/docs/images/model_setting.png)
#### 2. Locate Model Configuration
- In the Settings menu, find and select the **Models** section.
- Scroll down to the **Custom Model** area.
- Look for the **Minimax Config** card.
![Minimax 2 Pn](/docs/images/minimax_1.png)
#### 3. Enter API Details
Click on the Minimax Config card and fill in the following fields:
- **API Key:** Paste the key you generated from Minimax Platform.
- **API Host:** Enter the appropriate API endpoint host.
- **Model Type:** Enter the specific model version you wish to use.
- _Example:_ `MiniMax-M2.1`
- **Save:** Click the **Save** button to apply your changes.
![Minimax 3 Pn](/docs/images/minimax_2.png)
#### 4. Set as Default & Verify
- Once saved, the **"Set as Default"** button on the Minimax Config card will be
selected/active.
- **You are ready to go.** Your Eigent agents can now utilize the Minimax model.
![Minimax 4 Pn](/docs/images/minimax_3.png)
---

View file

@ -66,7 +66,9 @@
"expanded": true,
"pages": [
"/core/models/gemini",
"/core/models/minimax",
"/core/models/local-model"
"/core/models/byok"
]
},
"/core/tools",

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.1 MiB

After

Width:  |  Height:  |  Size: 2 MiB

Before After
Before After

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2 MiB

After

Width:  |  Height:  |  Size: 2.1 MiB

Before After
Before After

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.1 MiB

After

Width:  |  Height:  |  Size: 2.2 MiB

Before After
Before After

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.2 MiB

BIN
docs/images/minimax_1.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 546 KiB

BIN
docs/images/minimax_2.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 502 KiB

BIN
docs/images/minimax_3.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 483 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.1 MiB

View file

@ -67,7 +67,6 @@
}
},
"win": {
"certificateFile": null,
"icon": "build/icon.ico",
"artifactName": "${productName}.Setup.${version}.exe",
"target": [
@ -77,6 +76,11 @@
}
]
},
"linux": {
"icon": "build/icon.png",
"target": ["AppImage"],
"category": "Development"
},
"nsis": {
"oneClick": false,
"perMachine": false,

View file

@ -616,7 +616,8 @@ export async function installDependencies(
log.info('[DEPS INSTALL] Using system npm for installation');
} else {
// Try uv run npm (might not work if nodejs-wheel isn't properly set up)
npmCommand = [uv_path, 'run', 'npm'];
// Quote the path to handle spaces in username on Windows
npmCommand = [`"${uv_path}"`, 'run', 'npm'];
log.info('[DEPS INSTALL] Attempting to use uv run npm');
}
@ -746,7 +747,7 @@ export async function installDependencies(
try {
log.info('[DEPS INSTALL] Installing Playwright browsers...');
const npxCommand =
npmCommand[0] === 'npm' ? ['npx'] : [uv_path, 'run', 'npx'];
npmCommand[0] === 'npm' ? ['npx'] : [`"${uv_path}"`, 'run', 'npx'];
const playwrightInstall = spawn(
npxCommand[0],
[...npxCommand.slice(1), 'playwright', 'install'],

View file

@ -67,14 +67,13 @@ export function update(win: Electron.BrowserWindow) {
* Should be called once when the app starts
*/
export function registerUpdateIpcHandlers() {
// Checking for updates
// Checking for updates - errors are silent since users can't act on them
ipcMain.handle('check-update', async () => {
try {
return await autoUpdater.checkForUpdatesAndNotify()
} catch (error) {
return { message: 'Network error', error }
console.log('[AutoUpdater] Update check failed:', (error as Error).message)
return null
}
})

View file

@ -23,7 +23,8 @@
"build:mac": "npm run preinstall-deps && npm run clean-symlinks && npm run compile-babel && tsc && vite build && electron-builder --mac",
"build:mac:test": "npm run preinstall-deps && npm run clean-symlinks && npm run compile-babel && tsc && vite build && electron-builder --mac && npm run test-signing",
"build:win": "npm run preinstall-deps && npm run compile-babel && tsc && vite build && electron-builder --win",
"build:all": "npm run preinstall-deps && npm run compile-babel && tsc && vite build && electron-builder --mac --win",
"build:linux": "npm run preinstall-deps && npm run clean-symlinks && npm run compile-babel && tsc && vite build && electron-builder --linux",
"build:all": "npm run preinstall-deps && npm run compile-babel && tsc && vite build && electron-builder --mac --win --linux",
"preview": "vite preview",
"pretest": "vite build --mode=test",
"test": "vitest run",
@ -67,7 +68,7 @@
"csv-parser": "^3.2.0",
"dompurify": "^3.2.7",
"electron-log": "^5.4.0",
"electron-updater": "^6.3.9",
"electron-updater": "^6.7.3",
"embla-carousel-autoplay": "^8.6.0",
"embla-carousel-react": "^8.6.0",
"framer-motion": "^12.17.0",
@ -114,11 +115,11 @@
"@vitejs/plugin-react": "^4.3.3",
"@vitest/coverage-v8": "^2.1.9",
"autoprefixer": "^10.4.20",
"electron": "^33.2.0",
"electron-builder": "^24.13.3",
"electron": "^33.4.11",
"electron-builder": "^26.4.0",
"electron-devtools-installer": "^4.0.0",
"i18next": "^25.4.2",
"jsdom": "^26.1.0",
"jsdom": "^27.4.0",
"postcss": "^8.4.49",
"postcss-import": "^16.1.0",
"react": "^18.3.1",
@ -132,10 +133,16 @@
"vite-plugin-electron-renderer": "^0.14.6",
"vitest": "^2.1.5"
},
"overrides": {
"glob": "^10.4.5"
},
"pnpm": {
"neverBuiltDependencies": []
"neverBuiltDependencies": [],
"overrides": {
"glob": "^10.4.5"
}
},
"engines": {
"node": ">=18.0.0 <23.0.0"
"node": ">=20.0.0 <23.0.0"
}
}

View file

@ -389,6 +389,28 @@ async function installUv() {
await tar.extract({ file: tempFilename, cwd: BIN_DIR });
}
// Handle nested directory from tarball if needed
if (!isWindows) {
const nestedDir = fs.readdirSync(BIN_DIR).find(f =>
fs.statSync(path.join(BIN_DIR, f)).isDirectory() && f.startsWith('uv-')
);
if (nestedDir) {
const nestedUvPath = path.join(BIN_DIR, nestedDir, 'uv');
const targetPath = path.join(BIN_DIR, 'uv');
if (fs.existsSync(nestedUvPath)) {
console.log(` Found uv in ${nestedDir}, moving...`);
try {
if (fs.existsSync(targetPath)) fs.unlinkSync(targetPath);
fs.renameSync(nestedUvPath, targetPath);
// Clean up directory
fs.rmSync(path.join(BIN_DIR, nestedDir), { recursive: true, force: true });
} catch (e) {
console.log(` Warning: Failed to move uv from nested dir: ${e.message}`);
}
}
}
}
const extractedUvPath = path.join(BIN_DIR, isWindows ? 'uv.exe' : 'uv');
if (fs.existsSync(extractedUvPath)) {
if (!isWindows && extractedUvPath !== uvPath) {

View file

@ -4,8 +4,8 @@ FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim
# Install the project into `/app`
WORKDIR /app
# Enable bytecode compilation
ENV UV_COMPILE_BYTECODE=1
# Disable bytecode compilation (UV_COMPILE_BYTECODE=0) to avoid EMFILE errors during build on low nofile limits
ENV UV_COMPILE_BYTECODE=0
# Copy from the cache instead of linking since it's a mounted volume
ENV UV_LINK_MODE=copy
@ -15,7 +15,7 @@ ENV UV_PYTHON_INSTALL_MIRROR=https://registry.npmmirror.com/-/binary/python-buil
ARG database_url
ENV database_url=$database_url
RUN apt-get update && apt-get install -y \
RUN apt-get update -o Acquire::Retries=3 && apt-get install -y --no-install-recommends \
gcc \
python3-dev \
&& rm -rf /var/lib/apt/lists/*
@ -43,7 +43,7 @@ RUN uv run pybabel extract -F babel.cfg -o messages.pot . && \
# Install netcat for database connectivity check
RUN apt-get update && apt-get install -y curl netcat-openbsd && rm -rf /var/lib/apt/lists/*
RUN apt-get update -o Acquire::Retries=3 && apt-get install -y --no-install-recommends curl netcat-openbsd && rm -rf /var/lib/apt/lists/*
# Place executables in the environment at the front of the path
ENV PATH="/app/.venv/bin:$PATH"
@ -58,4 +58,4 @@ ENTRYPOINT []
EXPOSE 5678
# Use the start script
CMD ["/app/start.sh"]
CMD ["/app/start.sh"]

111
server/README_PT-BR.md Normal file
View file

@ -0,0 +1,111 @@
### Propósito
`server/` fornece um backend local (FastAPI + PostgreSQL) para alcançar separação completa entre ambientes locais e na nuvem. Após implantar este serviço, dados sensíveis como registro de usuários, configurações de provedores de modelos, configurações de ferramentas e histórico de bate-papo são armazenados em sua máquina e não são enviados para nossa nuvem, a menos que você configure explicitamente serviços externos (por exemplo, provedores de modelos na nuvem ou servidores MCP remotos).
### Serviços Fornecidos (Módulos Principais)
- Usuários e Contas
- `POST /register`: Registro por email + senha (apenas banco de dados local)
- `POST /login`: Login com email + senha; retorna um token emitido localmente
- `GET/PUT /user`, `/user/profile`, `/user/privacy`, `/user/current_credits`, `/user/stat`, etc.
- Provedores de Modelos (armazenar configurações de acesso a modelos locais/na nuvem)
- `GET /providers`, `POST /provider`, `PUT /provider/{id}`, `DELETE /provider/{id}`
- `POST /provider/prefer`: Definir um provedor preferido (frontend/backend priorizará)
- Centro de Configuração (armazenar segredos/parâmetros necessários para ferramentas/capacidades)
- `GET /configs`, `POST /configs`, `PUT /configs/{id}`, `DELETE /configs/{id}`, `GET /config/info`
- Chat e Dados
- Histórico, snapshots, compartilhamento, etc. em `app/controller/chat/`, todos persistidos no banco de dados local
- Gerenciamento de MCP (importar servidores MCP locais/remotos)
- `GET /mcps`, `POST /mcp/install`, `POST /mcp/import/{Local|Remote}`, etc.
Nota: Todos os dados acima são armazenados no volume PostgreSQL local no Docker (veja "Persistência de Dados" abaixo). Se você configurar modelos externos ou MCP remoto, as solicitações vão para os serviços de terceiros que você especificar.
---
### Início Rápido (Docker)
#### Pré-requisitos
- **Docker Desktop**: Instalado e em execução
- **Python**: 3.10.* (3.10.15 recomendado)
- **Node.js**: >=20.0.0 <23.0.0
#### Etapas de Configuração
1) Inicie os serviços
```bash
cd server
# Copie .env.example para .env (ou crie .env de acordo com .env.example)
cp .env.example .env
docker compose up -d
```
2) Inicie o Frontend (Modo Local)
- No diretório raiz do projeto, crie ou modifique `.env.development` para ativar o modo local e apontar para o backend local:
```bash
VITE_BASE_URL=/api
VITE_USE_LOCAL_PROXY=true
VITE_PROXY_URL=http://localhost:3001
```
- Inicie a aplicação frontend:
```bash
npm install
npm run dev
```
### Abra a documentação da API
- `http://localhost:3001/docs` (Interface do Swagger)
### Portas
- API: Host `3001` → Contêiner `5678`
- PostgreSQL: Host `5432` → Contêiner `5432`
### Persistência de Dados
- Os dados do banco de dados são armazenados no volume Docker `server_postgres_data` em `/var/lib/postgresql/data` dentro do contêiner
- As migrações de banco de dados são executadas automaticamente na inicialização do contêiner (veja `start.sh`, que executa `alembic upgrade head`)
### Comandos Comuns
```bash
# Listar contêineres em execução
docker ps
# Parar/Iniciar contêiner da API (manter DB)
docker stop eigent_api
docker start eigent_api
# Parar/Iniciar tudo (API + DB)
docker compose stop
docker compose start
# Exibir logs
docker logs -f eigent_api | cat
docker logs -f eigent_postgres | cat
```
---
### Modo Desenvolvedor (Opcional)
Você pode executar a API localmente com hot-reload enquanto mantém o banco de dados no Docker:
```bash
# Parar API no contêiner, manter DB
docker stop eigent_api
# Inicializar banco de dados (primeira execução ou quando o esquema do BD muda)
cd server
uv run alembic upgrade head
# Executar localmente (fornecer string de conexão do BD)
export database_url=postgresql://postgres:123456@localhost:5432/eigent
uv run uvicorn main:api --reload --port 3001 --host 0.0.0.0
```
---
### Outros
- Documentação da API: `http://localhost:3001/docs`
- Logs de tempo de execução: `/app/runtime/log/app.log` no contêiner
- i18n (para desenvolvedores)
```bash
uv run pybabel extract -F babel.cfg -o messages.pot .
uv run pybabel init -i messages.pot -d lang -l zh_CN
uv run pybabel compile -d lang -l zh_CN
```
Para um ambiente completamente offline, use apenas modelos locais e servidores MCP locais, e evite configurar quaisquer Provedores externos ou endereços MCP remotos.

View file

@ -7,7 +7,7 @@ requires-python = ">=3.12,<3.13"
dependencies = [
"alembic>=1.15.2",
"openai>=1.99.3,<2",
"camel-ai==0.2.83a9",
"camel-ai==0.2.83",
"pydantic[email]>=2.11.1",
"click>=8.1.8",
"fastapi>=0.115.12",

105
server/uv.lock generated
View file

@ -4,16 +4,16 @@ requires-python = "==3.12.*"
[[package]]
name = "alembic"
version = "1.17.2"
version = "1.18.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "mako" },
{ name = "sqlalchemy" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/02/a6/74c8cadc2882977d80ad756a13857857dbcf9bd405bc80b662eb10651282/alembic-1.17.2.tar.gz", hash = "sha256:bbe9751705c5e0f14877f02d46c53d10885e377e3d90eda810a016f9baa19e8e", size = 1988064, upload-time = "2025-11-14T20:35:04.057Z" }
sdist = { url = "https://files.pythonhosted.org/packages/49/cc/aca263693b2ece99fa99a09b6d092acb89973eb2bb575faef1777e04f8b4/alembic-1.18.1.tar.gz", hash = "sha256:83ac6b81359596816fb3b893099841a0862f2117b2963258e965d70dc62fb866", size = 2044319, upload-time = "2026-01-14T18:53:14.907Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ba/88/6237e97e3385b57b5f1528647addea5cc03d4d65d5979ab24327d41fb00d/alembic-1.17.2-py3-none-any.whl", hash = "sha256:f483dd1fe93f6c5d49217055e4d15b905b425b6af906746abb35b69c1996c4e6", size = 248554, upload-time = "2025-11-14T20:35:05.699Z" },
{ url = "https://files.pythonhosted.org/packages/83/36/cd9cb6101e81e39076b2fbe303bfa3c85ca34e55142b0324fcbf22c5c6e2/alembic-1.18.1-py3-none-any.whl", hash = "sha256:f1c3b0920b87134e851c25f1f7f236d8a332c34b75416802d06971df5d1b7810", size = 260973, upload-time = "2026-01-14T18:53:17.533Z" },
]
[[package]]
@ -117,35 +117,35 @@ wheels = [
[[package]]
name = "boto3"
version = "1.42.24"
version = "1.42.30"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore" },
{ name = "jmespath" },
{ name = "s3transfer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ee/21/8be0e3685c3a4868be48d8d2f6e5b4641727e1d8a5d396b8b401d2b5f06e/boto3-1.42.24.tar.gz", hash = "sha256:c47a2f40df933e3861fc66fd8d6b87ee36d4361663a7e7ba39a87f5a78b2eae1", size = 112788, upload-time = "2026-01-07T20:30:51.019Z" }
sdist = { url = "https://files.pythonhosted.org/packages/42/79/2dac8b7cb075cfa43908ee9af3f8ee06880d84b86013854c5cca8945afac/boto3-1.42.30.tar.gz", hash = "sha256:ba9cd2f7819637d15bfbeb63af4c567fcc8a7dcd7b93dd12734ec58601169538", size = 112809, upload-time = "2026-01-16T20:37:23.636Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/75/bbfccb268f9faa4f59030888e859dca9797a980b77d6a074113af73bd4bf/boto3-1.42.24-py3-none-any.whl", hash = "sha256:8ed6ad670a5a2d7f66c1b0d3362791b48392c7a08f78479f5d8ab319a4d9118f", size = 140572, upload-time = "2026-01-07T20:30:49.431Z" },
{ url = "https://files.pythonhosted.org/packages/52/b3/2c0d828c9f668292e277ca5232e6160dd5b4b660a3f076f20dd5378baa1e/boto3-1.42.30-py3-none-any.whl", hash = "sha256:d7e548bea65e0ae2c465c77de937bc686b591aee6a352d5a19a16bc751e591c1", size = 140573, upload-time = "2026-01-16T20:37:22.089Z" },
]
[[package]]
name = "botocore"
version = "1.42.24"
version = "1.42.30"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jmespath" },
{ name = "python-dateutil" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/12/d7/bb4a4e839b238ffb67b002d7326b328ebe5eb23ed5180f2ca10399a802de/botocore-1.42.24.tar.gz", hash = "sha256:be8d1bea64fb91eea08254a1e5fea057e4428d08e61f4e11083a02cafc1f8cc6", size = 14878455, upload-time = "2026-01-07T20:30:40.379Z" }
sdist = { url = "https://files.pythonhosted.org/packages/44/38/23862628a0eb044c8b8b3d7a9ad1920b3bfd6bce6d746d5a871e8382c7e4/botocore-1.42.30.tar.gz", hash = "sha256:9bf1662b8273d5cc3828a49f71ca85abf4e021011c1f0a71f41a2ea5769a5116", size = 14891439, upload-time = "2026-01-16T20:37:13.77Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ff/d4/f2655d777eed8b069ecab3761454cb83f830f8be8b5b0d292e4b3a980d00/botocore-1.42.24-py3-none-any.whl", hash = "sha256:8fca9781d7c84f7ad070fceffaff7179c4aa7a5ffb27b43df9d1d957801e0a8d", size = 14551806, upload-time = "2026-01-07T20:30:38.103Z" },
{ url = "https://files.pythonhosted.org/packages/3d/8d/6d7b016383b1f74dd93611b1c5078bbaddaca901553ab886dcda87cae365/botocore-1.42.30-py3-none-any.whl", hash = "sha256:97070a438cac92430bb7b65f8ebd7075224f4a289719da4ee293d22d1e98db02", size = 14566340, upload-time = "2026-01-16T20:37:10.94Z" },
]
[[package]]
name = "camel-ai"
version = "0.2.83a9"
version = "0.2.83"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "astor" },
@ -162,9 +162,9 @@ dependencies = [
{ name = "tiktoken" },
{ name = "websockets" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bf/8c/7d8071776ba973bb6e734edb6caaf4fdbdf60ecebdc1c4017948cc67ad48/camel_ai-0.2.83a9.tar.gz", hash = "sha256:2ee560551797b089f9849d3b9d63cd3a2b4eb45d339d17e6bf95eba2b85c4b50", size = 1124774, upload-time = "2026-01-15T21:28:24.51Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e2/d1/36f0982862ba2b992968ace43b1c04dd72f7114ce3954342a99e18619d6a/camel_ai-0.2.83.tar.gz", hash = "sha256:c25eb414e9353aab166021852fb54d1d3a0c0e17485fefa996de2cccdf4c8eb9", size = 1125708, upload-time = "2026-01-19T20:37:45.197Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/34/77/f7594707571af9c86351a69ff9f7f580602b42ffe8113803153c069b6bff/camel_ai-0.2.83a9-py3-none-any.whl", hash = "sha256:7cfe97b590096c1cc5afddf6dca023c5b9a47d104196c16b4b2b1934931af260", size = 1595808, upload-time = "2026-01-15T21:28:21.068Z" },
{ url = "https://files.pythonhosted.org/packages/32/e2/4e2964059794af9161889223fa7d17630c1bcc74005c9892cd1e1627650c/camel_ai-0.2.83-py3-none-any.whl", hash = "sha256:3a183efdcccd211ae216b2a7903d48a8811ad0f4541223cacc05cb25a11279a6", size = 1599355, upload-time = "2026-01-19T20:37:41.985Z" },
]
[[package]]
@ -364,7 +364,7 @@ requires-dist = [
{ name = "alembic", specifier = ">=1.15.2" },
{ name = "arrow", specifier = ">=1.3.0" },
{ name = "bcrypt", specifier = "==4.0.1" },
{ name = "camel-ai", specifier = "==0.2.83a9" },
{ name = "camel-ai", specifier = "==0.2.83" },
{ name = "click", specifier = ">=8.1.8" },
{ name = "convert-case", specifier = ">=1.2.3" },
{ name = "cryptography", specifier = ">=45.0.4" },
@ -473,16 +473,16 @@ wheels = [
[[package]]
name = "fastapi-pagination"
version = "0.15.5"
version = "0.15.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "fastapi" },
{ name = "pydantic" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/41/5e/656fa2a88b25f3362d96b838d4858cb10f75ae62a917584375be3dae30fc/fastapi_pagination-0.15.5.tar.gz", hash = "sha256:65871797e53392f5a62eb206b4e1f5494d1f64a8ed4d085a32c4f7c1a1987ee1", size = 572714, upload-time = "2026-01-08T16:19:07.372Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a0/da/ad34e0fc98ca9731b0f76d07faeb39d525cb80440ac5814e270cb379d92a/fastapi_pagination-0.15.6.tar.gz", hash = "sha256:c59ca1aa056dccee3526953357c2d1128f988f83d3034d95ddb8de6f5a68e9f8", size = 573720, upload-time = "2026-01-11T22:15:36.385Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/47/f8/6d3dbbd818a106309d073c81081706541e8ffbf9ed581f799cf525e01e15/fastapi_pagination-0.15.5-py3-none-any.whl", hash = "sha256:a9276ad322d0c85b46f1d5e43b2ef33dce21d1a4dbf5598269752b7542a2b47b", size = 56576, upload-time = "2026-01-08T16:19:05.81Z" },
{ url = "https://files.pythonhosted.org/packages/01/28/0cf3b51115e98c0b84553b9e11ec07f59ae580bf8585eb7876fa9afe4c7a/fastapi_pagination-0.15.6-py3-none-any.whl", hash = "sha256:5c44bfaa78c1c968ca6f027b01a27c1805194c7cc8776eb84ec78235abcdaece", size = 59624, upload-time = "2026-01-11T22:15:37.746Z" },
]
[[package]]
@ -515,7 +515,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" },
{ url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" },
{ url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" },
{ url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297, upload-time = "2025-12-04T15:07:13.552Z" },
{ url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" },
{ url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" },
{ url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" },
@ -738,21 +737,21 @@ wheels = [
[[package]]
name = "numpy"
version = "2.4.0"
version = "2.4.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a4/7a/6a3d14e205d292b738db449d0de649b373a59edb0d0b4493821d0a3e8718/numpy-2.4.0.tar.gz", hash = "sha256:6e504f7b16118198f138ef31ba24d985b124c2c469fe8467007cf30fd992f934", size = 20685720, upload-time = "2025-12-20T16:18:19.023Z" }
sdist = { url = "https://files.pythonhosted.org/packages/24/62/ae72ff66c0f1fd959925b4c11f8c2dea61f47f6acaea75a08512cdfe3fed/numpy-2.4.1.tar.gz", hash = "sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690", size = 20721320, upload-time = "2026-01-10T06:44:59.619Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8b/ff/f6400ffec95de41c74b8e73df32e3fff1830633193a7b1e409be7fb1bb8c/numpy-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2a8b6bb8369abefb8bd1801b054ad50e02b3275c8614dc6e5b0373c305291037", size = 16653117, upload-time = "2025-12-20T16:16:06.709Z" },
{ url = "https://files.pythonhosted.org/packages/fd/28/6c23e97450035072e8d830a3c411bf1abd1f42c611ff9d29e3d8f55c6252/numpy-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e284ca13d5a8367e43734148622caf0b261b275673823593e3e3634a6490f83", size = 12369711, upload-time = "2025-12-20T16:16:08.758Z" },
{ url = "https://files.pythonhosted.org/packages/bc/af/acbef97b630ab1bb45e6a7d01d1452e4251aa88ce680ac36e56c272120ec/numpy-2.4.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:49ff32b09f5aa0cd30a20c2b39db3e669c845589f2b7fc910365210887e39344", size = 5198355, upload-time = "2025-12-20T16:16:10.902Z" },
{ url = "https://files.pythonhosted.org/packages/c1/c8/4e0d436b66b826f2e53330adaa6311f5cac9871a5b5c31ad773b27f25a74/numpy-2.4.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:36cbfb13c152b1c7c184ddac43765db8ad672567e7bafff2cc755a09917ed2e6", size = 6545298, upload-time = "2025-12-20T16:16:12.607Z" },
{ url = "https://files.pythonhosted.org/packages/ef/27/e1f5d144ab54eac34875e79037011d511ac57b21b220063310cb96c80fbc/numpy-2.4.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:35ddc8f4914466e6fc954c76527aa91aa763682a4f6d73249ef20b418fe6effb", size = 14398387, upload-time = "2025-12-20T16:16:14.257Z" },
{ url = "https://files.pythonhosted.org/packages/67/64/4cb909dd5ab09a9a5d086eff9586e69e827b88a5585517386879474f4cf7/numpy-2.4.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc578891de1db95b2a35001b695451767b580bb45753717498213c5ff3c41d63", size = 16363091, upload-time = "2025-12-20T16:16:17.32Z" },
{ url = "https://files.pythonhosted.org/packages/9d/9c/8efe24577523ec6809261859737cf117b0eb6fdb655abdfdc81b2e468ce4/numpy-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98e81648e0b36e325ab67e46b5400a7a6d4a22b8a7c8e8bbfe20e7db7906bf95", size = 16176394, upload-time = "2025-12-20T16:16:19.524Z" },
{ url = "https://files.pythonhosted.org/packages/61/f0/1687441ece7b47a62e45a1f82015352c240765c707928edd8aef875d5951/numpy-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d57b5046c120561ba8fa8e4030fbb8b822f3063910fa901ffadf16e2b7128ad6", size = 18287378, upload-time = "2025-12-20T16:16:22.866Z" },
{ url = "https://files.pythonhosted.org/packages/d3/6f/f868765d44e6fc466467ed810ba9d8d6db1add7d4a748abfa2a4c99a3194/numpy-2.4.0-cp312-cp312-win32.whl", hash = "sha256:92190db305a6f48734d3982f2c60fa30d6b5ee9bff10f2887b930d7b40119f4c", size = 5955432, upload-time = "2025-12-20T16:16:25.06Z" },
{ url = "https://files.pythonhosted.org/packages/d4/b5/94c1e79fcbab38d1ca15e13777477b2914dd2d559b410f96949d6637b085/numpy-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:680060061adb2d74ce352628cb798cfdec399068aa7f07ba9fb818b2b3305f98", size = 12306201, upload-time = "2025-12-20T16:16:26.979Z" },
{ url = "https://files.pythonhosted.org/packages/70/09/c39dadf0b13bb0768cd29d6a3aaff1fb7c6905ac40e9aaeca26b1c086e06/numpy-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:39699233bc72dd482da1415dcb06076e32f60eddc796a796c5fb6c5efce94667", size = 10308234, upload-time = "2025-12-20T16:16:29.417Z" },
{ url = "https://files.pythonhosted.org/packages/78/7f/ec53e32bf10c813604edf07a3682616bd931d026fcde7b6d13195dfb684a/numpy-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2", size = 16656888, upload-time = "2026-01-10T06:42:40.913Z" },
{ url = "https://files.pythonhosted.org/packages/b8/e0/1f9585d7dae8f14864e948fd7fa86c6cb72dee2676ca2748e63b1c5acfe0/numpy-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8", size = 12373956, upload-time = "2026-01-10T06:42:43.091Z" },
{ url = "https://files.pythonhosted.org/packages/8e/43/9762e88909ff2326f5e7536fa8cb3c49fb03a7d92705f23e6e7f553d9cb3/numpy-2.4.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a", size = 5202567, upload-time = "2026-01-10T06:42:45.107Z" },
{ url = "https://files.pythonhosted.org/packages/4b/ee/34b7930eb61e79feb4478800a4b95b46566969d837546aa7c034c742ef98/numpy-2.4.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0", size = 6549459, upload-time = "2026-01-10T06:42:48.152Z" },
{ url = "https://files.pythonhosted.org/packages/79/e3/5f115fae982565771be994867c89bcd8d7208dbfe9469185497d70de5ddf/numpy-2.4.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c", size = 14404859, upload-time = "2026-01-10T06:42:49.947Z" },
{ url = "https://files.pythonhosted.org/packages/d9/7d/9c8a781c88933725445a859cac5d01b5871588a15969ee6aeb618ba99eee/numpy-2.4.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02", size = 16371419, upload-time = "2026-01-10T06:42:52.409Z" },
{ url = "https://files.pythonhosted.org/packages/a6/d2/8aa084818554543f17cf4162c42f162acbd3bb42688aefdba6628a859f77/numpy-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162", size = 16182131, upload-time = "2026-01-10T06:42:54.694Z" },
{ url = "https://files.pythonhosted.org/packages/60/db/0425216684297c58a8df35f3284ef56ec4a043e6d283f8a59c53562caf1b/numpy-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9", size = 18295342, upload-time = "2026-01-10T06:42:56.991Z" },
{ url = "https://files.pythonhosted.org/packages/31/4c/14cb9d86240bd8c386c881bafbe43f001284b7cce3bc01623ac9475da163/numpy-2.4.1-cp312-cp312-win32.whl", hash = "sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f", size = 5959015, upload-time = "2026-01-10T06:42:59.631Z" },
{ url = "https://files.pythonhosted.org/packages/51/cf/52a703dbeb0c65807540d29699fef5fda073434ff61846a564d5c296420f/numpy-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87", size = 12310730, upload-time = "2026-01-10T06:43:01.627Z" },
{ url = "https://files.pythonhosted.org/packages/69/80/a828b2d0ade5e74a9fe0f4e0a17c30fdc26232ad2bc8c9f8b3197cf7cf18/numpy-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8", size = 10312166, upload-time = "2026-01-10T06:43:03.673Z" },
]
[[package]]
@ -1175,14 +1174,14 @@ wheels = [
[[package]]
name = "pydash"
version = "8.0.5"
version = "8.0.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/2f/24/91c037f47e434172c2112d65c00c84d475a6715425e3315ba2cbb7a87e66/pydash-8.0.5.tar.gz", hash = "sha256:7cc44ebfe5d362f4f5f06c74c8684143c5ac481376b059ff02570705523f9e2e", size = 164861, upload-time = "2025-01-17T16:08:50.562Z" }
sdist = { url = "https://files.pythonhosted.org/packages/75/c1/1c55272f49d761cec38ddb80be9817935b9c91ebd6a8988e10f532868d56/pydash-8.0.6.tar.gz", hash = "sha256:b2821547e9723f69cf3a986be4db64de41730be149b2641947ecd12e1e11025a", size = 164338, upload-time = "2026-01-17T16:42:56.576Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/86/e74c978800131c657fc5145f2c1c63e0cea01a49b6216f729cf77a2e1edf/pydash-8.0.5-py3-none-any.whl", hash = "sha256:b2625f8981862e19911daa07f80ed47b315ce20d9b5eb57aaf97aaf570c3892f", size = 102077, upload-time = "2025-01-17T16:08:47.91Z" },
{ url = "https://files.pythonhosted.org/packages/a5/b7/cc5e7974699db40014d58c7dd7c4ad4ffc244d36930dc9ec7d06ee67d7a9/pydash-8.0.6-py3-none-any.whl", hash = "sha256:ee70a81a5b292c007f28f03a4ee8e75c1f5d7576df5457b836ec7ab2839cc5d0", size = 101561, upload-time = "2026-01-17T16:42:55.448Z" },
]
[[package]]
@ -1281,24 +1280,26 @@ wheels = [
[[package]]
name = "regex"
version = "2025.11.3"
version = "2026.1.15"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cc/a9/546676f25e573a4cf00fe8e119b78a37b6a8fe2dc95cda877b30889c9c45/regex-2025.11.3.tar.gz", hash = "sha256:1fedc720f9bb2494ce31a58a1631f9c82df6a09b49c19517ea5cc280b4541e01", size = 414669, upload-time = "2025-11-03T21:34:22.089Z" }
sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e8/74/18f04cb53e58e3fb107439699bd8375cf5a835eec81084e0bddbd122e4c2/regex-2025.11.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bc8ab71e2e31b16e40868a40a69007bc305e1109bd4658eb6cad007e0bf67c41", size = 489312, upload-time = "2025-11-03T21:31:34.343Z" },
{ url = "https://files.pythonhosted.org/packages/78/3f/37fcdd0d2b1e78909108a876580485ea37c91e1acf66d3bb8e736348f441/regex-2025.11.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:22b29dda7e1f7062a52359fca6e58e548e28c6686f205e780b02ad8ef710de36", size = 291256, upload-time = "2025-11-03T21:31:35.675Z" },
{ url = "https://files.pythonhosted.org/packages/bf/26/0a575f58eb23b7ebd67a45fccbc02ac030b737b896b7e7a909ffe43ffd6a/regex-2025.11.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a91e4a29938bc1a082cc28fdea44be420bf2bebe2665343029723892eb073e1", size = 288921, upload-time = "2025-11-03T21:31:37.07Z" },
{ url = "https://files.pythonhosted.org/packages/ea/98/6a8dff667d1af907150432cf5abc05a17ccd32c72a3615410d5365ac167a/regex-2025.11.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08b884f4226602ad40c5d55f52bf91a9df30f513864e0054bad40c0e9cf1afb7", size = 798568, upload-time = "2025-11-03T21:31:38.784Z" },
{ url = "https://files.pythonhosted.org/packages/64/15/92c1db4fa4e12733dd5a526c2dd2b6edcbfe13257e135fc0f6c57f34c173/regex-2025.11.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3e0b11b2b2433d1c39c7c7a30e3f3d0aeeea44c2a8d0bae28f6b95f639927a69", size = 864165, upload-time = "2025-11-03T21:31:40.559Z" },
{ url = "https://files.pythonhosted.org/packages/f9/e7/3ad7da8cdee1ce66c7cd37ab5ab05c463a86ffeb52b1a25fe7bd9293b36c/regex-2025.11.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:87eb52a81ef58c7ba4d45c3ca74e12aa4b4e77816f72ca25258a85b3ea96cb48", size = 912182, upload-time = "2025-11-03T21:31:42.002Z" },
{ url = "https://files.pythonhosted.org/packages/84/bd/9ce9f629fcb714ffc2c3faf62b6766ecb7a585e1e885eb699bcf130a5209/regex-2025.11.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a12ab1f5c29b4e93db518f5e3872116b7e9b1646c9f9f426f777b50d44a09e8c", size = 803501, upload-time = "2025-11-03T21:31:43.815Z" },
{ url = "https://files.pythonhosted.org/packages/7c/0f/8dc2e4349d8e877283e6edd6c12bdcebc20f03744e86f197ab6e4492bf08/regex-2025.11.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7521684c8c7c4f6e88e35ec89680ee1aa8358d3f09d27dfbdf62c446f5d4c695", size = 787842, upload-time = "2025-11-03T21:31:45.353Z" },
{ url = "https://files.pythonhosted.org/packages/f9/73/cff02702960bc185164d5619c0c62a2f598a6abff6695d391b096237d4ab/regex-2025.11.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7fe6e5440584e94cc4b3f5f4d98a25e29ca12dccf8873679a635638349831b98", size = 858519, upload-time = "2025-11-03T21:31:46.814Z" },
{ url = "https://files.pythonhosted.org/packages/61/83/0e8d1ae71e15bc1dc36231c90b46ee35f9d52fab2e226b0e039e7ea9c10a/regex-2025.11.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8e026094aa12b43f4fd74576714e987803a315c76edb6b098b9809db5de58f74", size = 850611, upload-time = "2025-11-03T21:31:48.289Z" },
{ url = "https://files.pythonhosted.org/packages/c8/f5/70a5cdd781dcfaa12556f2955bf170cd603cb1c96a1827479f8faea2df97/regex-2025.11.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:435bbad13e57eb5606a68443af62bed3556de2f46deb9f7d4237bc2f1c9fb3a0", size = 789759, upload-time = "2025-11-03T21:31:49.759Z" },
{ url = "https://files.pythonhosted.org/packages/59/9b/7c29be7903c318488983e7d97abcf8ebd3830e4c956c4c540005fcfb0462/regex-2025.11.3-cp312-cp312-win32.whl", hash = "sha256:3839967cf4dc4b985e1570fd8d91078f0c519f30491c60f9ac42a8db039be204", size = 266194, upload-time = "2025-11-03T21:31:51.53Z" },
{ url = "https://files.pythonhosted.org/packages/1a/67/3b92df89f179d7c367be654ab5626ae311cb28f7d5c237b6bb976cd5fbbb/regex-2025.11.3-cp312-cp312-win_amd64.whl", hash = "sha256:e721d1b46e25c481dc5ded6f4b3f66c897c58d2e8cfdf77bbced84339108b0b9", size = 277069, upload-time = "2025-11-03T21:31:53.151Z" },
{ url = "https://files.pythonhosted.org/packages/d7/55/85ba4c066fe5094d35b249c3ce8df0ba623cfd35afb22d6764f23a52a1c5/regex-2025.11.3-cp312-cp312-win_arm64.whl", hash = "sha256:64350685ff08b1d3a6fff33f45a9ca183dc1d58bbfe4981604e70ec9801bbc26", size = 270330, upload-time = "2025-11-03T21:31:54.514Z" },
{ url = "https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398, upload-time = "2026-01-14T23:14:43.741Z" },
{ url = "https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339, upload-time = "2026-01-14T23:14:45.183Z" },
{ url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003, upload-time = "2026-01-14T23:14:47.25Z" },
{ url = "https://files.pythonhosted.org/packages/c6/e4/1fc4599450c9f0863d9406e944592d968b8d6dfd0d552a7d569e43bceada/regex-2026.1.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8a154cf6537ebbc110e24dabe53095e714245c272da9c1be05734bdad4a61aa", size = 798656, upload-time = "2026-01-14T23:14:48.77Z" },
{ url = "https://files.pythonhosted.org/packages/b2/e6/59650d73a73fa8a60b3a590545bfcf1172b4384a7df2e7fe7b9aab4e2da9/regex-2026.1.15-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8050ba2e3ea1d8731a549e83c18d2f0999fbc99a5f6bd06b4c91449f55291804", size = 864252, upload-time = "2026-01-14T23:14:50.528Z" },
{ url = "https://files.pythonhosted.org/packages/6e/ab/1d0f4d50a1638849a97d731364c9a80fa304fec46325e48330c170ee8e80/regex-2026.1.15-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf065240704cb8951cc04972cf107063917022511273e0969bdb34fc173456c", size = 912268, upload-time = "2026-01-14T23:14:52.952Z" },
{ url = "https://files.pythonhosted.org/packages/dd/df/0d722c030c82faa1d331d1921ee268a4e8fb55ca8b9042c9341c352f17fa/regex-2026.1.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c32bef3e7aeee75746748643667668ef941d28b003bfc89994ecf09a10f7a1b5", size = 803589, upload-time = "2026-01-14T23:14:55.182Z" },
{ url = "https://files.pythonhosted.org/packages/66/23/33289beba7ccb8b805c6610a8913d0131f834928afc555b241caabd422a9/regex-2026.1.15-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d5eaa4a4c5b1906bd0d2508d68927f15b81821f85092e06f1a34a4254b0e1af3", size = 775700, upload-time = "2026-01-14T23:14:56.707Z" },
{ url = "https://files.pythonhosted.org/packages/e7/65/bf3a42fa6897a0d3afa81acb25c42f4b71c274f698ceabd75523259f6688/regex-2026.1.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:86c1077a3cc60d453d4084d5b9649065f3bf1184e22992bd322e1f081d3117fb", size = 787928, upload-time = "2026-01-14T23:14:58.312Z" },
{ url = "https://files.pythonhosted.org/packages/f4/f5/13bf65864fc314f68cdd6d8ca94adcab064d4d39dbd0b10fef29a9da48fc/regex-2026.1.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2b091aefc05c78d286657cd4db95f2e6313375ff65dcf085e42e4c04d9c8d410", size = 858607, upload-time = "2026-01-14T23:15:00.657Z" },
{ url = "https://files.pythonhosted.org/packages/a3/31/040e589834d7a439ee43fb0e1e902bc81bd58a5ba81acffe586bb3321d35/regex-2026.1.15-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:57e7d17f59f9ebfa9667e6e5a1c0127b96b87cb9cede8335482451ed00788ba4", size = 763729, upload-time = "2026-01-14T23:15:02.248Z" },
{ url = "https://files.pythonhosted.org/packages/9b/84/6921e8129687a427edf25a34a5594b588b6d88f491320b9de5b6339a4fcb/regex-2026.1.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c6c4dcdfff2c08509faa15d36ba7e5ef5fcfab25f1e8f85a0c8f45bc3a30725d", size = 850697, upload-time = "2026-01-14T23:15:03.878Z" },
{ url = "https://files.pythonhosted.org/packages/8a/87/3d06143d4b128f4229158f2de5de6c8f2485170c7221e61bf381313314b2/regex-2026.1.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf8ff04c642716a7f2048713ddc6278c5fd41faa3b9cab12607c7abecd012c22", size = 789849, upload-time = "2026-01-14T23:15:06.102Z" },
{ url = "https://files.pythonhosted.org/packages/77/69/c50a63842b6bd48850ebc7ab22d46e7a2a32d824ad6c605b218441814639/regex-2026.1.15-cp312-cp312-win32.whl", hash = "sha256:82345326b1d8d56afbe41d881fdf62f1926d7264b2fc1537f99ae5da9aad7913", size = 266279, upload-time = "2026-01-14T23:15:07.678Z" },
{ url = "https://files.pythonhosted.org/packages/f2/36/39d0b29d087e2b11fd8191e15e81cce1b635fcc845297c67f11d0d19274d/regex-2026.1.15-cp312-cp312-win_amd64.whl", hash = "sha256:4def140aa6156bc64ee9912383d4038f3fdd18fee03a6f222abd4de6357ce42a", size = 277166, upload-time = "2026-01-14T23:15:09.257Z" },
{ url = "https://files.pythonhosted.org/packages/28/32/5b8e476a12262748851fa8ab1b0be540360692325975b094e594dfebbb52/regex-2026.1.15-cp312-cp312-win_arm64.whl", hash = "sha256:c6c565d9a6e1a8d783c1948937ffc377dd5771e83bd56de8317c450a954d2056", size = 270415, upload-time = "2026-01-14T23:15:10.743Z" },
]
[[package]]
@ -1424,15 +1425,15 @@ wheels = [
[[package]]
name = "sse-starlette"
version = "3.1.2"
version = "3.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "starlette" },
]
sdist = { url = "https://files.pythonhosted.org/packages/da/34/f5df66cb383efdbf4f2db23cabb27f51b1dcb737efaf8a558f6f1d195134/sse_starlette-3.1.2.tar.gz", hash = "sha256:55eff034207a83a0eb86de9a68099bd0157838f0b8b999a1b742005c71e33618", size = 26303, upload-time = "2025-12-31T08:02:20.023Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/95/8c4b76eec9ae574474e5d2997557cebf764bcd3586458956c30631ae08f4/sse_starlette-3.1.2-py3-none-any.whl", hash = "sha256:cd800dd349f4521b317b9391d3796fa97b71748a4da9b9e00aafab32dda375c8", size = 12484, upload-time = "2025-12-31T08:02:18.894Z" },
{ url = "https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" },
]
[[package]]

Binary file not shown.

Before

Width:  |  Height:  |  Size: 260 KiB

After

Width:  |  Height:  |  Size: 179 KiB

Before After
Before After

View file

@ -630,7 +630,7 @@ const ToolSelect = forwardRef<
key={item.id + item.key + (item.isLocal + "")}
className="h-5 bg-button-tertiery-fill-default flex items-center gap-1 w-auto flex-shrink-0 px-xs"
>
{item.name || item.mcp_name}
{item.name || item.mcp_name || item.key || `tool_${item.id}`}
<div className="flex items-center justify-center bg-button-secondary-fill-disabled rounded-sm">
<X
className="w-4 h-4 cursor-pointer text-button-secondary-icon-disabled"

View file

@ -282,7 +282,7 @@ export function AddWorker({
name: workerName,
type: workerName as AgentNameType,
log: [],
tools: [...selectedTools.map((tool) => tool.name)],
tools: [...selectedTools.map((tool) => tool.name || tool.mcp_name || tool.key || `tool_${tool.id}`)],
activeWebviewIds: [],
workerInfo: {
name: workerName,
@ -310,7 +310,7 @@ export function AddWorker({
type: workerName as AgentNameType,
log: [],
tools: [
...selectedTools.map((tool) => tool?.key || tool?.mcp_name || ""),
...selectedTools.map((tool) => tool.name || tool.mcp_name || tool.key || `tool_${tool.id}`),
],
activeWebviewIds: [],
workerInfo: {

View file

@ -14,17 +14,10 @@ import {
ChevronLeft,
House,
Share,
MoreHorizontal,
} from "lucide-react";
import "./index.css";
import folderIcon from "@/assets/Folder.svg";
import { Button } from "@/components/ui/button";
import {
DropdownMenu,
DropdownMenuContent,
DropdownMenuItem,
DropdownMenuTrigger,
} from "@/components/ui/dropdown-menu";
import { useLocation, useNavigate } from "react-router-dom";
import { useSidebarStore } from "@/store/sidebarStore";
import useChatStoreAdapter from "@/hooks/useChatStoreAdapter";
@ -264,16 +257,16 @@ function HeaderWin() {
<House className="w-4 h-4" />
</Button>
</TooltipSimple>
<Button
variant="ghost"
size="icon"
className="no-drag"
onClick={createNewProject}
>
<TooltipSimple content={t("layout.new-project")} side="bottom" align="center">
<Plus className="w-4 h-4" />
</TooltipSimple>
</Button>
<TooltipSimple content={t("layout.new-project")} side="bottom" align="center">
<Button
variant="ghost"
size="icon"
className="no-drag"
onClick={createNewProject}
>
<Plus className="w-4 h-4" />
</Button>
</TooltipSimple>
</div>
)}
{location.pathname !== "/history" && (
@ -344,37 +337,42 @@ function HeaderWin() {
</Button>
</TooltipSimple>
)}
<DropdownMenu>
<DropdownMenuTrigger asChild>
{chatStore.activeTaskId && chatStore.tasks[chatStore.activeTaskId as string] && (
<TooltipSimple content={t("layout.report-bug")} side="bottom" align="end">
<Button
onClick={exportLog}
variant="ghost"
size="icon"
className="no-drag"
>
<MoreHorizontal className="w-4 h-4" />
<FileDown className="w-4 h-4" />
</Button>
</DropdownMenuTrigger>
<DropdownMenuContent align="end" className="w-36">
{chatStore.activeTaskId && chatStore.tasks[chatStore.activeTaskId as string] && (
<DropdownMenuItem onClick={exportLog} className="cursor-pointer">
<FileDown className="w-4 h-4" />
{t("layout.report-bug")}
</DropdownMenuItem>
)}
<DropdownMenuItem onClick={getReferFriendsLink} className="cursor-pointer">
<img
src={giftIcon}
alt="gift-icon"
className="w-4 h-4"
/>
{t("layout.refer-friends")}
</DropdownMenuItem>
<DropdownMenuItem onClick={() => navigate("/history?tab=settings")} className="cursor-pointer">
<Settings className="w-4 h-4" />
{t("layout.settings")}
</DropdownMenuItem>
</DropdownMenuContent>
</DropdownMenu>
</TooltipSimple>
)}
<TooltipSimple content={t("layout.refer-friends")} side="bottom" align="end">
<Button
onClick={getReferFriendsLink}
variant="ghost"
size="icon"
className="no-drag"
>
<img
src={giftIcon}
alt="gift-icon"
className="w-4 h-4"
/>
</Button>
</TooltipSimple>
<TooltipSimple content={t("layout.settings")} side="bottom" align="end">
<Button
onClick={() => navigate("/history?tab=settings")}
variant="ghost"
size="icon"
className="no-drag"
>
<Settings className="w-4 h-4" />
</Button>
</TooltipSimple>
</div>
)}
{location.pathname === "/history" && (

View file

@ -109,11 +109,28 @@ export function Node({ id, data }: NodeProps) {
const workerList = useWorkerList();
const { setWorkerList } = useAuthStore();
const nodeRef = useRef<HTMLDivElement>(null);
const lastAutoExpandedTaskIdRef = useRef<string | null>(null);
useEffect(() => {
setIsExpanded(data.isExpanded);
}, [data.isExpanded]);
useEffect(() => {
const runningTask = data.agent?.tasks?.find(
(task) =>
task.status === "running" && task.toolkits && task.toolkits.length > 0
);
if (runningTask && runningTask.id !== lastAutoExpandedTaskIdRef.current) {
if (!isExpanded) {
setIsExpanded(true);
data.onExpandChange(id, true);
setSelectedTask(runningTask);
}
lastAutoExpandedTaskIdRef.current = runningTask.id;
}
}, [data.agent?.tasks, id, data.onExpandChange, isExpanded]);
// manually control node size
useEffect(() => {
if (data.isEditMode) {
@ -403,7 +420,7 @@ export function Node({ id, data }: NodeProps) {
</div>
<div
ref={toolsRef}
className="flex-shrink-0 text-text-label text-xs leading-tight min-h-4 font-normal mb-sm pr-3 text-"
className="flex-shrink-0 text-text-label text-xs leading-tight min-h-4 font-normal mb-sm pr-3"
>
{/* {JSON.stringify(data.agent)} */}
{agentToolkits[

View file

@ -9,22 +9,8 @@ const Update = () => {
const [isDownloading, setIsDownloading] = useState<boolean>(false);
const { t } = useTranslation();
// Some updater errors (e.g. GitHub 503 / missing release) are noisy and not actionable for users.
const shouldSuppressError = (message?: string) => {
if (!message) return false;
const lower = message.toLowerCase();
return (
lower.includes("unable to find latest version on github")
);
};
const checkUpdate = async () => {
const result = await window.ipcRenderer.invoke("check-update");
if (result?.error && !shouldSuppressError(result.error.message)) {
toast.error(t("update.update-check-failed"), {
description: result.error.message,
});
}
const checkUpdate = () => {
window.ipcRenderer.invoke("check-update");
};
const onUpdateCanAvailable = useCallback(
@ -50,10 +36,6 @@ const Update = () => {
const onUpdateError = useCallback(
(_event: Electron.IpcRendererEvent, err: ErrorType) => {
if (shouldSuppressError(err.message)) {
console.warn("[update] suppressed updater error:", err.message);
return;
}
toast.error(t("update.update-error"), {
description: err.message,
});

View file

@ -73,6 +73,15 @@ export const INIT_PROVODERS: Provider[] = [
is_valid: false,
model_type: ""
},
{
id: 'ModelArk',
name: 'ModelArk',
apiKey: '',
apiHost: 'https://ark.ap-southeast.bytepluses.com/api/v3',
description: "ModelArk model configuration.",
is_valid: false,
model_type: ""
},
{
id: 'aws-bedrock',
name: 'AWS Bedrock',

View file

@ -159,6 +159,8 @@ const resolveProcessTaskIdForToolkitEvent = (
// Throttle streaming decompose text updates to prevent excessive re-renders
const streamingDecomposeTextBuffer: Record<string, string> = {};
const streamingDecomposeTextTimers: Record<string, ReturnType<typeof setTimeout>> = {};
// TTFT (Time to First Token) tracking for task decomposition
const ttftTracking: Record<string, { confirmedAt: number; firstTokenLogged: boolean }> = {};
const chatStore = (initial?: Partial<ChatStore>) => createStore<ChatStore>()(
(set, get) => ({
@ -757,6 +759,11 @@ const chatStore = (initial?: Partial<ChatStore>) => createStore<ChatStore>()(
//Enable it for the rest of current SSE session
skipFirstConfirm = false;
// Record confirmed time for TTFT tracking
const ttftTaskId = getCurrentTaskId();
ttftTracking[ttftTaskId] = { confirmedAt: performance.now(), firstTokenLogged: false };
console.log(`[TTFT] Task ${ttftTaskId} confirmed at ${new Date().toISOString()}, starting TTFT measurement`);
return
}
@ -796,6 +803,13 @@ const chatStore = (initial?: Partial<ChatStore>) => createStore<ChatStore>()(
const text = content;
const currentId = getCurrentTaskId();
// Log TTFT (Time to First Token) on first decompose_text event
if (ttftTracking[currentId] && !ttftTracking[currentId].firstTokenLogged) {
ttftTracking[currentId].firstTokenLogged = true;
const ttft = performance.now() - ttftTracking[currentId].confirmedAt;
console.log(`%c[TTFT] 🚀 Time to First Token: ${ttft.toFixed(2)}ms - First streaming token received for task ${currentId}`, 'color: #4CAF50; font-weight: bold');
}
// Get current buffer or task state
const currentContent = streamingDecomposeTextBuffer[currentId] ||
getCurrentChatStore().tasks[currentId]?.streamingDecomposeText || "";
@ -829,6 +843,8 @@ const chatStore = (initial?: Partial<ChatStore>) => createStore<ChatStore>()(
if (agentMessages.step === "to_sub_tasks") {
// Clear streaming decompose text when task splitting is done
clearStreamingDecomposeText(currentTaskId);
// Clean up TTFT tracking
delete ttftTracking[currentTaskId];
// Check if this is a multi-turn scenario after task completion
const isMultiTurnAfterCompletion = tasks[currentTaskId].status === 'finished';

View file

@ -429,11 +429,19 @@ export function createProcessUtilsMock() {
getVenvsBaseDir: vi.fn(),
cleanupOldVenvs: vi.fn(),
isBinaryExists: vi.fn(),
getUvEnv: vi.fn(),
mockState: {} as MockEnvironmentState,
setup: (mockState: MockEnvironmentState) => {
utilsMock.mockState = mockState
utilsMock.getUvEnv.mockReturnValue({
UV_PYTHON_INSTALL_DIR: `${mockState.system.homedir}/.eigent/cache/uv_python`,
UV_TOOL_DIR: `${mockState.system.homedir}/.eigent/cache/uv_tool`,
UV_PROJECT_ENVIRONMENT: `${mockState.system.homedir}/.eigent/venvs/backend-mock`,
UV_HTTP_TIMEOUT: '300',
})
utilsMock.getResourcePath.mockReturnValue(
`${mockState.app.appPath}/resources`
)