feat: schedule and webhook triggers (#823)

Co-authored-by: Douglas <douglas.ym.lai@gmail.com>
Co-authored-by: a7m-1st <ahmed.jimi.awelkair500@gmail.com>
Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Tong Chen <web_chentong@163.com>
Ahmed Awelkair A authored on 2026-03-02 12:38:02 +00:00, committed by GitHub
parent c8f6f7e63c
commit 4fb2e5db9a
200 changed files with 24538 additions and 2126 deletions

@@ -0,0 +1,14 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

@@ -0,0 +1,135 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select, and_
from typing import Optional, List
import logging
from pydantic import BaseModel
from app.model.config.config import Config
from app.type.config_group import ConfigGroup
from app.component.auth import Auth, auth_must
from app.component.database import session
logger = logging.getLogger("server_slack_controller")
class SlackChannelOut(BaseModel):
"""Output model for Slack channels."""
id: str
name: str
is_private: bool = False
is_member: bool = False
num_members: Optional[int] = None
class SlackChannelsResponse(BaseModel):
"""Response model for Slack channels list."""
channels: List[SlackChannelOut]
has_credentials: bool
router = APIRouter(prefix="/trigger/slack", tags=["Slack Integration"])
@router.get("/channels", name="get slack channels")
def get_slack_channels(
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
) -> SlackChannelsResponse:
"""
Get list of Slack channels for the authenticated user.
This endpoint fetches channels from the user's Slack workspace using their
stored credentials. Requires SLACK_BOT_TOKEN to be configured in user configs.
"""
user_id = auth.user.id
# Get Slack credentials from config
configs = session.exec(
select(Config).where(
and_(
Config.user_id == int(user_id),
Config.config_group == ConfigGroup.SLACK.value
)
)
).all()
credentials = {config.config_name: config.config_value for config in configs}
bot_token = credentials.get("SLACK_BOT_TOKEN")
if not bot_token:
logger.warning("Slack credentials not found", extra={"user_id": user_id})
return SlackChannelsResponse(channels=[], has_credentials=False)
try:
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
client = WebClient(token=bot_token)
# Fetch all channels (public and private the bot has access to)
channels = []
cursor = None
while True:
response = client.conversations_list(
types="public_channel,private_channel",
cursor=cursor,
limit=200
)
for channel in response.get("channels", []):
channels.append(SlackChannelOut(
id=channel.get("id"),
name=channel.get("name"),
is_private=channel.get("is_private", False),
is_member=channel.get("is_member", False),
num_members=channel.get("num_members")
))
# Check for pagination
cursor = response.get("response_metadata", {}).get("next_cursor")
if not cursor:
break
logger.info("Slack channels fetched", extra={
"user_id": user_id,
"channel_count": len(channels)
})
return SlackChannelsResponse(channels=channels, has_credentials=True)
except ImportError:
logger.error("slack_sdk not installed")
raise HTTPException(
status_code=500,
detail="Slack SDK not installed on server"
)
except SlackApiError as e:
logger.error("Slack API error", extra={
"user_id": user_id,
"error": str(e)
})
raise HTTPException(
status_code=400,
detail=f"Slack API error: {e.response.get('error', 'Unknown error')}"
)
except Exception as e:
logger.error("Error fetching Slack channels", extra={
"user_id": user_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Failed to fetch Slack channels")

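For reviewers poking at the new endpoint, a minimal client sketch — assuming a local dev server at http://localhost:8000 and a valid bearer token, both placeholders rather than anything defined in this diff:

import httpx

BASE_URL = "http://localhost:8000"  # assumed dev server, not part of this diff
TOKEN = "your-bearer-token"  # placeholder credential

# An empty channel list with has_credentials=False means no SLACK_BOT_TOKEN
# is stored in the user's SLACK config group yet.
resp = httpx.get(
    f"{BASE_URL}/trigger/slack/channels",
    headers={"Authorization": f"Bearer {TOKEN}"},
)
resp.raise_for_status()
data = resp.json()
print(data["has_credentials"], [c["name"] for c in data["channels"]])
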
@@ -0,0 +1,688 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
from fastapi import APIRouter, Depends, HTTPException, Response, Query
from fastapi_pagination import Page
from fastapi_pagination.ext.sqlmodel import paginate
from sqlmodel import Session, select, desc, and_, delete
from typing import Optional
from uuid import uuid4
import logging
from pydantic import ValidationError
from app.model.trigger.trigger import Trigger, TriggerIn, TriggerOut, TriggerUpdate, TriggerConfigSchemaOut
from app.model.trigger.trigger_execution import TriggerExecution, TriggerExecutionOut
from app.model.trigger.app_configs import (
get_config_schema,
validate_config,
has_config,
validate_activation,
ActivationError,
)
from app.model.trigger.app_configs.config_registry import requires_authentication
from app.model.chat.chat_history import ChatHistory
from app.type.trigger_types import TriggerType, TriggerStatus
from app.component.auth import Auth, auth_must
from app.component.database import session
from app.component.redis_utils import get_redis_manager
from app.service.trigger.trigger_schedule_service import TriggerScheduleService
from fastapi_babel import _
from sqlalchemy import func
logger = logging.getLogger("server_trigger_controller")
def get_execution_counts(session: Session, trigger_ids: list[int]) -> dict[int, int]:
"""Get execution counts for multiple triggers in a single query."""
if not trigger_ids:
return {}
result = session.exec(
select(TriggerExecution.trigger_id, func.count(TriggerExecution.id))
.where(TriggerExecution.trigger_id.in_(trigger_ids))
.group_by(TriggerExecution.trigger_id)
).all()
return {trigger_id: count for trigger_id, count in result}
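# Illustration (reviewer note, not in the diff): for trigger_ids=[1, 2] where
# only trigger 1 has three executions, this returns {1: 3}; trigger 2 is
# simply absent, which is why callers below use counts.get(t.id, 0).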
def trigger_to_out(trigger: Trigger, execution_count: int = 0) -> TriggerOut:
"""Convert Trigger model to TriggerOut with execution count."""
return TriggerOut(
id=trigger.id,
user_id=trigger.user_id,
project_id=trigger.project_id,
name=trigger.name,
description=trigger.description,
trigger_type=trigger.trigger_type,
status=trigger.status,
execution_count=execution_count,
webhook_url=trigger.webhook_url,
webhook_method=trigger.webhook_method,
custom_cron_expression=trigger.custom_cron_expression,
listener_type=trigger.listener_type,
agent_model=trigger.agent_model,
task_prompt=trigger.task_prompt,
config=trigger.config,
max_executions_per_hour=trigger.max_executions_per_hour,
max_executions_per_day=trigger.max_executions_per_day,
is_single_execution=trigger.is_single_execution,
last_executed_at=trigger.last_executed_at,
next_run_at=trigger.next_run_at,
last_execution_status=trigger.last_execution_status,
created_at=trigger.created_at,
updated_at=trigger.updated_at,
)
router = APIRouter(prefix="/trigger", tags=["Triggers"])
@router.post("/", name="create trigger", response_model=TriggerOut)
def create_trigger(
data: TriggerIn,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Create a new trigger."""
user_id = auth.user.id
try:
# Check user trigger limit (max 25 triggers per user)
user_trigger_count = session.exec(
select(func.count(Trigger.id)).where(Trigger.user_id == str(user_id))
).one()
if user_trigger_count >= 25:
logger.warning("User trigger limit reached", extra={
"user_id": user_id,
"current_count": user_trigger_count,
"limit": 25
})
raise HTTPException(
status_code=400,
detail="Maximum number of triggers (25) reached for this user"
)
# Check project trigger limit (max 5 triggers per project)
if data.project_id:
project_trigger_count = session.exec(
select(func.count(Trigger.id)).where(
and_(
Trigger.user_id == str(user_id),
Trigger.project_id == data.project_id
)
)
).one()
if project_trigger_count >= 5:
logger.warning("Project trigger limit reached", extra={
"user_id": user_id,
"project_id": data.project_id,
"current_count": project_trigger_count,
"limit": 5
})
raise HTTPException(
status_code=400,
detail="Maximum number of triggers (5) reached for this project"
)
# Check if project_id exists in chat_history, if not create one
if data.project_id:
existing_chat = session.exec(
select(ChatHistory).where(ChatHistory.project_id == data.project_id)
).first()
if not existing_chat:
# Create a new chat_history for this project
chat_history = ChatHistory(
user_id=user_id,
task_id=data.project_id, # Using project_id as task_id
project_id=data.project_id,
question=f"Project created via trigger: {data.name}",
language="en",
model_platform=data.agent_model or "none",
model_type=data.agent_model or "none",
installed_mcp="none",  # expects a string value
api_key="",
api_url="",
max_retries=3,
project_name=data.name,
summary=data.description or "",
tokens=0,
spend=0,
status=2 # completed status
)
session.add(chat_history)
session.commit()
session.refresh(chat_history)
logger.info("Chat history created for new project", extra={
"user_id": user_id,
"project_id": data.project_id,
"chat_history_id": chat_history.id
})
# Send WebSocket notification about new project
try:
redis_manager = get_redis_manager()
redis_manager.publish_execution_event({
"type": "project_created",
"project_id": data.project_id,
"project_name": data.name,
"chat_history_id": chat_history.id,
"trigger_name": data.name,
"user_id": str(user_id),
"created_at": chat_history.created_at.isoformat() if chat_history.created_at else None
})
logger.debug("WebSocket notification sent for new project", extra={
"user_id": user_id,
"project_id": data.project_id
})
except Exception as e:
logger.warning("Failed to send WebSocket notification for new project", extra={
"user_id": user_id,
"project_id": data.project_id,
"error": str(e)
})
# Generate webhook URL for webhook-based triggers
webhook_url = None
if data.trigger_type in (TriggerType.webhook, TriggerType.slack_trigger):
webhook_url = f"/webhook/trigger/{uuid4()}"
# Validate trigger-type specific config
if data.config and has_config(data.trigger_type):
try:
validate_config(data.trigger_type, data.config)
except ValidationError as e:
logger.warning("Invalid trigger config", extra={
"user_id": user_id,
"trigger_type": data.trigger_type.value,
"errors": e.errors()
})
raise HTTPException(
status_code=400,
detail=f"Invalid config for {data.trigger_type.value}: {e.errors()}"
)
# Create trigger instance
trigger_data = data.model_dump()
trigger_data["user_id"] = str(user_id)
trigger_data["webhook_url"] = webhook_url
# Check if authentication is required - set initial status accordingly
if has_config(data.trigger_type) and data.config and requires_authentication(data.trigger_type, data.config):
trigger_data["status"] = TriggerStatus.pending_verification
else:
trigger_data["status"] = TriggerStatus.active
trigger = Trigger(**trigger_data)
session.add(trigger)
session.commit()
session.refresh(trigger)
# Calculate next_run_at for scheduled triggers
if trigger.trigger_type == TriggerType.schedule and trigger.custom_cron_expression:
schedule_service = TriggerScheduleService(session)
trigger.next_run_at = schedule_service.calculate_next_run_at(trigger)
session.add(trigger)
session.commit()
session.refresh(trigger)
logger.info("Trigger created", extra={
"user_id": user_id,
"trigger_id": trigger.id,
"trigger_type": data.trigger_type.value,
"next_run_at": trigger.next_run_at.isoformat() if trigger.next_run_at else None
})
return trigger_to_out(trigger, 0) # New trigger has 0 executions
except HTTPException:
raise
except Exception as e:
session.rollback()
logger.error("Trigger creation failed", extra={
"user_id": user_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/", name="list triggers")
def list_triggers(
trigger_type: Optional[TriggerType] = Query(None, description="Filter by trigger type"),
status: Optional[TriggerStatus] = Query(None, description="Filter by status"),
project_id: Optional[str] = Query(None, description="Filter by project ID"),
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
) -> Page[TriggerOut]:
"""List triggers for current user."""
user_id = auth.user.id
# Build query with filters
conditions = [Trigger.user_id == str(user_id)]
if trigger_type:
conditions.append(Trigger.trigger_type == trigger_type)
if status is not None:
conditions.append(Trigger.status == status)
if project_id:
conditions.append(Trigger.project_id == project_id)
stmt = (
select(Trigger)
.where(and_(*conditions))
.order_by(desc(Trigger.created_at))
)
result = paginate(session, stmt)
total = result.total if hasattr(result, 'total') else 0
# Get execution counts for all triggers in the result
trigger_ids = [t.id for t in result.items]
counts = get_execution_counts(session, trigger_ids)
# Convert triggers to TriggerOut with execution counts
result.items = [trigger_to_out(t, counts.get(t.id, 0)) for t in result.items]
logger.debug("Triggers listed", extra={
"user_id": user_id,
"total": total,
"filters": {
"trigger_type": trigger_type.value if trigger_type else None,
"status": status.value if status is not None else None,
"project_id": project_id
}
})
return result
@router.get("/{trigger_id}", name="get trigger", response_model=TriggerOut)
def get_trigger(
trigger_id: int,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Get a specific trigger by ID."""
user_id = auth.user.id
trigger = session.exec(
select(Trigger).where(
and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id))
)
).first()
if not trigger:
logger.warning("Trigger not found", extra={
"user_id": user_id,
"trigger_id": trigger_id
})
raise HTTPException(status_code=404, detail="Trigger not found")
# Get execution count
counts = get_execution_counts(session, [trigger_id])
execution_count = counts.get(trigger_id, 0)
logger.debug("Trigger retrieved", extra={
"user_id": user_id,
"trigger_id": trigger_id
})
return trigger_to_out(trigger, execution_count)
@router.put("/{trigger_id}", name="update trigger", response_model=TriggerOut)
def update_trigger(
trigger_id: int,
data: TriggerUpdate,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Update a trigger."""
user_id = auth.user.id
trigger = session.exec(
select(Trigger).where(
and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id))
)
).first()
if not trigger:
logger.warning("Trigger not found for update", extra={
"user_id": user_id,
"trigger_id": trigger_id
})
raise HTTPException(status_code=404, detail="Trigger not found")
try:
update_data = data.model_dump(exclude_unset=True)
# Validate config if being updated
if "config" in update_data and update_data["config"] is not None:
if has_config(trigger.trigger_type):
try:
validate_config(trigger.trigger_type, update_data["config"])
except ValidationError as e:
logger.warning("Invalid trigger config on update", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"trigger_type": trigger.trigger_type.value,
"errors": e.errors()
})
raise HTTPException(
status_code=400,
detail=f"Invalid config for {trigger.trigger_type.value}: {e.errors()}"
)
for key, value in update_data.items():
setattr(trigger, key, value)
# Recalculate next_run_at if cron expression or status changed for scheduled triggers
if trigger.trigger_type == TriggerType.schedule:
if "custom_cron_expression" in update_data or "status" in update_data:
if trigger.status == TriggerStatus.active and trigger.custom_cron_expression:
schedule_service = TriggerScheduleService(session)
trigger.next_run_at = schedule_service.calculate_next_run_at(trigger)
session.add(trigger)
session.commit()
session.refresh(trigger)
# Get execution count
counts = get_execution_counts(session, [trigger_id])
execution_count = counts.get(trigger_id, 0)
logger.info("Trigger updated", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"fields_updated": list(update_data.keys()),
"next_run_at": trigger.next_run_at.isoformat() if trigger.next_run_at else None
})
return trigger_to_out(trigger, execution_count)
except HTTPException:
raise
except Exception as e:
session.rollback()
logger.error("Trigger update failed", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.delete("/{trigger_id}", name="delete trigger")
def delete_trigger(
trigger_id: int,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Delete a trigger."""
user_id = auth.user.id
trigger = session.exec(
select(Trigger).where(
and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id))
)
).first()
if not trigger:
logger.warning("Trigger not found for deletion", extra={
"user_id": user_id,
"trigger_id": trigger_id
})
raise HTTPException(status_code=404, detail="Trigger not found")
try:
# Delete execution logs first (bulk delete)
session.exec(
delete(TriggerExecution).where(
TriggerExecution.trigger_id == trigger_id
)
)
# Then delete the trigger
session.delete(trigger)
session.commit()
logger.info("Trigger deleted", extra={
"user_id": user_id,
"trigger_id": trigger_id
})
return Response(status_code=204)
except Exception as e:
session.rollback()
logger.error("Trigger deletion failed", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/{trigger_id}/activate", name="activate trigger", response_model=TriggerOut)
def activate_trigger(
trigger_id: int,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Activate a trigger."""
user_id = auth.user.id
trigger = session.exec(
select(Trigger).where(
and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id))
)
).first()
if not trigger:
logger.warning("Trigger not found for activation", extra={
"user_id": user_id,
"trigger_id": trigger_id
})
raise HTTPException(status_code=404, detail="Trigger not found")
try:
# Check activation requirements for trigger types with configs
if has_config(trigger.trigger_type):
try:
validate_activation(
trigger_type=trigger.trigger_type,
config_data=trigger.config,
user_id=int(user_id),
session=session
)
except ActivationError as e:
logger.warning("Trigger activation requirements not met", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"trigger_type": trigger.trigger_type.value,
"missing_requirements": e.missing_requirements
})
raise HTTPException(
status_code=400,
detail={
"message": e.message,
"missing_requirements": e.missing_requirements,
"trigger_type": trigger.trigger_type.value
}
)
# Check if authentication is required - set to pending_verification if so
if has_config(trigger.trigger_type) and requires_authentication(trigger.trigger_type, trigger.config):
trigger.status = TriggerStatus.pending_verification
logger.info("Trigger set to pending verification (authentication required)", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"trigger_type": trigger.trigger_type.value
})
# Save the status change before raising the exception
session.add(trigger)
session.commit()
session.refresh(trigger)
raise HTTPException(
status_code=401,
detail={
"message": "Authentication required for this trigger type",
"missing_requirements": ["authentication"],
"trigger_type": trigger.trigger_type.value
}
)
else:
trigger.status = TriggerStatus.active
session.add(trigger)
session.commit()
session.refresh(trigger)
# Get execution count
counts = get_execution_counts(session, [trigger_id])
execution_count = counts.get(trigger_id, 0)
logger.info("Trigger status updated", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"status": trigger.status.value
})
return trigger_to_out(trigger, execution_count)
except HTTPException:
raise
except Exception as e:
session.rollback()
logger.error("Trigger activation failed", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/{trigger_id}/deactivate", name="deactivate trigger", response_model=TriggerOut)
def deactivate_trigger(
trigger_id: int,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Deactivate a trigger."""
user_id = auth.user.id
trigger = session.exec(
select(Trigger).where(
and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id))
)
).first()
if not trigger:
logger.warning("Trigger not found for deactivation", extra={
"user_id": user_id,
"trigger_id": trigger_id
})
raise HTTPException(status_code=404, detail="Trigger not found")
try:
trigger.status = TriggerStatus.inactive
session.add(trigger)
session.commit()
session.refresh(trigger)
# Get execution count
counts = get_execution_counts(session, [trigger_id])
execution_count = counts.get(trigger_id, 0)
logger.info("Trigger deactivated", extra={
"user_id": user_id,
"trigger_id": trigger_id
})
return trigger_to_out(trigger, execution_count)
except Exception as e:
session.rollback()
logger.error("Trigger deactivation failed", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/{trigger_id}/executions", name="list trigger executions")
def list_trigger_executions(
trigger_id: int,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
) -> Page[TriggerExecutionOut]:
"""List executions for a specific trigger."""
user_id = auth.user.id
# First verify the trigger belongs to the user
trigger = session.exec(
select(Trigger).where(
and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id))
)
).first()
if not trigger:
logger.warning("Trigger not found for executions list", extra={
"user_id": user_id,
"trigger_id": trigger_id
})
raise HTTPException(status_code=404, detail="Trigger not found")
# Get executions for this trigger
stmt = (
select(TriggerExecution)
.where(TriggerExecution.trigger_id == trigger_id)
.order_by(desc(TriggerExecution.created_at))
)
result = paginate(session, stmt)
total = result.total if hasattr(result, 'total') else 0
logger.debug("Trigger executions listed", extra={
"user_id": user_id,
"trigger_id": trigger_id,
"total": total
})
return result
# ============================================================================
# Trigger Config Endpoints
# ============================================================================
@router.get("/{trigger_type}/config", name="get trigger type config schema")
def get_trigger_type_config(
trigger_type: TriggerType,
auth: Auth = Depends(auth_must)
) -> TriggerConfigSchemaOut:
"""
Get the configuration schema for a specific trigger type.
This endpoint returns the JSON schema for the trigger type's config field,
which can be used by the frontend to dynamically render configuration forms.
"""
schema = get_config_schema(trigger_type)
return TriggerConfigSchemaOut(
trigger_type=trigger_type.value,
has_config=has_config(trigger_type),
schema_=schema
)

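To see the create/schedule path end to end, a sketch of driving the API with httpx — the payload keys mirror the TriggerOut fields above, but TriggerIn's exact required fields live outside this hunk, so treat the body as illustrative:

import httpx

BASE_URL = "http://localhost:8000"  # assumed dev server
HEADERS = {"Authorization": "Bearer your-bearer-token"}  # placeholder credential

# Payload keys mirror TriggerOut above; TriggerIn may require more or fewer.
body = {
    "name": "Nightly report",
    "description": "Runs every day at 02:00 UTC",
    "trigger_type": "schedule",
    "custom_cron_expression": "0 2 * * *",
    "task_prompt": "Summarize yesterday's activity",
}
resp = httpx.post(f"{BASE_URL}/trigger/", json=body, headers=HEADERS)
resp.raise_for_status()
trigger = resp.json()
# next_run_at is filled in by TriggerScheduleService for schedule triggers.
print(trigger["id"], trigger["next_run_at"])

# The config-schema endpoint can drive a dynamic form for any trigger type:
schema = httpx.get(f"{BASE_URL}/trigger/schedule/config", headers=HEADERS).json()
print(schema["has_config"])
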
@@ -0,0 +1,783 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
from fastapi import APIRouter, Depends, HTTPException, Response, WebSocket, WebSocketDisconnect
from fastapi_pagination import Page
from fastapi_pagination.ext.sqlmodel import paginate
from sqlmodel import Session, select, desc, and_
from typing import Optional, Dict, Any
from datetime import datetime, timezone
from uuid import uuid4
import logging
import asyncio
from app.model.trigger.trigger_execution import (
TriggerExecution,
TriggerExecutionIn,
TriggerExecutionOut,
TriggerExecutionUpdate
)
from app.model.trigger.trigger import Trigger
from app.model.user.user import User
from app.type.trigger_types import ExecutionStatus, ExecutionType
from app.component.auth import Auth, auth_must
from app.component.database import session
from app.component.redis_utils import get_redis_manager
from app.service.trigger.trigger_service import TriggerService
logger = logging.getLogger("server_trigger_execution_controller")
# Store active WebSocket connections per session (WebSocket objects only, metadata in Redis)
# Format: {session_id: WebSocket}
# This is per-worker, and Redis pub/sub is used to broadcast across workers
active_websockets: Dict[str, WebSocket] = {}
# Background task for Redis pub/sub
_pubsub_task = None
router = APIRouter(prefix="/execution", tags=["Trigger Executions"])
@router.post("/", name="create trigger execution", response_model=TriggerExecutionOut)
async def create_trigger_execution(
data: TriggerExecutionIn,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Create a new trigger execution."""
user_id = auth.user.id
# Verify the trigger exists and belongs to the user
trigger = session.exec(
select(Trigger).where(
and_(Trigger.id == data.trigger_id, Trigger.user_id == str(user_id))
)
).first()
if not trigger:
logger.warning("Trigger not found for execution creation", extra={
"user_id": user_id,
"trigger_id": data.trigger_id
})
raise HTTPException(status_code=404, detail="Trigger not found")
try:
execution_data = data.model_dump()
execution = TriggerExecution(**execution_data)
session.add(execution)
session.commit()
session.refresh(execution)
# Update trigger last executed timestamp
trigger.last_executed_at = datetime.now(timezone.utc)
session.add(trigger)
session.commit()
logger.info("Trigger execution created", extra={
"user_id": user_id,
"trigger_id": data.trigger_id,
"execution_id": execution.execution_id,
"execution_type": data.execution_type.value
})
# Publish to Redis pub/sub (broadcasts to all workers)
redis_manager = get_redis_manager()
redis_manager.publish_execution_event({
"type": "execution_created",
"execution_id": execution.execution_id,
"trigger_id": trigger.id,
"trigger_type": trigger.trigger_type.value if trigger.trigger_type else "unknown",
"task_prompt": trigger.task_prompt,
"status": execution.status.value,
"input_data": execution.input_data,
"execution_type": data.execution_type.value,
"user_id": str(user_id),
"timestamp": datetime.now(timezone.utc).isoformat(),
"project_id": str(trigger.project_id)
})
return execution
except Exception as e:
session.rollback()
logger.error("Trigger execution creation failed", extra={
"user_id": user_id,
"trigger_id": data.trigger_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/", name="list executions")
def list_executions(
trigger_id: Optional[int] = None,
status: Optional[ExecutionStatus] = None,
execution_type: Optional[ExecutionType] = None,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
) -> Page[TriggerExecutionOut]:
"""List trigger executions for current user."""
user_id = auth.user.id
# Get all trigger IDs that belong to the user
user_trigger_ids = session.exec(
select(Trigger.id).where(Trigger.user_id == str(user_id))
).all()
if not user_trigger_ids:
# User has no triggers, return empty result
return Page(items=[], total=0, page=1, size=50, pages=0)
# Build conditions
conditions = [TriggerExecution.trigger_id.in_(user_trigger_ids)]
if trigger_id:
if trigger_id not in user_trigger_ids:
raise HTTPException(status_code=404, detail="Trigger not found")
conditions.append(TriggerExecution.trigger_id == trigger_id)
if status is not None:
conditions.append(TriggerExecution.status == status)
if execution_type:
conditions.append(TriggerExecution.execution_type == execution_type)
stmt = (
select(TriggerExecution)
.where(and_(*conditions))
.order_by(desc(TriggerExecution.created_at))
)
result = paginate(session, stmt)
total = result.total if hasattr(result, 'total') else 0
logger.debug("Executions listed", extra={
"user_id": user_id,
"total": total,
"filters": {
"trigger_id": trigger_id,
"status": status.value if status is not None else None,
"execution_type": execution_type.value if execution_type else None
}
})
return result
@router.get("/{execution_id}", name="get execution", response_model=TriggerExecutionOut)
def get_execution(
execution_id: str,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Get a specific execution by execution ID."""
user_id = auth.user.id
# Get the execution and verify ownership through trigger
execution = session.exec(
select(TriggerExecution)
.join(Trigger)
.where(
and_(
TriggerExecution.execution_id == execution_id,
Trigger.user_id == str(user_id)
)
)
).first()
if not execution:
logger.warning("Execution not found", extra={
"user_id": user_id,
"execution_id": execution_id
})
raise HTTPException(status_code=404, detail="Execution not found")
logger.debug("Execution retrieved", extra={
"user_id": user_id,
"execution_id": execution_id
})
return execution
@router.put("/{execution_id}", name="update execution", response_model=TriggerExecutionOut)
async def update_execution(
execution_id: str,
data: TriggerExecutionUpdate,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Update a trigger execution."""
user_id = auth.user.id
# Get the execution and verify ownership through trigger
execution = session.exec(
select(TriggerExecution)
.join(Trigger)
.where(
and_(
TriggerExecution.execution_id == execution_id,
Trigger.user_id == str(user_id)
)
)
).first()
if not execution:
logger.warning("Execution not found for update", extra={
"user_id": user_id,
"execution_id": execution_id
})
raise HTTPException(status_code=404, detail="Execution not found")
try:
update_data = data.model_dump(exclude_unset=True)
# Check if status is being updated - use TriggerService for proper failure tracking
if "status" in update_data:
trigger_service = TriggerService(session)
# Convert status string back to enum for TriggerService
status_value = ExecutionStatus(update_data["status"]) if isinstance(update_data["status"], str) else update_data["status"]
trigger_service.update_execution_status(
execution=execution,
status=status_value,
output_data=update_data.get("output_data"),
error_message=update_data.get("error_message"),
tokens_used=update_data.get("tokens_used"),
tools_executed=update_data.get("tools_executed")
)
# Remove status-related fields from update_data since TriggerService handled them
for key in ["status", "output_data", "error_message", "tokens_used", "tools_executed"]:
update_data.pop(key, None)
# Update remaining fields
if update_data:
# Auto-calculate duration if both started_at and completed_at are set
if ("started_at" in update_data or "completed_at" in update_data) and execution.started_at:
completed_at = update_data.get("completed_at") or execution.completed_at
if completed_at:
# Ensure both datetimes are timezone-aware for subtraction
started_at = execution.started_at
if started_at.tzinfo is None:
started_at = started_at.replace(tzinfo=timezone.utc)
if completed_at.tzinfo is None:
completed_at = completed_at.replace(tzinfo=timezone.utc)
duration = (completed_at - started_at).total_seconds()
update_data["duration_seconds"] = duration
for key, value in update_data.items():
setattr(execution, key, value)
session.add(execution)
session.commit()
session.refresh(execution)
# Get trigger for event publishing
trigger = session.get(Trigger, execution.trigger_id)
logger.info("Execution updated", extra={
"user_id": user_id,
"execution_id": execution_id,
"fields_updated": list(data.model_dump(exclude_unset=True).keys())
})
# Publish to Redis pub/sub (broadcasts to all workers)
redis_manager = get_redis_manager()
redis_manager.publish_execution_event({
"type": "execution_updated",
"execution_id": execution_id,
"trigger_id": execution.trigger_id,
"status": execution.status.value,
"updated_fields": list(update_data.keys()),
"user_id": str(user_id),
"timestamp": datetime.now(timezone.utc).isoformat(),
"project_id": str(trigger.project_id) if trigger else None
})
return execution
except Exception as e:
session.rollback()
logger.error("Execution update failed", extra={
"user_id": user_id,
"execution_id": execution_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.delete("/{execution_id}", name="delete execution")
def delete_execution(
execution_id: str,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Delete a trigger execution."""
user_id = auth.user.id
# Get the execution and verify ownership through trigger
execution = session.exec(
select(TriggerExecution)
.join(Trigger)
.where(
and_(
TriggerExecution.execution_id == execution_id,
Trigger.user_id == str(user_id)
)
)
).first()
if not execution:
logger.warning("Execution not found for deletion", extra={
"user_id": user_id,
"execution_id": execution_id
})
raise HTTPException(status_code=404, detail="Execution not found")
try:
session.delete(execution)
session.commit()
logger.info("Execution deleted", extra={
"user_id": user_id,
"execution_id": execution_id
})
return Response(status_code=204)
except Exception as e:
session.rollback()
logger.error("Execution deletion failed", extra={
"user_id": user_id,
"execution_id": execution_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.post("/{execution_id}/retry", name="retry execution", response_model=TriggerExecutionOut)
def retry_execution(
execution_id: str,
session: Session = Depends(session),
auth: Auth = Depends(auth_must)
):
"""Retry a failed execution."""
user_id = auth.user.id
# Get the execution and verify ownership through trigger
execution = session.exec(
select(TriggerExecution)
.join(Trigger)
.where(
and_(
TriggerExecution.execution_id == execution_id,
Trigger.user_id == str(user_id)
)
)
).first()
if not execution:
logger.warning("Execution not found for retry", extra={
"user_id": user_id,
"execution_id": execution_id
})
raise HTTPException(status_code=404, detail="Execution not found")
if execution.status != ExecutionStatus.failed:
raise HTTPException(status_code=400, detail="Only failed executions can be retried")
if execution.attempts >= execution.max_retries:
raise HTTPException(status_code=400, detail="Maximum retry attempts exceeded")
try:
# Create a new execution for the retry
new_execution_id = str(uuid4())
new_execution = TriggerExecution(
trigger_id=execution.trigger_id,
execution_id=new_execution_id,
execution_type=execution.execution_type,
input_data=execution.input_data,
attempts=execution.attempts + 1,
max_retries=execution.max_retries
)
session.add(new_execution)
session.commit()
session.refresh(new_execution)
# Get trigger for event publishing
trigger = session.get(Trigger, execution.trigger_id)
logger.info("Execution retry created", extra={
"user_id": user_id,
"original_execution_id": execution_id,
"new_execution_id": new_execution_id,
"attempts": new_execution.attempts
})
# Publish to Redis pub/sub (broadcasts to all workers)
redis_manager = get_redis_manager()
redis_manager.publish_execution_event({
"type": "execution_created",
"execution_id": new_execution.execution_id,
"trigger_id": trigger.id if trigger else execution.trigger_id,
"trigger_type": trigger.trigger_type.value if trigger and trigger.trigger_type else "unknown",
"task_prompt": trigger.task_prompt if trigger else None,
"status": new_execution.status.value,
"input_data": new_execution.input_data,
"execution_type": new_execution.execution_type.value,
"user_id": str(user_id),
"timestamp": datetime.now(timezone.utc).isoformat(),
"project_id": str(trigger.project_id) if trigger else None
})
return new_execution
except Exception as e:
session.rollback()
logger.error("Execution retry failed", extra={
"user_id": user_id,
"execution_id": execution_id,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.websocket("/subscribe")
async def subscribe_executions(websocket: WebSocket):
"""Subscribe to trigger execution events via WebSocket.
Client sends: {"type": "subscribe", "session_id": "unique-session-id", "auth_token": "bearer-token"}
Client acknowledges execution: {"type": "ack", "execution_id": "exec-id"}
Server sends: {"type": "execution_created", "execution_id": "...", ...}
Server sends: {"type": "heartbeat", "timestamp": "..."}
"""
# Ensure pub/sub listener is started in THIS worker process
await start_pubsub_listener()
await websocket.accept()
session_id = None
user_id = None
db_session = None
try:
# Create database session manually for WebSocket
from app.component.database import session_make
db_session = session_make()
# Wait for subscription message
data = await websocket.receive_json()
if data.get("type") != "subscribe" or not data.get("session_id"):
await websocket.send_json({
"type": "error",
"message": "Invalid subscription. Send {type: 'subscribe', session_id: 'your-session-id', auth_token: 'bearer-token'}"
})
await websocket.close()
return
session_id = data["session_id"]
auth_token = data.get("auth_token")
# Authenticate user
if not auth_token:
await websocket.send_json({
"type": "error",
"message": "Authentication required. Provide 'auth_token' in subscription message"
})
await websocket.close()
return
try:
from app.component.auth import Auth
# Decode token and fetch user
auth = Auth.decode_token(auth_token)
user = db_session.get(User, auth.id)
if not user:
raise Exception("User not found")
auth._user = user
user_id = auth.user.id
logger.info(f"User authenticated for WebSocket {user_id} and {session_id}", extra={
"user_id": user_id,
"session_id": session_id
})
except Exception as e:
await websocket.send_json({
"type": "error",
"message": "Authentication failed"
})
await websocket.close()
logger.warning("WebSocket authentication failed", extra={
"session_id": session_id,
"error": str(e)
})
return
# Register session in Redis and store WebSocket reference
redis_manager = get_redis_manager()
redis_manager.store_session(session_id, str(user_id))
active_websockets[session_id] = websocket
logger.info(f"WebSocket session registered", extra={
"session_id": session_id,
"user_id": user_id,
"total_active": len(active_websockets)
})
await websocket.send_json({
"type": "connected",
"session_id": session_id,
"timestamp": datetime.now(timezone.utc).isoformat()
})
logger.info("Client subscribed to executions", extra={
"session_id": session_id,
"user_id": user_id,
"total_sessions": len(active_websockets),
"all_session_ids": list(active_websockets.keys())
})
# Handle incoming messages (acknowledgments)
async def handle_messages():
while True:
try:
msg = await websocket.receive_json()
if msg.get("type") == "ack" and msg.get("execution_id"):
execution_id = msg["execution_id"]
# Remove from pending in Redis
redis_manager.remove_pending_execution(session_id, execution_id)
# Update execution status to running
execution = db_session.exec(
select(TriggerExecution).where(
TriggerExecution.execution_id == execution_id
)
).first()
if execution and execution.status == ExecutionStatus.pending:
execution.status = ExecutionStatus.running
execution.started_at = datetime.now(timezone.utc)
db_session.add(execution)
db_session.commit()
logger.info("Execution acknowledged and started", extra={
"session_id": session_id,
"execution_id": execution_id
})
await websocket.send_json({
"type": "ack_confirmed",
"execution_id": execution_id,
"status": "running"
})
elif msg.get("type") == "ping":
# Publish pong through Redis pub/sub
redis_manager.publish_execution_event({
"type": "pong",
"session_id": session_id,
"user_id": str(user_id),
"timestamp": datetime.now(timezone.utc).isoformat()
})
except WebSocketDisconnect:
break
# Start heartbeat task
async def send_heartbeat():
while True:
await asyncio.sleep(30)
try:
await websocket.send_json({
"type": "heartbeat",
"timestamp": datetime.now(timezone.utc).isoformat()
})
except Exception:
break
# Run both tasks concurrently
await asyncio.gather(
handle_messages(),
send_heartbeat(),
return_exceptions=True
)
except WebSocketDisconnect as e:
logger.info("Client disconnected", extra={
"session_id": session_id,
"disconnect_code": getattr(e, 'code', None),
"reason": "websocket_disconnect"
})
except Exception as e:
logger.error("WebSocket error", extra={"session_id": session_id, "error": str(e)}, exc_info=True)
finally:
if session_id:
redis_manager = get_redis_manager()
# Clean up session from Redis and local WebSocket dict
redis_manager.remove_session(session_id)
if session_id in active_websockets:
del active_websockets[session_id]
logger.info("Session cleaned up", extra={"session_id": session_id})
# Close database session
if db_session:
db_session.close()
async def handle_pubsub_message(event_data: Dict[str, Any]):
"""Handle execution events from Redis pub/sub.
This function is called by each worker when a message is published.
Each worker will send the message to its own local WebSocket connections.
"""
try:
event_type = event_data.get("type")
logger.info(f"[PUBSUB] Received event from Redis: {event_type}", extra={
"event_type": event_type,
"execution_id": event_data.get("execution_id"),
"user_id": event_data.get("user_id")
})
# Handle pong events - send only to the specific session
if event_type == "pong":
target_session_id = event_data.get("session_id")
if target_session_id and target_session_id in active_websockets:
try:
ws = active_websockets[target_session_id]
await ws.send_json({
"type": "pong",
"timestamp": event_data.get("timestamp")
})
logger.debug("Pong sent via Redis pub/sub", extra={
"session_id": target_session_id
})
except Exception as e:
logger.error("Failed to send pong", extra={
"session_id": target_session_id,
"error": str(e)
})
return
execution_id = event_data.get("execution_id")
event_user_id = event_data.get("user_id")
if not event_user_id:
logger.warning("Event missing user_id, cannot filter subscribers", extra={
"execution_id": execution_id
})
return
# Get user sessions from Redis
redis_manager = get_redis_manager()
user_session_ids = redis_manager.get_user_sessions(event_user_id)
logger.debug(f"User has {len(user_session_ids)} active session(s)", extra={
"user_id": event_user_id,
"session_count": len(user_session_ids)
})
# Only notify sessions that are connected to THIS worker
local_sessions = set(active_websockets.keys()) & user_session_ids
if not local_sessions:
logger.debug("No local WebSocket connections for this user", extra={
"user_id": event_user_id,
"execution_id": execution_id
})
return # No local connections for this user
logger.info(f"Broadcasting execution to {len(local_sessions)} WebSocket(s)", extra={
"execution_id": execution_id,
"user_id": event_user_id,
"session_count": len(local_sessions)
})
disconnected_sessions = []
notified_count = 0
for session_id in local_sessions:
try:
ws = active_websockets.get(session_id)
if not ws:
disconnected_sessions.append(session_id)
continue
# Send execution event
await ws.send_json(event_data)
notified_count += 1
# Track as pending if it's a new execution
if event_data.get("type") == "execution_created" and execution_id:
redis_manager.add_pending_execution(session_id, execution_id)
# Confirm delivery for webhook to proceed
redis_manager.confirm_delivery(execution_id, session_id)
logger.debug("Notified session of execution", extra={
"session_id": session_id,
"user_id": event_user_id,
"execution_id": execution_id
})
except Exception as e:
logger.error("Failed to notify session", extra={
"session_id": session_id,
"error": str(e)
})
disconnected_sessions.append(session_id)
# Clean up disconnected sessions
for session_id in disconnected_sessions:
redis_manager.remove_session(session_id)
if session_id in active_websockets:
del active_websockets[session_id]
if notified_count > 0:
logger.debug("Execution event broadcast complete", extra={
"execution_id": execution_id,
"user_id": event_user_id,
"sessions_notified": notified_count
})
except Exception as e:
logger.error(f"Error handling pub/sub message: {str(e)}", extra={
"error": str(e),
"error_type": type(e).__name__,
"event_data": event_data
}, exc_info=True)
async def start_pubsub_listener():
"""Start the Redis pub/sub listener for this worker."""
global _pubsub_task
if _pubsub_task is not None:
return # Already started
import os
logger.info(f"[PID {os.getpid()}] Starting Redis pub/sub listener for execution events")
redis_manager = get_redis_manager()
async def run_subscriber():
try:
await redis_manager.subscribe_to_execution_events(handle_pubsub_message)
except Exception as e:
logger.error("Pub/sub listener crashed", extra={"error": str(e)}, exc_info=True)
_pubsub_task = asyncio.create_task(run_subscriber())

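A companion client for the subscribe protocol documented in subscribe_executions above — a sketch using the third-party websockets package, with the URL, session id, and token as placeholders:

import asyncio
import json
import websockets  # third-party client library, assumed installed

WS_URL = "ws://localhost:8000/execution/subscribe"  # assumed dev server

async def main():
    async with websockets.connect(WS_URL) as ws:
        # Handshake shape comes from the subscribe_executions docstring.
        await ws.send(json.dumps({
            "type": "subscribe",
            "session_id": "demo-session-1",  # any unique id
            "auth_token": "your-bearer-token",  # placeholder credential
        }))
        while True:
            event = json.loads(await ws.recv())
            if event.get("type") == "execution_created":
                # Acking moves the execution from pending to running server-side.
                await ws.send(json.dumps(
                    {"type": "ack", "execution_id": event["execution_id"]}
                ))

asyncio.run(main())
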
@@ -0,0 +1,348 @@
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
"""
Webhook Controller
Handles incoming webhook triggers with modular app-specific processing.
"""
from fastapi import APIRouter, Depends, HTTPException, Request
from sqlmodel import Session, select, and_, or_
from uuid import uuid4
from datetime import datetime, timezone
import json
import logging
from fastapi_limiter.depends import RateLimiter
from app.model.trigger.trigger import Trigger
from app.model.trigger.trigger_execution import TriggerExecution
from app.type.trigger_types import TriggerType, TriggerStatus, ExecutionType, ExecutionStatus
from app.component.database import session
from app.component.trigger_utils import check_rate_limits
from app.service.trigger.app_handler_service import get_app_handler
logger = logging.getLogger("server_webhook_controller")
router = APIRouter(prefix="/webhook", tags=["Webhook"])
# Trigger types that use webhooks
WEBHOOK_TRIGGER_TYPES = [TriggerType.webhook, TriggerType.slack_trigger]
@router.api_route("/trigger/{webhook_uuid}", methods=["GET", "POST"], name="webhook trigger", dependencies=[Depends(RateLimiter(times=10, seconds=60))])
async def webhook_trigger(
webhook_uuid: str,
request: Request,
db_session: Session = Depends(session)
):
"""Handle incoming webhook triggers with app-specific processing."""
try:
# Get request body
body = await request.body()
try:
input_data = json.loads(body) if body else {}
except json.JSONDecodeError:
input_data = {"raw_body": body.decode()}
headers = dict(request.headers)
webhook_url = f"/webhook/trigger/{webhook_uuid}"
# Find the trigger (allow active and pending_verification for verification flows)
trigger = db_session.exec(
select(Trigger).where(
and_(
Trigger.webhook_url == webhook_url,
Trigger.trigger_type.in_(WEBHOOK_TRIGGER_TYPES),
Trigger.status.in_([TriggerStatus.active, TriggerStatus.pending_verification])
)
)
).first()
if not trigger:
logger.warning("Webhook trigger not found or inactive", extra={
"webhook_uuid": webhook_uuid
})
raise HTTPException(status_code=404, detail="Webhook not found or inactive")
# Get app handler based on trigger_type
handler = get_app_handler(trigger.trigger_type)
# App-specific authentication
if handler:
auth_result = await handler.authenticate(request, body, trigger, db_session)
if not auth_result.success:
raise HTTPException(status_code=401, detail=auth_result.reason or "Invalid signature")
# Return challenge response for URL verification (e.g., Slack)
# Don't update status yet - wait for actual events to confirm integration works
if auth_result.data:
logger.info("URL verification challenge received", extra={
"trigger_id": trigger.id,
"trigger_type": trigger.trigger_type.value,
"status": trigger.status.value
})
return auth_result.data
# Update trigger status from pending_verification to active after receiving
# a real event (not just URL verification) with valid signature
if trigger.status == TriggerStatus.pending_verification:
trigger.status = TriggerStatus.active
db_session.add(trigger)
db_session.commit()
db_session.refresh(trigger)
logger.info("Trigger status updated to active after receiving valid event", extra={
"trigger_id": trigger.id,
"trigger_type": trigger.trigger_type.value
})
# Notify Redis subscribers of successful activation
try:
from app.component.redis_utils import get_redis_manager
redis_manager = get_redis_manager()
redis_manager.publish_execution_event({
"type": "trigger_activated",
"trigger_id": trigger.id,
"trigger_type": trigger.trigger_type.value,
"task_prompt": trigger.task_prompt,
"user_id": str(trigger.user_id),
"project_id": str(trigger.project_id),
"webhook_uuid": webhook_uuid
})
except Exception as e:
logger.warning(f"Failed to publish activation event: {e}")
# Default webhook: validate request method
if trigger.trigger_type == TriggerType.webhook and trigger.webhook_method:
expected_method = trigger.webhook_method.value if hasattr(trigger.webhook_method, 'value') else str(trigger.webhook_method)
expected_method = expected_method.rstrip(',')
if request.method.upper() != expected_method.upper():
raise HTTPException(
status_code=405,
detail=f"Method not allowed. This webhook only accepts {expected_method} requests"
)
# Prepare request metadata for filtering and normalization
safe_headers = {k: v for k, v in headers.items() if k.lower() not in ['authorization', 'cookie']}
query_params = dict(request.query_params)
body_raw = body.decode() if body else ""
request_meta = {
"headers": safe_headers,
"query_params": query_params,
"method": request.method,
"url": str(request.url),
"client_ip": request.client.host if request.client else None
}
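# Illustration (reviewer note, not in the diff): for a POST from 192.0.2.10 to
# /webhook/trigger/<uuid>?source=ci, request_meta comes out roughly as
# {"headers": {...}, "query_params": {"source": "ci"}, "method": "POST",
#  "url": "https://host/webhook/trigger/<uuid>?source=ci", "client_ip": "192.0.2.10"}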
# App-specific event filtering (pass headers and body for webhook config filtering)
if handler:
# For default webhook handler, pass additional context
if trigger.trigger_type == TriggerType.webhook:
filter_result = await handler.filter_event(
input_data,
trigger,
headers=safe_headers,
body_raw=body_raw
)
else:
filter_result = await handler.filter_event(input_data, trigger)
if not filter_result.success:
logger.debug("Event filtered", extra={
"trigger_id": trigger.id,
"reason": filter_result.reason
})
return {"status": "ignored", "reason": filter_result.reason}
# Check rate limits
current_time = datetime.now(timezone.utc)
if trigger.max_executions_per_hour or trigger.max_executions_per_day:
if not check_rate_limits(db_session, trigger):
logger.warning("Webhook rate limit exceeded", extra={
"trigger_id": trigger.id
})
raise HTTPException(status_code=429, detail="Rate limit exceeded")
# Check single execution
if trigger.is_single_execution:
from sqlmodel import func
execution_count = db_session.exec(
select(func.count(TriggerExecution.id)).where(
TriggerExecution.trigger_id == trigger.id
)
).first()
if execution_count > 0:
raise HTTPException(status_code=409, detail="Single execution trigger already executed")
# Normalize input data (pass request_meta for full webhook input)
if handler:
execution_input = handler.normalize_payload(input_data, trigger, request_meta=request_meta)
else:
execution_input = {
"headers": safe_headers,
"query_params": query_params,
"body": input_data,
"method": request.method,
"url": str(request.url),
"client_ip": request.client.host if request.client else None
}
# Determine execution type
execution_type = handler.execution_type if handler else ExecutionType.webhook
# Create execution record
execution_id = str(uuid4())
execution = TriggerExecution(
trigger_id=trigger.id,
execution_id=execution_id,
execution_type=execution_type,
status=ExecutionStatus.pending,
input_data=execution_input,
started_at=current_time
)
db_session.add(execution)
# Update trigger
trigger.last_executed_at = current_time
trigger.last_execution_status = "pending"
db_session.add(trigger)
db_session.commit()
db_session.refresh(execution)
logger.info("Webhook trigger executed", extra={
"trigger_id": trigger.id,
"execution_id": execution_id,
"trigger_type": trigger.trigger_type.value,
"user_id": trigger.user_id
})
# Notify WebSocket subscribers and wait for delivery confirmation
try:
from app.component.redis_utils import get_redis_manager
redis_manager = get_redis_manager()
# Check if user has any active WebSocket sessions
has_active_sessions = redis_manager.has_active_sessions_for_user(str(trigger.user_id))
redis_manager.publish_execution_event({
"type": "execution_created",
"execution_id": execution_id,
"trigger_id": trigger.id,
"trigger_type": trigger.trigger_type.value,
"task_prompt": trigger.task_prompt,
"status": "pending",
"input_data": execution_input,
"user_id": str(trigger.user_id),
"project_id": str(trigger.project_id)
})
if has_active_sessions:
# Wait for delivery confirmation (10 second timeout)
delivery_confirmation = await redis_manager.wait_for_delivery(
execution_id,
timeout=10.0
)
if delivery_confirmation:
logger.info("Webhook delivery confirmed", extra={
"execution_id": execution_id,
"session_id": delivery_confirmation.get("session_id")
})
return {
"success": True,
"execution_id": execution_id,
"message": "Webhook trigger delivered to client",
"delivered": True,
"session_id": delivery_confirmation.get("session_id")
}
else:
logger.warning("Webhook delivery confirmation timed out", extra={
"execution_id": execution_id,
"trigger_id": trigger.id
})
return {
"success": True,
"execution_id": execution_id,
"message": "Webhook trigger processed but delivery not confirmed",
"delivered": False,
"reason": "timeout"
}
else:
# No active sessions, execution is queued
logger.info("No active WebSocket sessions for user", extra={
"execution_id": execution_id,
"user_id": trigger.user_id
})
return {
"success": True,
"execution_id": execution_id,
"message": "Webhook trigger processed, no active client connected",
"delivered": False,
"reason": "no_active_sessions"
}
except Exception as e:
logger.warning(f"Failed to publish/confirm WebSocket event: {e}")
return {
"success": True,
"execution_id": execution_id,
"message": "Webhook trigger processed but WebSocket notification failed",
"delivered": False,
"reason": "websocket_notification_error"
}
except HTTPException:
raise
except Exception as e:
logger.error("Webhook trigger processing failed", extra={
"webhook_uuid": webhook_uuid,
"error": str(e)
}, exc_info=True)
raise HTTPException(status_code=500, detail="Internal server error")
@router.get("/trigger/{webhook_uuid}/info", name="webhook info")
def get_webhook_info(
webhook_uuid: str,
db_session: Session = Depends(session)
):
"""Get information about a webhook trigger (public endpoint)."""
webhook_url = f"/webhook/trigger/{webhook_uuid}"
trigger = db_session.exec(
select(Trigger).where(
and_(
Trigger.webhook_url == webhook_url,
Trigger.trigger_type.in_(WEBHOOK_TRIGGER_TYPES)
)
)
).first()
if not trigger:
raise HTTPException(status_code=404, detail="Webhook not found")
return {
"name": trigger.name,
"description": trigger.description,
"status": trigger.status.value,
"trigger_type": trigger.trigger_type.value,
"is_active": trigger.status == TriggerStatus.active,
"webhook_method": trigger.webhook_method.value if trigger.webhook_method else None,
"last_executed_at": trigger.last_executed_at.isoformat() if trigger.last_executed_at else None,
}
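
And to fire a webhook end to end, a sketch using the webhook_url returned at trigger creation — base URL and UUID are placeholders:

import httpx

BASE_URL = "http://localhost:8000"  # assumed dev server
WEBHOOK_PATH = "/webhook/trigger/<uuid>"  # use the webhook_url from TriggerOut

resp = httpx.post(f"{BASE_URL}{WEBHOOK_PATH}", json={"event": "demo"})
print(resp.status_code, resp.json())
# Per webhook_trigger above, expect {"success": True, "delivered": True, ...}
# when a connected WebSocket client confirms delivery, or delivered=False with
# "reason" set to "timeout" or "no_active_sessions" otherwise.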