mirror of
https://github.com/Skyvern-AI/skyvern.git
synced 2026-04-28 03:30:10 +00:00
Fix: apply default login prompt for MCP-created workflow login blocks (#SKY-8637) (#5342)
This commit is contained in:
parent
84cb07bc78
commit
c581fb310d
20 changed files with 1864 additions and 140 deletions
270
docs/docs.json
270
docs/docs.json
|
|
@ -55,71 +55,6 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"tab": "Developers",
|
||||
"groups": [
|
||||
{
|
||||
"group": "Running Automations",
|
||||
"pages": [
|
||||
"running-automations/run-a-task",
|
||||
"running-automations/task-parameters",
|
||||
"running-automations/extract-structured-data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Multi-Step Automations",
|
||||
"pages": [
|
||||
"multi-step-automations/build-a-workflow",
|
||||
"multi-step-automations/workflow-blocks-reference",
|
||||
"multi-step-automations/file-operations",
|
||||
"multi-step-automations/workflow-parameters",
|
||||
"multi-step-automations/scheduling-workflows"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Optimization",
|
||||
"pages": [
|
||||
"optimization/browser-sessions",
|
||||
"optimization/browser-profiles",
|
||||
"optimization/cost-control"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Going to Production",
|
||||
"pages": [
|
||||
"going-to-production/webhooks",
|
||||
"going-to-production/proxy-geolocation",
|
||||
"going-to-production/error-handling",
|
||||
"going-to-production/reliability-tips",
|
||||
"going-to-production/captcha-bot-detection",
|
||||
"integrations/mcp",
|
||||
"integrations/cli"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Debugging",
|
||||
"pages": [
|
||||
"debugging/using-artifacts",
|
||||
"debugging/troubleshooting-guide",
|
||||
"debugging/observability-with-laminar",
|
||||
"debugging/faq"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Self-Hosted Deployment",
|
||||
"pages": [
|
||||
"self-hosted/overview",
|
||||
"self-hosted/docker",
|
||||
"self-hosted/llm-configuration",
|
||||
"self-hosted/browser",
|
||||
"self-hosted/proxy",
|
||||
"self-hosted/kubernetes",
|
||||
"self-hosted/storage",
|
||||
"integrations/local-llms"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"tab": "Cloud UI",
|
||||
"groups": [
|
||||
|
|
@ -188,6 +123,71 @@
|
|||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"tab": "Developers",
|
||||
"groups": [
|
||||
{
|
||||
"group": "Running Automations",
|
||||
"pages": [
|
||||
"running-automations/run-a-task",
|
||||
"running-automations/task-parameters",
|
||||
"running-automations/extract-structured-data"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Multi-Step Automations",
|
||||
"pages": [
|
||||
"multi-step-automations/build-a-workflow",
|
||||
"multi-step-automations/workflow-blocks-reference",
|
||||
"multi-step-automations/file-operations",
|
||||
"multi-step-automations/workflow-parameters",
|
||||
"multi-step-automations/scheduling-workflows"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Optimization",
|
||||
"pages": [
|
||||
"optimization/browser-sessions",
|
||||
"optimization/browser-profiles",
|
||||
"optimization/cost-control"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Going to Production",
|
||||
"pages": [
|
||||
"going-to-production/webhooks",
|
||||
"going-to-production/proxy-geolocation",
|
||||
"going-to-production/error-handling",
|
||||
"going-to-production/reliability-tips",
|
||||
"going-to-production/captcha-bot-detection",
|
||||
"integrations/mcp",
|
||||
"integrations/cli"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Debugging",
|
||||
"pages": [
|
||||
"debugging/using-artifacts",
|
||||
"debugging/troubleshooting-guide",
|
||||
"debugging/observability-with-laminar",
|
||||
"debugging/faq"
|
||||
]
|
||||
},
|
||||
{
|
||||
"group": "Self-Hosted Deployment",
|
||||
"pages": [
|
||||
"self-hosted/overview",
|
||||
"self-hosted/docker",
|
||||
"self-hosted/llm-configuration",
|
||||
"self-hosted/browser",
|
||||
"self-hosted/proxy",
|
||||
"self-hosted/kubernetes",
|
||||
"self-hosted/storage",
|
||||
"integrations/local-llms"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"tab": "Cookbooks",
|
||||
"pages": [
|
||||
|
|
@ -414,6 +414,146 @@
|
|||
{
|
||||
"source": "/integrations/ollama-litellm",
|
||||
"destination": "/integrations/local-llms"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/agent/run-task",
|
||||
"destination": "/api-reference/agent/run-a-task"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/agent/get-run",
|
||||
"destination": "/api-reference/agent/get-a-run-by-id"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/agent/cancel-run",
|
||||
"destination": "/api-reference/agent/cancel-a-run-by-id"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/agent/retry-run-webhook",
|
||||
"destination": "/api-reference/agent/retry-run-webhook"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/agent/login",
|
||||
"destination": "/api-reference/agent/login-task"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/agent/get-run-timeline",
|
||||
"destination": "/api-reference/agent/get-run-timeline"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflows/get-workflows",
|
||||
"destination": "/api-reference/workflows/get-workflows"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflows/create-workflow",
|
||||
"destination": "/api-reference/workflows/create-a-new-workflow"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflows/get-workflow",
|
||||
"destination": "/api-reference/workflows/get-workflows"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflows/get-workflow-versions",
|
||||
"destination": "/api-reference/workflows/get-workflows"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflows/update-workflow",
|
||||
"destination": "/api-reference/workflows/update-a-workflow"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflows/delete-workflow",
|
||||
"destination": "/api-reference/workflows/delete-a-workflow"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflow-runs/run-workflow",
|
||||
"destination": "/api-reference/workflows/run-a-workflow"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflow-runs/get-workflow-runs",
|
||||
"destination": "/api-reference/workflows/get-workflows"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflow-runs/get-run",
|
||||
"destination": "/api-reference/agent/get-a-run-by-id"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflow-runs/cancel-run",
|
||||
"destination": "/api-reference/agent/cancel-a-run-by-id"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflow-runs/retry-run-webhook",
|
||||
"destination": "/api-reference/agent/retry-run-webhook"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/workflow-runs/get-run-timeline",
|
||||
"destination": "/api-reference/agent/get-run-timeline"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/browser-sessions/create-browser-session",
|
||||
"destination": "/api-reference/browser-sessions/create-a-session"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/browser-sessions/get-browser-session",
|
||||
"destination": "/api-reference/browser-sessions/get-a-session"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/browser-sessions/close-browser-session",
|
||||
"destination": "/api-reference/browser-sessions/close-a-session"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/browser-sessions/get-browser-sessions",
|
||||
"destination": "/api-reference/browser-sessions/get-active-browser-sessions"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/browser-profiles/create-browser-profile",
|
||||
"destination": "/api-reference/browser-profiles/create-a-browser-profile"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/browser-profiles/list-browser-profiles",
|
||||
"destination": "/api-reference/browser-profiles/list-browser-profiles"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/browser-profiles/get-browser-profile",
|
||||
"destination": "/api-reference/browser-profiles/get-browser-profile"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/browser-profiles/delete-browser-profile",
|
||||
"destination": "/api-reference/browser-profiles/delete-browser-profile"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/credentials/create-credential",
|
||||
"destination": "/api-reference/credentials/create-credential"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/credentials/get-credential",
|
||||
"destination": "/api-reference/credentials/get-credential-by-id"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/credentials/update-credential",
|
||||
"destination": "/api-reference/credentials/create-credential"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/credentials/delete-credential",
|
||||
"destination": "/api-reference/credentials/delete-credential"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/credentials/get-credentials",
|
||||
"destination": "/api-reference/credentials/get-all-credentials"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/credentials/send-totp-code",
|
||||
"destination": "/api-reference/credentials/send-totp-code"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/artifacts/get-run-artifacts",
|
||||
"destination": "/api-reference/artifacts/get-artifacts-for-a-run"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/artifacts/get-artifact-content-v-1-artifacts-artifact-id-content-get",
|
||||
"destination": "/api-reference/artifacts/get-an-artifact"
|
||||
},
|
||||
{
|
||||
"source": "/api-reference/api-reference/files/upload-file",
|
||||
"destination": "/api-reference/files/upload-file"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -452,6 +452,9 @@ a.sk-bento-cell-link:hover .sk-arrow-sm {
|
|||
============================================= */
|
||||
|
||||
@media (max-width: 900px) {
|
||||
.sk-hero {
|
||||
margin-top: 130px;
|
||||
}
|
||||
.sk-hero-inner {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -871,6 +871,485 @@
|
|||
]
|
||||
}
|
||||
},
|
||||
"/v1/folders": {
|
||||
"post": {
|
||||
"tags": [
|
||||
"Workflow Folders"
|
||||
],
|
||||
"summary": "Create folder",
|
||||
"description": "Create a new folder to organize workflows",
|
||||
"operationId": "create_folder_v1_folders_post",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "x-api-key",
|
||||
"in": "header",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings.",
|
||||
"title": "X-Api-Key"
|
||||
},
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings."
|
||||
}
|
||||
],
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/FolderCreate"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successfully created folder",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Folder"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Invalid request"
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"x-fern-sdk-method-name": "create_folder"
|
||||
},
|
||||
"get": {
|
||||
"tags": [
|
||||
"Workflow Folders"
|
||||
],
|
||||
"summary": "Get folders",
|
||||
"description": "Get all folders for the organization",
|
||||
"operationId": "get_folders_v1_folders_get",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "page",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"minimum": 1,
|
||||
"description": "Page number",
|
||||
"default": 1,
|
||||
"title": "Page"
|
||||
},
|
||||
"description": "Page number"
|
||||
},
|
||||
{
|
||||
"name": "page_size",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"maximum": 500,
|
||||
"minimum": 1,
|
||||
"description": "Number of folders per page",
|
||||
"default": 100,
|
||||
"title": "Page Size"
|
||||
},
|
||||
"description": "Number of folders per page"
|
||||
},
|
||||
{
|
||||
"name": "search",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Search folders by title or description",
|
||||
"title": "Search"
|
||||
},
|
||||
"description": "Search folders by title or description"
|
||||
},
|
||||
{
|
||||
"name": "x-api-key",
|
||||
"in": "header",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings.",
|
||||
"title": "X-Api-Key"
|
||||
},
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings."
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successfully retrieved folders",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/Folder"
|
||||
},
|
||||
"title": "Response Get Folders V1 Folders Get"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"x-fern-sdk-method-name": "get_folders"
|
||||
}
|
||||
},
|
||||
"/v1/folders/{folder_id}": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"Workflow Folders"
|
||||
],
|
||||
"summary": "Get folder",
|
||||
"description": "Get a specific folder by ID",
|
||||
"operationId": "get_folder_v1_folders__folder_id__get",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "folder_id",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"description": "Folder ID",
|
||||
"examples": [
|
||||
"fld_123"
|
||||
],
|
||||
"title": "Folder Id"
|
||||
},
|
||||
"description": "Folder ID"
|
||||
},
|
||||
{
|
||||
"name": "x-api-key",
|
||||
"in": "header",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings.",
|
||||
"title": "X-Api-Key"
|
||||
},
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings."
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successfully retrieved folder",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Folder"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Folder not found"
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"x-fern-sdk-method-name": "get_folder"
|
||||
},
|
||||
"put": {
|
||||
"tags": [
|
||||
"Workflow Folders"
|
||||
],
|
||||
"summary": "Update folder",
|
||||
"description": "Update a folder's title or description",
|
||||
"operationId": "update_folder_v1_folders__folder_id__put",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "folder_id",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"description": "Folder ID",
|
||||
"examples": [
|
||||
"fld_123"
|
||||
],
|
||||
"title": "Folder Id"
|
||||
},
|
||||
"description": "Folder ID"
|
||||
},
|
||||
{
|
||||
"name": "x-api-key",
|
||||
"in": "header",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings.",
|
||||
"title": "X-Api-Key"
|
||||
},
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings."
|
||||
}
|
||||
],
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/FolderUpdate"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successfully updated folder",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Folder"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Folder not found"
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"x-fern-sdk-method-name": "update_folder"
|
||||
},
|
||||
"delete": {
|
||||
"tags": [
|
||||
"Workflow Folders"
|
||||
],
|
||||
"summary": "Delete folder",
|
||||
"description": "Delete a folder. Optionally delete all workflows in the folder.",
|
||||
"operationId": "delete_folder_v1_folders__folder_id__delete",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "folder_id",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"description": "Folder ID",
|
||||
"examples": [
|
||||
"fld_123"
|
||||
],
|
||||
"title": "Folder Id"
|
||||
},
|
||||
"description": "Folder ID"
|
||||
},
|
||||
{
|
||||
"name": "delete_workflows",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"type": "boolean",
|
||||
"description": "If true, also delete all workflows in this folder",
|
||||
"default": false,
|
||||
"title": "Delete Workflows"
|
||||
},
|
||||
"description": "If true, also delete all workflows in this folder"
|
||||
},
|
||||
{
|
||||
"name": "x-api-key",
|
||||
"in": "header",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings.",
|
||||
"title": "X-Api-Key"
|
||||
},
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings."
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successfully deleted folder",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"additionalProperties": true,
|
||||
"title": "Response Delete Folder V1 Folders Folder Id Delete"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "Folder not found"
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"x-fern-sdk-method-name": "delete_folder"
|
||||
}
|
||||
},
|
||||
"/v1/workflows/{workflow_permanent_id}/folder": {
|
||||
"put": {
|
||||
"tags": [
|
||||
"Workflow Folders"
|
||||
],
|
||||
"summary": "Update workflow folder",
|
||||
"description": "Update a workflow's folder assignment for the latest version",
|
||||
"operationId": "update_workflow_folder_v1_workflows__workflow_permanent_id__folder_put",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "workflow_permanent_id",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string",
|
||||
"description": "Workflow permanent ID",
|
||||
"examples": [
|
||||
"wpid_123"
|
||||
],
|
||||
"title": "Workflow Permanent Id"
|
||||
},
|
||||
"description": "Workflow permanent ID"
|
||||
},
|
||||
{
|
||||
"name": "x-api-key",
|
||||
"in": "header",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings.",
|
||||
"title": "X-Api-Key"
|
||||
},
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings."
|
||||
}
|
||||
],
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/UpdateWorkflowFolderRequest"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successfully updated workflow folder",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/Workflow"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"400": {
|
||||
"description": "Folder not found"
|
||||
},
|
||||
"404": {
|
||||
"description": "Workflow not found"
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"x-fern-sdk-method-name": "update_workflow_folder"
|
||||
}
|
||||
},
|
||||
"/v1/artifacts/{artifact_id}/content": {
|
||||
"get": {
|
||||
"tags": [
|
||||
|
|
@ -1196,6 +1675,128 @@
|
|||
]
|
||||
}
|
||||
},
|
||||
"/v1/runs": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"agent"
|
||||
],
|
||||
"summary": "Get Runs V2",
|
||||
"operationId": "get_runs_v2_v1_runs_get",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "page",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"maximum": 100,
|
||||
"minimum": 1,
|
||||
"default": 1,
|
||||
"title": "Page"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "page_size",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"type": "integer",
|
||||
"maximum": 100,
|
||||
"minimum": 1,
|
||||
"default": 10,
|
||||
"title": "Page Size"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "status",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/RunStatus"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Status"
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "search_key",
|
||||
"in": "query",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"minLength": 3
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Case-insensitive substring search (min 3 chars for trigram index).",
|
||||
"examples": [
|
||||
"login_url",
|
||||
"wr_abc123"
|
||||
],
|
||||
"title": "Search Key"
|
||||
},
|
||||
"description": "Case-insensitive substring search (min 3 chars for trigram index)."
|
||||
},
|
||||
{
|
||||
"name": "x-api-key",
|
||||
"in": "header",
|
||||
"required": false,
|
||||
"schema": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings.",
|
||||
"title": "X-Api-Key"
|
||||
},
|
||||
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings."
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "Successful Response",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/TaskRunListItem"
|
||||
},
|
||||
"title": "Response Get Runs V2 V1 Runs Get"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"422": {
|
||||
"description": "Validation Error",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/HTTPValidationError"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"x-fern-sdk-method-name": "get_runs_v2"
|
||||
}
|
||||
},
|
||||
"/v1/workflows/runs": {
|
||||
"get": {
|
||||
"tags": [
|
||||
|
|
@ -8660,6 +9261,125 @@
|
|||
],
|
||||
"title": "FileUploadBlockYAML"
|
||||
},
|
||||
"Folder": {
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"minLength": 1,
|
||||
"title": "Title",
|
||||
"description": "Folder title"
|
||||
},
|
||||
"description": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Description",
|
||||
"description": "Folder description"
|
||||
},
|
||||
"folder_id": {
|
||||
"type": "string",
|
||||
"title": "Folder Id"
|
||||
},
|
||||
"organization_id": {
|
||||
"type": "string",
|
||||
"title": "Organization Id"
|
||||
},
|
||||
"workflow_count": {
|
||||
"type": "integer",
|
||||
"title": "Workflow Count",
|
||||
"description": "Number of workflows in this folder",
|
||||
"default": 0
|
||||
},
|
||||
"created_at": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"title": "Created At"
|
||||
},
|
||||
"modified_at": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"title": "Modified At"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": [
|
||||
"title",
|
||||
"folder_id",
|
||||
"organization_id",
|
||||
"created_at",
|
||||
"modified_at"
|
||||
],
|
||||
"title": "Folder",
|
||||
"description": "Response model for a folder"
|
||||
},
|
||||
"FolderCreate": {
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"minLength": 1,
|
||||
"title": "Title",
|
||||
"description": "Folder title"
|
||||
},
|
||||
"description": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Description",
|
||||
"description": "Folder description"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": [
|
||||
"title"
|
||||
],
|
||||
"title": "FolderCreate",
|
||||
"description": "Request model for creating a folder"
|
||||
},
|
||||
"FolderUpdate": {
|
||||
"properties": {
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"maxLength": 255,
|
||||
"minLength": 1
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Title",
|
||||
"description": "Folder title"
|
||||
},
|
||||
"description": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Description",
|
||||
"description": "Folder description"
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"title": "FolderUpdate",
|
||||
"description": "Request model for updating a folder"
|
||||
},
|
||||
"ForLoopBlock": {
|
||||
"properties": {
|
||||
"label": {
|
||||
|
|
@ -13496,6 +14216,92 @@
|
|||
],
|
||||
"title": "TaskBlockYAML"
|
||||
},
|
||||
"TaskRunListItem": {
|
||||
"properties": {
|
||||
"task_run_id": {
|
||||
"type": "string",
|
||||
"title": "Task Run Id"
|
||||
},
|
||||
"run_id": {
|
||||
"type": "string",
|
||||
"title": "Run Id"
|
||||
},
|
||||
"task_run_type": {
|
||||
"type": "string",
|
||||
"title": "Task Run Type"
|
||||
},
|
||||
"status": {
|
||||
"type": "string",
|
||||
"title": "Status"
|
||||
},
|
||||
"title": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Title"
|
||||
},
|
||||
"started_at": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Started At"
|
||||
},
|
||||
"finished_at": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Finished At"
|
||||
},
|
||||
"created_at": {
|
||||
"type": "string",
|
||||
"format": "date-time",
|
||||
"title": "Created At"
|
||||
},
|
||||
"workflow_permanent_id": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Workflow Permanent Id"
|
||||
},
|
||||
"script_run": {
|
||||
"type": "boolean",
|
||||
"title": "Script Run",
|
||||
"default": false
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"required": [
|
||||
"task_run_id",
|
||||
"run_id",
|
||||
"task_run_type",
|
||||
"status",
|
||||
"created_at"
|
||||
],
|
||||
"title": "TaskRunListItem",
|
||||
"description": "Lightweight run-history item backed by the task_runs table."
|
||||
},
|
||||
"TaskRunRequest": {
|
||||
"properties": {
|
||||
"prompt": {
|
||||
|
|
@ -14784,6 +15590,25 @@
|
|||
"title": "TotpType",
|
||||
"description": "Type of 2FA/TOTP method used."
|
||||
},
|
||||
"UpdateWorkflowFolderRequest": {
|
||||
"properties": {
|
||||
"folder_id": {
|
||||
"anyOf": [
|
||||
{
|
||||
"type": "string"
|
||||
},
|
||||
{
|
||||
"type": "null"
|
||||
}
|
||||
],
|
||||
"title": "Folder Id",
|
||||
"description": "Folder ID to assign workflow to. Set to null to remove from folder."
|
||||
}
|
||||
},
|
||||
"type": "object",
|
||||
"title": "UpdateWorkflowFolderRequest",
|
||||
"description": "Request model for updating a workflow's folder assignment"
|
||||
},
|
||||
"UploadFileResponse": {
|
||||
"properties": {
|
||||
"s3_uri": {
|
||||
|
|
@ -18382,9 +19207,11 @@
|
|||
"enum": [
|
||||
"manual",
|
||||
"api",
|
||||
"scheduled"
|
||||
"scheduled",
|
||||
"webhook"
|
||||
],
|
||||
"title": "WorkflowRunTriggerType"
|
||||
"title": "WorkflowRunTriggerType",
|
||||
"description": "How a workflow run was initiated.\n\n- manual: User clicked \"Run\" in the UI\n- api: Direct API call to the run endpoint\n- scheduled: Triggered by a cron schedule\n- webhook: Triggered by an external system via the webhook endpoint"
|
||||
},
|
||||
"WorkflowStatus": {
|
||||
"type": "string",
|
||||
|
|
|
|||
|
|
@ -23,6 +23,30 @@ MAX_UPLOAD_FILE_COUNT = 50
|
|||
AZURE_BLOB_STORAGE_MAX_UPLOAD_FILE_COUNT = 50
|
||||
DEFAULT_MAX_SCREENSHOT_SCROLLS = 3
|
||||
|
||||
# Default navigation_goal for LoginBlocks. Instructs the LLM how to find the login
|
||||
# page, fill credentials, and handle multi-step flows / 2FA.
|
||||
DEFAULT_LOGIN_PROMPT = """\
|
||||
If you're not on the login page, navigate to the login page first.
|
||||
First, dismiss any promotional popups or cookie prompts that could block interaction with the page.
|
||||
|
||||
Log in using the credentials provided in the user details:
|
||||
1. Find the username/email input field and enter the username or email from the provided credentials.
|
||||
2. Find the password input field and enter the password from the provided credentials. \
|
||||
Some websites use a multi-step login flow where you enter the email first, click a "Continue" or "Next" button, \
|
||||
and then the password field appears on the next step. Handle this by entering the email, clicking continue, \
|
||||
then entering the password once the field is revealed.
|
||||
3. Click the login/sign-in button to submit the credentials.
|
||||
4. If a 2-factor authentication step appears, enter the authentication code.
|
||||
|
||||
Make sure you enter the username and password separately — do not paste both into the same field.
|
||||
Use your action history to determine if you already attempted to log in. \
|
||||
If you have not clicked the login button since filling in the credentials, try submitting before assuming failure.
|
||||
|
||||
If you fail to log in or can't find the login page after several trials, terminate.
|
||||
If the credentials are invalid, expired, or explicitly rejected by the website (e.g., "Invalid credentials", \
|
||||
"Wrong password"), terminate immediately and take no further actions.
|
||||
If login is completed, you're successful."""
|
||||
|
||||
# Default complete_criterion for LoginBlocks. Guides the LLM to check for actual
|
||||
# logged-in indicators rather than relying on page location, which fails on sites
|
||||
# that redirect to the homepage after successful login.
|
||||
|
|
|
|||
|
|
@ -381,6 +381,9 @@ class AgentDB(BaseAlchemyDB):
|
|||
async def create_workflow_run_parameter(self, *args: Any, **kwargs: Any) -> Any:
|
||||
return await self.workflow_runs.create_workflow_run_parameter(*args, **kwargs)
|
||||
|
||||
async def create_workflow_run_parameters(self, *args: Any, **kwargs: Any) -> Any:
|
||||
return await self.workflow_runs.create_workflow_run_parameters(*args, **kwargs)
|
||||
|
||||
async def get_workflow_run_parameters(self, *args: Any, **kwargs: Any) -> Any:
|
||||
return await self.workflow_runs.get_workflow_run_parameters(*args, **kwargs)
|
||||
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ from sqlalchemy.ext.asyncio import create_async_engine
|
|||
|
||||
from skyvern.forge.sdk.db.agent_db import AgentDB
|
||||
from skyvern.forge.sdk.db.models import Base
|
||||
from skyvern.forge.sdk.workflow.models.parameter import WorkflowParameterType
|
||||
|
||||
|
||||
@pytest_asyncio.fixture
|
||||
|
|
@ -66,3 +67,52 @@ async def test_get_organization_not_found(agent_db: AgentDB) -> None:
|
|||
|
||||
retrieved_by_domain = await agent_db.get_organization_by_domain(domain="nonexistent.com")
|
||||
assert retrieved_by_domain is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_create_workflow_run_parameters_persists_all_values(agent_db: AgentDB) -> None:
|
||||
organization = await agent_db.create_organization(
|
||||
organization_name="Workflow Parameter Org",
|
||||
domain="workflow-params.test",
|
||||
)
|
||||
workflow = await agent_db.create_workflow(
|
||||
title="Workflow Parameter Test",
|
||||
workflow_definition={"parameters": [], "blocks": []},
|
||||
organization_id=organization.organization_id,
|
||||
)
|
||||
workflow_run = await agent_db.create_workflow_run(
|
||||
workflow_permanent_id=workflow.workflow_permanent_id,
|
||||
workflow_id=workflow.workflow_id,
|
||||
organization_id=organization.organization_id,
|
||||
)
|
||||
|
||||
url_parameter = await agent_db.create_workflow_parameter(
|
||||
workflow_id=workflow.workflow_id,
|
||||
workflow_parameter_type=WorkflowParameterType.STRING,
|
||||
key="url",
|
||||
default_value=None,
|
||||
)
|
||||
count_parameter = await agent_db.create_workflow_parameter(
|
||||
workflow_id=workflow.workflow_id,
|
||||
workflow_parameter_type=WorkflowParameterType.INTEGER,
|
||||
key="count",
|
||||
default_value=None,
|
||||
)
|
||||
|
||||
created_parameters = await agent_db.create_workflow_run_parameters(
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
workflow_parameter_values=[
|
||||
(url_parameter, "https://example.com"),
|
||||
(count_parameter, "7"),
|
||||
],
|
||||
)
|
||||
|
||||
assert [parameter.value for parameter in created_parameters] == ["https://example.com", 7]
|
||||
assert all(parameter.created_at is not None for parameter in created_parameters)
|
||||
|
||||
stored_parameters = await agent_db.get_workflow_run_parameters(workflow_run.workflow_run_id)
|
||||
assert len(stored_parameters) == 2
|
||||
assert {parameter.key: run_parameter.value for parameter, run_parameter in stored_parameters} == {
|
||||
"url": "https://example.com",
|
||||
"count": 7,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -847,9 +847,42 @@ class WorkflowRunsMixin:
|
|||
value=value,
|
||||
)
|
||||
session.add(workflow_run_parameter)
|
||||
await session.flush()
|
||||
converted = convert_to_workflow_run_parameter(
|
||||
workflow_run_parameter, workflow_parameter, self.debug_enabled
|
||||
)
|
||||
await session.commit()
|
||||
await session.refresh(workflow_run_parameter)
|
||||
return convert_to_workflow_run_parameter(workflow_run_parameter, workflow_parameter, self.debug_enabled)
|
||||
return converted
|
||||
|
||||
@db_operation("create_workflow_run_parameters")
|
||||
async def create_workflow_run_parameters(
|
||||
self,
|
||||
workflow_run_id: str,
|
||||
workflow_parameter_values: list[tuple[WorkflowParameter, Any]],
|
||||
) -> list[WorkflowRunParameter]:
|
||||
if not workflow_parameter_values:
|
||||
return []
|
||||
|
||||
workflow_run_parameters = [
|
||||
WorkflowRunParameterModel(
|
||||
workflow_run_id=workflow_run_id,
|
||||
workflow_parameter_id=workflow_parameter.workflow_parameter_id,
|
||||
value=value,
|
||||
)
|
||||
for workflow_parameter, value in workflow_parameter_values
|
||||
]
|
||||
|
||||
async with self.Session() as session:
|
||||
session.add_all(workflow_run_parameters)
|
||||
await session.flush()
|
||||
converted = [
|
||||
convert_to_workflow_run_parameter(workflow_run_parameter, workflow_parameter, self.debug_enabled)
|
||||
for workflow_run_parameter, (workflow_parameter, _) in zip(
|
||||
workflow_run_parameters, workflow_parameter_values, strict=True
|
||||
)
|
||||
]
|
||||
await session.commit()
|
||||
return converted
|
||||
|
||||
@db_operation("get_workflow_run_parameters")
|
||||
async def get_workflow_run_parameters(
|
||||
|
|
|
|||
|
|
@ -891,9 +891,42 @@ class WorkflowRunsRepository(BaseRepository):
|
|||
value=value,
|
||||
)
|
||||
session.add(workflow_run_parameter)
|
||||
await session.flush()
|
||||
converted = convert_to_workflow_run_parameter(
|
||||
workflow_run_parameter, workflow_parameter, self.debug_enabled
|
||||
)
|
||||
await session.commit()
|
||||
await session.refresh(workflow_run_parameter)
|
||||
return convert_to_workflow_run_parameter(workflow_run_parameter, workflow_parameter, self.debug_enabled)
|
||||
return converted
|
||||
|
||||
@db_operation("create_workflow_run_parameters")
|
||||
async def create_workflow_run_parameters(
|
||||
self,
|
||||
workflow_run_id: str,
|
||||
workflow_parameter_values: list[tuple[WorkflowParameter, Any]],
|
||||
) -> list[WorkflowRunParameter]:
|
||||
if not workflow_parameter_values:
|
||||
return []
|
||||
|
||||
workflow_run_parameters = [
|
||||
WorkflowRunParameterModel(
|
||||
workflow_run_id=workflow_run_id,
|
||||
workflow_parameter_id=workflow_parameter.workflow_parameter_id,
|
||||
value=value,
|
||||
)
|
||||
for workflow_parameter, value in workflow_parameter_values
|
||||
]
|
||||
|
||||
async with self.Session() as session:
|
||||
session.add_all(workflow_run_parameters)
|
||||
await session.flush()
|
||||
converted = [
|
||||
convert_to_workflow_run_parameter(workflow_run_parameter, workflow_parameter, self.debug_enabled)
|
||||
for workflow_run_parameter, (workflow_parameter, _) in zip(
|
||||
workflow_run_parameters, workflow_parameter_values, strict=True
|
||||
)
|
||||
]
|
||||
await session.commit()
|
||||
return converted
|
||||
|
||||
@db_operation("get_workflow_run_parameters")
|
||||
async def get_workflow_run_parameters(
|
||||
|
|
|
|||
|
|
@ -4,6 +4,7 @@ import structlog
|
|||
from fastapi import BackgroundTasks, Depends, Header, HTTPException, Request
|
||||
|
||||
from skyvern.config import settings
|
||||
from skyvern.constants import DEFAULT_LOGIN_PROMPT
|
||||
from skyvern.exceptions import MissingBrowserAddressError
|
||||
from skyvern.forge import app
|
||||
from skyvern.forge.sdk.core import skyvern_context
|
||||
|
|
@ -39,27 +40,6 @@ from skyvern.services import workflow_service
|
|||
from skyvern.utils.url_validators import prepend_scheme_and_validate_url
|
||||
|
||||
LOG = structlog.get_logger()
|
||||
DEFAULT_LOGIN_PROMPT = """\
|
||||
If you're not on the login page, navigate to the login page first.
|
||||
First, dismiss any promotional popups or cookie prompts that could block interaction with the page.
|
||||
|
||||
Log in using the credentials provided in the user details:
|
||||
1. Find the username/email input field and enter the username or email from the provided credentials.
|
||||
2. Find the password input field and enter the password from the provided credentials. \
|
||||
Some websites use a multi-step login flow where you enter the email first, click a "Continue" or "Next" button, \
|
||||
and then the password field appears on the next step. Handle this by entering the email, clicking continue, \
|
||||
then entering the password once the field is revealed.
|
||||
3. Click the login/sign-in button to submit the credentials.
|
||||
4. If a 2-factor authentication step appears, enter the authentication code.
|
||||
|
||||
Make sure you enter the username and password separately — do not paste both into the same field.
|
||||
Use your action history to determine if you already attempted to log in. \
|
||||
If you have not clicked the login button since filling in the credentials, try submitting before assuming failure.
|
||||
|
||||
If you fail to log in or can't find the login page after several trials, terminate.
|
||||
If the credentials are invalid, expired, or explicitly rejected by the website (e.g., "Invalid credentials", \
|
||||
"Wrong password"), terminate immediately and take no further actions.
|
||||
If login is completed, you're successful."""
|
||||
|
||||
|
||||
def _validate_url(url: str | None) -> str | None:
|
||||
|
|
|
|||
|
|
@ -33,7 +33,6 @@ LOG = structlog.get_logger()
|
|||
summary="Run an SDK action",
|
||||
description="Execute a single SDK action with the specified parameters",
|
||||
tags=["SDK"],
|
||||
include_in_schema=False,
|
||||
openapi_extra={
|
||||
"x-fern-sdk-method-name": "run_sdk_action",
|
||||
},
|
||||
|
|
|
|||
|
|
@ -11,6 +11,7 @@ from fastapi import Depends, HTTPException, Request, status
|
|||
from pydantic import ValidationError
|
||||
from sse_starlette import EventSourceResponse
|
||||
|
||||
from skyvern.constants import DEFAULT_LOGIN_PROMPT
|
||||
from skyvern.forge import app
|
||||
from skyvern.forge.prompts import prompt_engine
|
||||
from skyvern.forge.sdk.api.llm.api_handler import LLMAPIHandler
|
||||
|
|
@ -19,7 +20,6 @@ from skyvern.forge.sdk.artifact.models import Artifact, ArtifactType
|
|||
from skyvern.forge.sdk.experimentation.llm_prompt_config import get_llm_handler_for_prompt_type
|
||||
from skyvern.forge.sdk.routes.event_source_stream import EventSourceStream, FastAPIEventSourceStream
|
||||
from skyvern.forge.sdk.routes.routers import base_router
|
||||
from skyvern.forge.sdk.routes.run_blocks import DEFAULT_LOGIN_PROMPT
|
||||
from skyvern.forge.sdk.schemas.organizations import Organization
|
||||
from skyvern.forge.sdk.schemas.workflow_copilot import (
|
||||
WorkflowCopilotChatHistoryMessage,
|
||||
|
|
|
|||
|
|
@ -4285,6 +4285,9 @@ class HumanInteractionBlock(BaseTaskBlock):
|
|||
|
||||
app_url = f"{settings.SKYVERN_APP_URL}/runs/{workflow_run_id}/overview"
|
||||
body = f"{self.body}\n\nKindly visit {app_url}\n\n{self.instructions}\n\n"
|
||||
if browser_session_id:
|
||||
browser_session_url = f"{settings.SKYVERN_APP_URL}/browser-session/{browser_session_id}"
|
||||
body += f"To interact with the browser session directly, visit {browser_session_url}\n\n"
|
||||
subject = f"{self.subject} - Workflow Run ID: {workflow_run_id}"
|
||||
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -709,6 +709,7 @@ class WorkflowService:
|
|||
all_workflow_parameters = await self.get_workflow_parameters(workflow_id=workflow.workflow_id)
|
||||
try:
|
||||
missing_parameters: list[str] = []
|
||||
workflow_parameter_values: list[tuple[WorkflowParameter, Any]] = []
|
||||
for workflow_parameter in all_workflow_parameters:
|
||||
if workflow_request.data and workflow_parameter.key in workflow_request.data:
|
||||
request_body_value = workflow_request.data[workflow_parameter.key]
|
||||
|
|
@ -723,19 +724,7 @@ class WorkflowService:
|
|||
if not isinstance(request_body_value, str):
|
||||
raise InvalidCredentialId(f"<non-string value of type {type(request_body_value).__name__}>")
|
||||
await self._validate_credential_id(request_body_value, organization)
|
||||
try:
|
||||
await self.create_workflow_run_parameter(
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
workflow_parameter=workflow_parameter,
|
||||
value=request_body_value,
|
||||
)
|
||||
except SQLAlchemyError as parameter_error:
|
||||
raise WorkflowRunParameterPersistenceError(
|
||||
parameter_key=workflow_parameter.key,
|
||||
workflow_id=workflow.workflow_permanent_id,
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
reason=self._format_parameter_persistence_error(parameter_error),
|
||||
) from parameter_error
|
||||
workflow_parameter_values.append((workflow_parameter, request_body_value))
|
||||
elif workflow_parameter.default_value is not None:
|
||||
if workflow_parameter.workflow_parameter_type == WorkflowParameterType.CREDENTIAL_ID:
|
||||
if not isinstance(workflow_parameter.default_value, str):
|
||||
|
|
@ -743,19 +732,7 @@ class WorkflowService:
|
|||
f"<non-string value of type {type(workflow_parameter.default_value).__name__}>"
|
||||
)
|
||||
await self._validate_credential_id(workflow_parameter.default_value, organization)
|
||||
try:
|
||||
await self.create_workflow_run_parameter(
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
workflow_parameter=workflow_parameter,
|
||||
value=workflow_parameter.default_value,
|
||||
)
|
||||
except SQLAlchemyError as parameter_error:
|
||||
raise WorkflowRunParameterPersistenceError(
|
||||
parameter_key=workflow_parameter.key,
|
||||
workflow_id=workflow.workflow_permanent_id,
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
reason=self._format_parameter_persistence_error(parameter_error),
|
||||
) from parameter_error
|
||||
workflow_parameter_values.append((workflow_parameter, workflow_parameter.default_value))
|
||||
else:
|
||||
missing_parameters.append(workflow_parameter.key)
|
||||
|
||||
|
|
@ -766,6 +743,35 @@ class WorkflowService:
|
|||
workflow_id=workflow.workflow_permanent_id,
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
)
|
||||
|
||||
if workflow_parameter_values:
|
||||
try:
|
||||
await self.create_workflow_run_parameters(
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
workflow_parameter_values=workflow_parameter_values,
|
||||
)
|
||||
except SQLAlchemyError as batch_error:
|
||||
# Batch failed — retry one-by-one to identify the exact failing parameter
|
||||
for workflow_parameter, value in workflow_parameter_values:
|
||||
try:
|
||||
await self.create_workflow_run_parameter(
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
workflow_parameter=workflow_parameter,
|
||||
value=value,
|
||||
)
|
||||
except SQLAlchemyError as parameter_error:
|
||||
raise WorkflowRunParameterPersistenceError(
|
||||
parameter_key=workflow_parameter.key,
|
||||
workflow_id=workflow.workflow_permanent_id,
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
reason=self._format_parameter_persistence_error(parameter_error),
|
||||
) from parameter_error
|
||||
# All individual inserts succeeded — the batch failure was transient
|
||||
LOG.warning(
|
||||
"Batch parameter insert failed but individual inserts succeeded",
|
||||
workflow_run_id=workflow_run.workflow_run_id,
|
||||
batch_error=str(batch_error),
|
||||
)
|
||||
except Exception as e:
|
||||
LOG.exception(
|
||||
f"Error while setting up workflow run {workflow_run.workflow_run_id}",
|
||||
|
|
@ -1404,7 +1410,6 @@ class WorkflowService:
|
|||
run_level_run_with=workflow_run.run_with,
|
||||
workflow_level_run_with=workflow.run_with,
|
||||
code_version=workflow.code_version,
|
||||
adaptive_caching=workflow.adaptive_caching,
|
||||
ai_fallback=workflow_run.ai_fallback,
|
||||
should_run_script=is_script_run,
|
||||
has_script=script is not None,
|
||||
|
|
@ -3920,9 +3925,7 @@ class WorkflowService:
|
|||
workflow_parameter: WorkflowParameter,
|
||||
value: Any,
|
||||
) -> WorkflowRunParameter:
|
||||
value = json.dumps(value) if isinstance(value, (dict, list)) else value
|
||||
# InvalidWorkflowParameter will be raised if the validation fails
|
||||
workflow_parameter.workflow_parameter_type.convert_value(value)
|
||||
value = self._serialize_workflow_run_parameter_value(workflow_parameter, value)
|
||||
|
||||
return await app.DATABASE.create_workflow_run_parameter(
|
||||
workflow_run_id=workflow_run_id,
|
||||
|
|
@ -3930,6 +3933,28 @@ class WorkflowService:
|
|||
value=value,
|
||||
)
|
||||
|
||||
async def create_workflow_run_parameters(
|
||||
self,
|
||||
workflow_run_id: str,
|
||||
workflow_parameter_values: list[tuple[WorkflowParameter, Any]],
|
||||
) -> list[WorkflowRunParameter]:
|
||||
serialized_workflow_parameter_values = [
|
||||
(workflow_parameter, self._serialize_workflow_run_parameter_value(workflow_parameter, value))
|
||||
for workflow_parameter, value in workflow_parameter_values
|
||||
]
|
||||
|
||||
return await app.DATABASE.create_workflow_run_parameters(
|
||||
workflow_run_id=workflow_run_id,
|
||||
workflow_parameter_values=serialized_workflow_parameter_values,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def _serialize_workflow_run_parameter_value(workflow_parameter: WorkflowParameter, value: Any) -> Any:
|
||||
value = json.dumps(value) if isinstance(value, (dict, list)) else value
|
||||
# InvalidWorkflowParameter will be raised if the validation fails
|
||||
workflow_parameter.workflow_parameter_type.convert_value(value)
|
||||
return value
|
||||
|
||||
async def get_workflow_run_parameter_tuples(
|
||||
self, workflow_run_id: str
|
||||
) -> list[tuple[WorkflowParameter, WorkflowRunParameter]]:
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ from typing import Any, cast
|
|||
import structlog
|
||||
|
||||
from skyvern.config import settings
|
||||
from skyvern.constants import DEFAULT_LOGIN_COMPLETE_CRITERION
|
||||
from skyvern.constants import DEFAULT_LOGIN_COMPLETE_CRITERION, DEFAULT_LOGIN_PROMPT
|
||||
from skyvern.forge.sdk.db.enums import TaskType
|
||||
from skyvern.forge.sdk.db.id import (
|
||||
generate_aws_secret_parameter_id,
|
||||
|
|
@ -638,6 +638,9 @@ def block_yaml_to_block(
|
|||
# This guides the LLM to check for actual logged-in indicators (username in header,
|
||||
# account menu, logout button) rather than relying on page location, which fails on sites
|
||||
# that redirect to the homepage after successful login.
|
||||
login_navigation_goal = block_yaml.navigation_goal
|
||||
if not login_navigation_goal or not login_navigation_goal.strip():
|
||||
login_navigation_goal = DEFAULT_LOGIN_PROMPT
|
||||
login_complete_criterion = block_yaml.complete_criterion
|
||||
if not login_complete_criterion or not login_complete_criterion.strip():
|
||||
login_complete_criterion = DEFAULT_LOGIN_COMPLETE_CRITERION
|
||||
|
|
@ -647,7 +650,7 @@ def block_yaml_to_block(
|
|||
title=block_yaml.title,
|
||||
engine=block_yaml.engine,
|
||||
parameters=login_block_parameters,
|
||||
navigation_goal=block_yaml.navigation_goal,
|
||||
navigation_goal=login_navigation_goal,
|
||||
error_code_mapping=block_yaml.error_code_mapping,
|
||||
max_steps_per_run=block_yaml.max_steps_per_run,
|
||||
max_retries=block_yaml.max_retries,
|
||||
|
|
|
|||
|
|
@ -420,6 +420,7 @@ class ActionHandler:
|
|||
)
|
||||
)
|
||||
initial_page_count = 0
|
||||
page_url_before_download = page.url
|
||||
# get the initial page count
|
||||
if browser_state:
|
||||
initial_page_count = len(await browser_state.list_valid_pages())
|
||||
|
|
@ -525,6 +526,25 @@ class ActionHandler:
|
|||
# close the extra page
|
||||
await pages_after_download[-1].close()
|
||||
|
||||
# After a print/download action the working page sometimes navigates to
|
||||
# about:blank (e.g. when the browser follows a download URL that yields no
|
||||
# renderable content). Detect this and navigate back to the original URL so
|
||||
# subsequent steps are not stuck on a blank page.
|
||||
blank_page_urls = {"about:blank", ":"}
|
||||
if page.url in blank_page_urls and page_url_before_download not in blank_page_urls:
|
||||
LOG.warning(
|
||||
"Working page navigated to blank after download action, navigating back to original URL",
|
||||
original_url=page_url_before_download,
|
||||
)
|
||||
try:
|
||||
await browser_state.navigate_to_url(page=page, url=page_url_before_download)
|
||||
except Exception:
|
||||
LOG.warning(
|
||||
"Failed to navigate back to original URL after blank page from download",
|
||||
original_url=page_url_before_download,
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
persisted_action = await app.DATABASE.create_action(action=action)
|
||||
action.action_id = persisted_action.action_id
|
||||
|
||||
|
|
@ -2607,22 +2627,27 @@ async def chain_click(
|
|||
dom=DomUtil(scraped_page=scraped_page, page=page)
|
||||
)
|
||||
if blocking_element is None:
|
||||
if not blocked:
|
||||
if blocked:
|
||||
LOG.info(
|
||||
"Chain click: exit since the element is not blocking by any element",
|
||||
"Chain click: element is blocked by a non-interactable element, try to click by the coordinates",
|
||||
action=action,
|
||||
element=str(skyvern_element),
|
||||
locator=locator,
|
||||
)
|
||||
else:
|
||||
# Element is visible and elementFromPoint returns the target itself,
|
||||
# but Playwright's click still failed (e.g. element transiently
|
||||
# unstable due to React re-render or CSS animation). Fall through
|
||||
# to coordinate click which bypasses Playwright's actionability
|
||||
# checks while still dispatching a real mouse event.
|
||||
LOG.info(
|
||||
"Chain click: element is visible and not blocked, but Playwright click failed — trying coordinate click",
|
||||
action=action,
|
||||
element=str(skyvern_element),
|
||||
locator=locator,
|
||||
)
|
||||
return action_results
|
||||
|
||||
try:
|
||||
LOG.info(
|
||||
"Chain click: element is blocked by an non-interactable element, try to click by the coordinates",
|
||||
action=action,
|
||||
element=str(skyvern_element),
|
||||
locator=locator,
|
||||
)
|
||||
await skyvern_element.coordinate_click(page=page)
|
||||
action_results.append(ActionSuccess())
|
||||
return action_results
|
||||
|
|
@ -2632,7 +2657,7 @@ async def chain_click(
|
|||
)
|
||||
|
||||
LOG.info(
|
||||
"Chain click: element is blocked by an non-interactable element, going to use javascript click instead of playwright click",
|
||||
"Chain click: coordinate click failed, going to use javascript click instead of playwright click",
|
||||
action=action,
|
||||
element=str(skyvern_element),
|
||||
locator=locator,
|
||||
|
|
@ -2860,7 +2885,17 @@ async def choose_auto_completion_dropdown(
|
|||
if await locator.count() == 0:
|
||||
raise MissingElement(element_id=element_id)
|
||||
|
||||
await locator.click(timeout=settings.BROWSER_ACTION_TIMEOUT_MS)
|
||||
# Use SkyvernElement.click() so we get the full fallback chain
|
||||
# (Playwright click → coordinate click → JavaScript click). Plain
|
||||
# locator.click() can fail when the item or one of its ancestors has
|
||||
# pointer-events:none, which is common in React/Vue dropdown lists.
|
||||
selected_element = SkyvernElement(
|
||||
locator=locator,
|
||||
frame=current_frame,
|
||||
static_element=incremental_scraped.id_to_element_dict.get(element_id, {}),
|
||||
)
|
||||
await selected_element.scroll_into_view()
|
||||
await selected_element.click(page=page)
|
||||
clear_input = False
|
||||
return result
|
||||
|
||||
|
|
|
|||
|
|
@ -159,13 +159,21 @@ class SkyvernElement:
|
|||
return True
|
||||
|
||||
autocomplete: str | None = await self.get_attr("aria-autocomplete")
|
||||
if autocomplete and autocomplete.lower() == "list":
|
||||
if autocomplete and autocomplete.lower() in ("list", "both", "inline"):
|
||||
return True
|
||||
|
||||
class_name: str | None = await self.get_attr("class")
|
||||
if class_name and "autocomplete-input" in class_name.lower():
|
||||
return True
|
||||
|
||||
# Combobox inputs (role="combobox") present a list of options — treat as
|
||||
# auto-completion so the agent uses the dropdown-selection flow instead of
|
||||
# the Tab hack. This covers account-search fields in many apps that use
|
||||
# role="combobox" without setting aria-autocomplete.
|
||||
role: str | None = await self.get_attr("role")
|
||||
if role and role.lower() == "combobox":
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
async def is_custom_option(self) -> bool:
|
||||
|
|
|
|||
156
tests/unit/forge/sdk/db/test_workflow_runs_repository.py
Normal file
156
tests/unit/forge/sdk/db/test_workflow_runs_repository.py
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
"""Tests for WorkflowRunsRepository.create_workflow_run_parameters batch method."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from skyvern.forge.sdk.db.repositories.workflow_runs import WorkflowRunsRepository
|
||||
from skyvern.forge.sdk.workflow.models.parameter import WorkflowParameter, WorkflowParameterType
|
||||
|
||||
|
||||
def _make_workflow_parameter(
|
||||
key: str,
|
||||
*,
|
||||
workflow_parameter_type: WorkflowParameterType = WorkflowParameterType.STRING,
|
||||
default_value: str | int | float | bool | dict | list | None = None,
|
||||
) -> WorkflowParameter:
|
||||
now = datetime.now(tz=timezone.utc)
|
||||
return WorkflowParameter(
|
||||
workflow_parameter_id=f"wp_{key}",
|
||||
workflow_id="wf_test",
|
||||
key=key,
|
||||
workflow_parameter_type=workflow_parameter_type,
|
||||
default_value=default_value,
|
||||
created_at=now,
|
||||
modified_at=now,
|
||||
)
|
||||
|
||||
|
||||
class _SessionContext:
|
||||
def __init__(self, session: MagicMock) -> None:
|
||||
self._session = session
|
||||
|
||||
async def __aenter__(self) -> MagicMock:
|
||||
return self._session
|
||||
|
||||
async def __aexit__(self, exc_type, exc, tb) -> bool:
|
||||
return False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_batch_create_uses_add_all_flush_commit_not_refresh() -> None:
|
||||
"""Batch insert should use add_all + flush + commit and never call refresh."""
|
||||
tracked_models: list = []
|
||||
session = MagicMock()
|
||||
session.add_all = MagicMock(side_effect=lambda models: tracked_models.extend(models))
|
||||
|
||||
async def _flush() -> None:
|
||||
now = datetime.now(tz=timezone.utc)
|
||||
for model in tracked_models:
|
||||
model.created_at = now
|
||||
|
||||
session.flush = AsyncMock(side_effect=_flush)
|
||||
session.commit = AsyncMock()
|
||||
session.refresh = AsyncMock()
|
||||
|
||||
repo = WorkflowRunsRepository(session_factory=lambda: _SessionContext(session), debug_enabled=False)
|
||||
|
||||
string_param = _make_workflow_parameter("url")
|
||||
int_param = _make_workflow_parameter("count", workflow_parameter_type=WorkflowParameterType.INTEGER)
|
||||
|
||||
created = await repo.create_workflow_run_parameters(
|
||||
workflow_run_id="wr_test",
|
||||
workflow_parameter_values=[
|
||||
(string_param, "https://example.com"),
|
||||
(int_param, "7"),
|
||||
],
|
||||
)
|
||||
|
||||
session.add_all.assert_called_once()
|
||||
session.flush.assert_awaited_once()
|
||||
session.commit.assert_awaited_once()
|
||||
session.refresh.assert_not_awaited()
|
||||
|
||||
assert [p.workflow_parameter_id for p in created] == [
|
||||
string_param.workflow_parameter_id,
|
||||
int_param.workflow_parameter_id,
|
||||
]
|
||||
assert [p.value for p in created] == ["https://example.com", 7]
|
||||
assert all(p.created_at is not None for p in created)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_batch_create_with_empty_list_returns_empty() -> None:
|
||||
"""create_workflow_run_parameters with an empty list should short-circuit and return []."""
|
||||
session = MagicMock()
|
||||
repo = WorkflowRunsRepository(session_factory=lambda: _SessionContext(session), debug_enabled=False)
|
||||
|
||||
result = await repo.create_workflow_run_parameters(
|
||||
workflow_run_id="wr_test",
|
||||
workflow_parameter_values=[],
|
||||
)
|
||||
|
||||
assert result == []
|
||||
session.add_all.assert_not_called()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_batch_create_propagates_sqlalchemy_error_from_flush() -> None:
|
||||
"""When flush() raises an IntegrityError, it should propagate without being swallowed."""
|
||||
db_error = IntegrityError("INSERT", {}, Exception("UNIQUE constraint failed"))
|
||||
session = MagicMock()
|
||||
session.add_all = MagicMock()
|
||||
session.flush = AsyncMock(side_effect=db_error)
|
||||
session.commit = AsyncMock()
|
||||
|
||||
repo = WorkflowRunsRepository(session_factory=lambda: _SessionContext(session), debug_enabled=False)
|
||||
|
||||
param = _make_workflow_parameter("url")
|
||||
|
||||
with pytest.raises(IntegrityError) as exc_info:
|
||||
await repo.create_workflow_run_parameters(
|
||||
workflow_run_id="wr_test",
|
||||
workflow_parameter_values=[(param, "https://example.com")],
|
||||
)
|
||||
|
||||
assert exc_info.value is db_error
|
||||
session.add_all.assert_called_once()
|
||||
session.flush.assert_awaited_once()
|
||||
# commit should NOT be called when flush fails
|
||||
session.commit.assert_not_awaited()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_batch_create_propagates_sqlalchemy_error_from_commit() -> None:
|
||||
"""When commit() raises an IntegrityError, it should propagate without being swallowed."""
|
||||
db_error = IntegrityError("INSERT", {}, Exception("FK constraint failed"))
|
||||
tracked_models: list = []
|
||||
|
||||
session = MagicMock()
|
||||
session.add_all = MagicMock(side_effect=lambda models: tracked_models.extend(models))
|
||||
|
||||
async def _flush() -> None:
|
||||
now = datetime.now(tz=timezone.utc)
|
||||
for model in tracked_models:
|
||||
model.created_at = now
|
||||
|
||||
session.flush = AsyncMock(side_effect=_flush)
|
||||
session.commit = AsyncMock(side_effect=db_error)
|
||||
|
||||
repo = WorkflowRunsRepository(session_factory=lambda: _SessionContext(session), debug_enabled=False)
|
||||
|
||||
param = _make_workflow_parameter("url")
|
||||
|
||||
with pytest.raises(IntegrityError) as exc_info:
|
||||
await repo.create_workflow_run_parameters(
|
||||
workflow_run_id="wr_test",
|
||||
workflow_parameter_values=[(param, "https://example.com")],
|
||||
)
|
||||
|
||||
assert exc_info.value is db_error
|
||||
session.flush.assert_awaited_once()
|
||||
session.commit.assert_awaited_once()
|
||||
|
|
@ -6,8 +6,8 @@ from unittest.mock import AsyncMock, MagicMock, patch
|
|||
import pytest
|
||||
|
||||
from skyvern.forge.sdk.models import StepStatus
|
||||
from skyvern.webeye.actions.actions import DownloadFileAction
|
||||
from skyvern.webeye.actions.handler import handle_download_file_action
|
||||
from skyvern.webeye.actions.actions import ClickAction, DownloadFileAction
|
||||
from skyvern.webeye.actions.handler import ActionHandler, handle_download_file_action
|
||||
from skyvern.webeye.actions.responses import ActionFailure, ActionSuccess
|
||||
from skyvern.webeye.scraper.scraped_page import ScrapedPage
|
||||
from tests.unit.helpers import make_organization, make_step, make_task
|
||||
|
|
@ -389,3 +389,155 @@ async def test_handle_download_file_action_download_url_err_aborted_swallowed()
|
|||
|
||||
assert len(result) == 1
|
||||
assert isinstance(result[0], ActionSuccess)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_handle_action_navigates_back_from_blank_page_after_download() -> None:
    """After a print/download click the working page sometimes navigates to about:blank.
    handle_action should detect this and navigate back to the original URL so the
    next step is not stuck on a blank page."""
    now = datetime.now(UTC)
    organization = make_organization(now)
    task = make_task(now, organization)
    step = make_step(now, task, step_id="step-1", status=StepStatus.created, order=0, output=None)

    original_url = "https://example.com/document/123"

    # Page starts at a real URL; the mocked action will navigate it to about:blank
    page = MagicMock()
    page.url = original_url

    browser_state = MagicMock()
    # Same page count before and after (no extra tab opened by the print action)
    browser_state.list_valid_pages = AsyncMock(return_value=[page])
    browser_state.navigate_to_url = AsyncMock()

    # Minimal ScrapedPage: the handler only needs the browser state and cleanup hook.
    scraped_page = ScrapedPage(
        elements=[],
        element_tree=[],
        element_tree_trimmed=[],
        _browser_state=browser_state,
        _clean_up_func=AsyncMock(return_value=[]),
        _scrape_exclude=None,
    )

    # download=True marks this click as a file-download action.
    action = ClickAction(
        element_id="btn-print",
        download=True,
        organization_id=task.organization_id,
        task_id=task.task_id,
        step_id=step.step_id,
    )

    # _handle_action simulates the page navigating to about:blank during the print download
    async def mock_inner_handle_action(*args, **kwargs) -> list[ActionSuccess]:
        page.url = "about:blank"
        return [ActionSuccess()]

    with tempfile.TemporaryDirectory() as temp_dir:
        # Pre-create the "downloaded" file that appears after the action runs.
        dummy_file = os.path.join(temp_dir, "doc.pdf")
        with open(dummy_file, "w") as f:
            f.write("dummy")

        # list_files_in_directory: empty before action, one file after action
        list_files_side_effect = [[], [dummy_file]]

        mock_app = MagicMock()
        mock_app.BROWSER_MANAGER.get_for_task.return_value = browser_state
        mock_app.DATABASE.create_action = AsyncMock(return_value=action)
        mock_app.STORAGE = MagicMock()

        # Patch every collaborator the handler touches so only the blank-page
        # recovery logic under test runs for real.
        with (
            patch.object(ActionHandler, "_handle_action", side_effect=mock_inner_handle_action),
            patch("skyvern.webeye.actions.handler.list_files_in_directory", side_effect=list_files_side_effect),
            patch("skyvern.webeye.actions.handler.get_download_dir", return_value=temp_dir),
            patch("skyvern.webeye.actions.handler.skyvern_context.current", return_value=None),
            patch(
                "skyvern.webeye.actions.handler.check_downloading_files_and_wait_for_download_to_complete",
                new=AsyncMock(),
            ),
            patch("skyvern.webeye.actions.handler.app", mock_app),
        ):
            await ActionHandler.handle_action(
                scraped_page=scraped_page,
                task=task,
                step=step,
                page=page,
                action=action,
            )

        # The blank-page recovery should have navigated back to the original URL
        browser_state.navigate_to_url.assert_called_once_with(page=page, url=original_url)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_handle_action_does_not_navigate_back_when_page_url_unchanged() -> None:
    """When the page URL does not change to blank after a download, navigate_to_url should NOT be called."""
    now = datetime.now(UTC)
    organization = make_organization(now)
    task = make_task(now, organization)
    step = make_step(now, task, step_id="step-1", status=StepStatus.created, order=0, output=None)

    original_url = "https://example.com/document/123"

    page = MagicMock()
    page.url = original_url  # URL stays the same after download

    browser_state = MagicMock()
    browser_state.list_valid_pages = AsyncMock(return_value=[page])
    browser_state.navigate_to_url = AsyncMock()

    # Minimal ScrapedPage: the handler only needs the browser state and cleanup hook.
    scraped_page = ScrapedPage(
        elements=[],
        element_tree=[],
        element_tree_trimmed=[],
        _browser_state=browser_state,
        _clean_up_func=AsyncMock(return_value=[]),
        _scrape_exclude=None,
    )

    # download=True marks this click as a file-download action.
    action = ClickAction(
        element_id="btn-print",
        download=True,
        organization_id=task.organization_id,
        task_id=task.task_id,
        step_id=step.step_id,
    )

    # _handle_action does NOT change the page URL (normal case)
    async def mock_inner_handle_action(*args, **kwargs) -> list[ActionSuccess]:
        return [ActionSuccess()]

    with tempfile.TemporaryDirectory() as temp_dir:
        # Pre-create the "downloaded" file that appears after the action runs.
        dummy_file = os.path.join(temp_dir, "doc.pdf")
        with open(dummy_file, "w") as f:
            f.write("dummy")

        # list_files_in_directory: empty before action, one file after action.
        list_files_side_effect = [[], [dummy_file]]

        mock_app = MagicMock()
        mock_app.BROWSER_MANAGER.get_for_task.return_value = browser_state
        mock_app.DATABASE.create_action = AsyncMock(return_value=action)
        mock_app.STORAGE = MagicMock()

        # Patch every collaborator the handler touches so only the blank-page
        # detection logic under test runs for real.
        with (
            patch.object(ActionHandler, "_handle_action", side_effect=mock_inner_handle_action),
            patch("skyvern.webeye.actions.handler.list_files_in_directory", side_effect=list_files_side_effect),
            patch("skyvern.webeye.actions.handler.get_download_dir", return_value=temp_dir),
            patch("skyvern.webeye.actions.handler.skyvern_context.current", return_value=None),
            patch(
                "skyvern.webeye.actions.handler.check_downloading_files_and_wait_for_download_to_complete",
                new=AsyncMock(),
            ),
            patch("skyvern.webeye.actions.handler.app", mock_app),
        ):
            await ActionHandler.handle_action(
                scraped_page=scraped_page,
                task=task,
                step=step,
                page=page,
                action=action,
            )

        # Page URL is unchanged; no navigation back should occur
        browser_state.navigate_to_url.assert_not_called()
|
||||
|
|
|
|||
78
tests/unit/test_login_block_default_navigation_goal.py
Normal file
78
tests/unit/test_login_block_default_navigation_goal.py
Normal file
|
|
@ -0,0 +1,78 @@
|
|||
"""Tests for LoginBlock default navigation_goal in workflow_definition_converter.
|
||||
|
||||
Regression test for SKY-8637: MCP-built workflows omit prompt for login block.
|
||||
When a login block has no navigation_goal, the converter must apply
|
||||
DEFAULT_LOGIN_PROMPT so the agent knows to fill credentials.
|
||||
"""
|
||||
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from skyvern.constants import DEFAULT_LOGIN_PROMPT
|
||||
from skyvern.forge.sdk.workflow.models.block import LoginBlock
|
||||
from skyvern.forge.sdk.workflow.models.parameter import OutputParameter
|
||||
from skyvern.forge.sdk.workflow.workflow_definition_converter import block_yaml_to_block
|
||||
from skyvern.schemas.workflows import LoginBlockYAML
|
||||
|
||||
_NOW = datetime.now(UTC)
|
||||
|
||||
|
||||
def _make_output_parameter(label: str) -> OutputParameter:
    """Build a minimal OutputParameter for the block with the given label."""
    timestamps = {"created_at": _NOW, "modified_at": _NOW}
    return OutputParameter(
        output_parameter_id=f"op_{label}",
        key=f"{label}_output",
        parameter_type="output",
        workflow_id="test_wf",
        **timestamps,
    )
|
||||
|
||||
|
||||
def _convert_login_block(block_yaml: LoginBlockYAML) -> LoginBlock:
    """Convert a LoginBlockYAML via block_yaml_to_block, asserting the result type."""
    out_param = _make_output_parameter(block_yaml.label)
    converted = block_yaml_to_block(block_yaml, {out_param.key: out_param})
    assert isinstance(converted, LoginBlock)
    return converted
|
||||
|
||||
|
||||
class TestLoginBlockDefaultNavigationGoal:
    """Converting a LoginBlockYAML without a usable navigation_goal must yield DEFAULT_LOGIN_PROMPT."""

    def test_no_navigation_goal_gets_default(self) -> None:
        yaml_block = LoginBlockYAML(label="login")
        assert yaml_block.navigation_goal is None

        converted = _convert_login_block(yaml_block)

        assert converted.navigation_goal == DEFAULT_LOGIN_PROMPT

    def test_empty_string_navigation_goal_gets_default(self) -> None:
        converted = _convert_login_block(LoginBlockYAML(label="login", navigation_goal=""))

        assert converted.navigation_goal == DEFAULT_LOGIN_PROMPT

    def test_whitespace_only_navigation_goal_gets_default(self) -> None:
        converted = _convert_login_block(LoginBlockYAML(label="login", navigation_goal=" "))

        assert converted.navigation_goal == DEFAULT_LOGIN_PROMPT

    def test_explicit_navigation_goal_preserved(self) -> None:
        explicit_goal = "Navigate to the admin panel and log in"

        converted = _convert_login_block(LoginBlockYAML(label="login", navigation_goal=explicit_goal))

        assert converted.navigation_goal == explicit_goal

    def test_default_prompt_mentions_credentials(self) -> None:
        converted = _convert_login_block(LoginBlockYAML(label="login"))

        goal_text = converted.navigation_goal.lower()
        assert "credentials" in goal_text
        assert "password" in goal_text
        assert "login" in goal_text
|
||||
172
tests/unit/test_workflow_run_parameter_batching.py
Normal file
172
tests/unit/test_workflow_run_parameter_batching.py
Normal file
|
|
@ -0,0 +1,172 @@
|
|||
"""Tests for WorkflowService.setup_workflow_run batch parameter persistence.
|
||||
|
||||
Verifies that setup_workflow_run collects all parameter values first and
|
||||
persists them in a single batch insert, and that validation failures
|
||||
(missing params, invalid credentials, DB errors) are handled correctly.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime, timezone
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from skyvern.exceptions import InvalidCredentialId, MissingValueForParameter, WorkflowRunParameterPersistenceError
|
||||
from skyvern.forge.sdk.workflow.models.parameter import WorkflowParameter, WorkflowParameterType
|
||||
from skyvern.forge.sdk.workflow.models.workflow import WorkflowRequestBody
|
||||
from skyvern.forge.sdk.workflow.service import WorkflowService
|
||||
|
||||
|
||||
def _make_workflow_parameter(
    key: str,
    *,
    workflow_parameter_type: WorkflowParameterType = WorkflowParameterType.STRING,
    default_value: str | int | float | bool | dict | list | None = None,
) -> WorkflowParameter:
    """Construct a minimal WorkflowParameter for tests, with both timestamps set to now."""
    timestamp = datetime.now(tz=timezone.utc)
    return WorkflowParameter(
        workflow_parameter_id=f"wp_{key}",
        workflow_id="wf_test",
        key=key,
        workflow_parameter_type=workflow_parameter_type,
        default_value=default_value,
        created_at=timestamp,
        modified_at=timestamp,
    )
|
||||
|
||||
|
||||
def _make_service_with_mocks(
    *,
    workflow_parameters: list[WorkflowParameter],
    batch_side_effect: Exception | None = None,
    single_side_effect: Exception | None = None,
) -> tuple[WorkflowService, SimpleNamespace, SimpleNamespace]:
    """Build a WorkflowService whose lookup/persistence internals are mocked for setup_workflow_run tests."""
    service = WorkflowService()

    # Lightweight stand-ins for the ORM objects setup_workflow_run reads.
    fake_workflow = SimpleNamespace(
        workflow_id="wf_test",
        workflow_permanent_id="wpid_test",
        organization_id="org_test",
        proxy_location=None,
        webhook_callback_url=None,
        extra_http_headers=None,
        run_with="agent",
        code_version=None,
        adaptive_caching=False,
        sequential_key=None,
    )
    fake_run = SimpleNamespace(workflow_run_id="wr_test", workflow_permanent_id="wpid_test")

    service.get_workflow_by_permanent_id = AsyncMock(return_value=fake_workflow)  # type: ignore[method-assign]
    service.create_workflow_run = AsyncMock(return_value=fake_run)  # type: ignore[method-assign]
    service.get_workflow_parameters = AsyncMock(return_value=workflow_parameters)  # type: ignore[method-assign]

    # Batch insert: either fail with the supplied error or succeed with an empty result.
    batch_mock = (
        AsyncMock(side_effect=batch_side_effect) if batch_side_effect else AsyncMock(return_value=[])
    )
    service.create_workflow_run_parameters = batch_mock  # type: ignore[method-assign]

    # One-by-one fallback insert: optionally fail with the supplied error.
    single_mock = AsyncMock(side_effect=single_side_effect) if single_side_effect else AsyncMock()
    service.create_workflow_run_parameter = single_mock  # type: ignore[method-assign]

    service.mark_workflow_run_as_failed = AsyncMock(return_value=fake_run)  # type: ignore[method-assign]

    fake_org = SimpleNamespace(organization_id="org_test", organization_name="Test Org")
    return service, fake_org, fake_run
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_setup_workflow_run_raises_on_missing_required_parameters() -> None:
    """When required parameters have no value and no default, setup should raise MissingValueForParameter."""
    required_param = _make_workflow_parameter("api_key")  # no default_value
    service, organization, _ = _make_service_with_mocks(workflow_parameters=[required_param])

    request = WorkflowRequestBody(data={})  # no data for api_key

    with patch("skyvern.forge.sdk.workflow.service.app") as mock_app:
        # Disable any experiment-gated behavior so only the validation path runs.
        mock_app.EXPERIMENTATION_PROVIDER.is_feature_enabled_cached = AsyncMock(return_value=False)

        with pytest.raises(MissingValueForParameter):
            await service.setup_workflow_run(
                request_id="req_test",
                workflow_request=request,
                workflow_permanent_id="wpid_test",
                organization=organization,
            )

    # Validation fails before any persistence; the run is marked failed exactly once.
    service.create_workflow_run_parameters.assert_not_awaited()
    service.mark_workflow_run_as_failed.assert_awaited_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_setup_workflow_run_persistence_error_identifies_specific_failing_parameter() -> None:
    """When batch fails with multiple params, fallback to one-by-one should pinpoint the failing key."""
    params = [
        _make_workflow_parameter(
            "alpha_count", workflow_parameter_type=WorkflowParameterType.INTEGER, default_value="1"
        ),
        _make_workflow_parameter("middle_label", default_value="mid"),
        _make_workflow_parameter("zebra_url", default_value="https://zebra.example.com"),
    ]
    # Batch insert fails wholesale; the single-insert fallback then isolates the culprit.
    batch_error = IntegrityError("INSERT", {}, Exception("constraint failed"))
    single_error = IntegrityError("INSERT", {}, Exception("NOT NULL constraint on middle_label"))

    # Single insert succeeds for alpha_count, fails on middle_label
    async def _single_insert_side_effect(
        *, workflow_run_id: str, workflow_parameter: WorkflowParameter, value: object
    ) -> None:
        if workflow_parameter.key == "middle_label":
            raise single_error

    service, organization, _ = _make_service_with_mocks(
        workflow_parameters=params,
        batch_side_effect=batch_error,
    )
    service.create_workflow_run_parameter = AsyncMock(side_effect=_single_insert_side_effect)  # type: ignore[method-assign]

    request = WorkflowRequestBody(data={"alpha_count": 5, "middle_label": "test", "zebra_url": "https://z.com"})

    with patch("skyvern.forge.sdk.workflow.service.app") as mock_app:
        # Disable any experiment-gated behavior so only the persistence path runs.
        mock_app.EXPERIMENTATION_PROVIDER.is_feature_enabled_cached = AsyncMock(return_value=False)

        with pytest.raises(WorkflowRunParameterPersistenceError) as exc_info:
            await service.setup_workflow_run(
                request_id="req_test",
                workflow_request=request,
                workflow_permanent_id="wpid_test",
                organization=organization,
            )

    error_message = str(exc_info.value)
    # Should identify only the failing parameter, not all three
    assert "middle_label" in error_message
    assert "alpha_count" not in error_message
    assert "zebra_url" not in error_message
    # The underlying single-insert error must be chained as the cause.
    assert exc_info.value.__cause__ is single_error
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_setup_workflow_run_raises_on_non_string_credential_id() -> None:
    """Credential ID parameters must be strings. Passing an int should raise InvalidCredentialId."""
    cred_param = _make_workflow_parameter(
        "credential",
        workflow_parameter_type=WorkflowParameterType.CREDENTIAL_ID,
    )
    service, organization, _ = _make_service_with_mocks(workflow_parameters=[cred_param])

    request = WorkflowRequestBody(data={"credential": 12345})  # not a string

    with patch("skyvern.forge.sdk.workflow.service.app") as mock_app:
        # Disable any experiment-gated behavior so only the validation path runs.
        mock_app.EXPERIMENTATION_PROVIDER.is_feature_enabled_cached = AsyncMock(return_value=False)

        with pytest.raises(InvalidCredentialId):
            await service.setup_workflow_run(
                request_id="req_test",
                workflow_request=request,
                workflow_permanent_id="wpid_test",
                organization=organization,
            )

    # Type validation fails before persistence is ever attempted.
    service.create_workflow_run_parameters.assert_not_awaited()
|
||||
Loading…
Add table
Add a link
Reference in a new issue