Fix ReDoS in script_reviewer string literal regex (#5364)

This commit is contained in:
Shuchang Zheng 2026-04-02 15:39:47 -07:00 committed by GitHub
parent a36e107d20
commit 4358f9c695
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
65 changed files with 5240 additions and 3471 deletions

View file

@ -110,6 +110,13 @@ PORT=8000
# ANALYTICS_ID: Distinct analytics ID (a UUID is generated if left blank). # ANALYTICS_ID: Distinct analytics ID (a UUID is generated if left blank).
ANALYTICS_ID="anonymous" ANALYTICS_ID="anonymous"
# LAMINAR
# Skyvern's backend runs on port 8000 by default. Consider updating your self-hosted Laminar env vars below to avoid port conflicts
# LMNR_HTTP_PORT=8010
# LMNR_GRPC_PORT=8011
# LMNR_BASE_URL=http://localhost
# LMNR_PROJECT_API_KEY=<your-laminar-project-api-key>
# 1Password Integration # 1Password Integration
# OP_SERVICE_ACCOUNT_TOKEN: API token for 1Password integration # OP_SERVICE_ACCOUNT_TOKEN: API token for 1Password integration
OP_SERVICE_ACCOUNT_TOKEN="" OP_SERVICE_ACCOUNT_TOKEN=""

View file

@ -184,9 +184,18 @@ If you're running Skyvern on your own infrastructure, add these to your server's
LMNR_PROJECT_API_KEY=your-laminar-api-key LMNR_PROJECT_API_KEY=your-laminar-api-key
``` ```
Skyvern's server includes a built-in `LaminarTrace` integration that initializes Laminar with the LiteLLM callback, capturing every LLM call, token count, and cost. It disables the automatic Skyvern/Patchright instrumentors (to avoid conflicts) and uses Laminar's `@observe` decorator on internal methods instead. Skyvern's server initializes Laminar at startup, which auto-instruments LiteLLM to capture every LLM call, token count, and cost. No manual callback setup is needed.
No code changes needed — once the env var is set, traces appear in your Laminar project automatically. If you're running a self-hosted Laminar instance, also set the base URL and ports:
```bash .env
LMNR_PROJECT_API_KEY=your-laminar-api-key
LMNR_BASE_URL=http://localhost
LMNR_GRPC_PORT=8011
LMNR_HTTP_PORT=8010
```
No code changes needed — once the env vars are set, traces appear in your Laminar project automatically.
--- ---

View file

@ -18,24 +18,18 @@ Copy of this guide is available in the [Laminar documentation](https://docs.lmnr
## Quickstart ## Quickstart
To trace Skyvern workflows with Laminar, **initialize Laminar and configure LiteLLM callbacks at the top of your project**. This will automatically capture all LLM calls, browser session recordings, and workflow execution details. To trace Skyvern workflows with Laminar, **initialize Laminar at the top of your project**. This will automatically instrument LiteLLM and capture all LLM calls, browser session recordings, and workflow execution details. No manual callback setup is needed.
```python {3-4} {8-12} {14-15} ```python {3} {7-8}
from skyvern import Skyvern from skyvern import Skyvern
import asyncio import asyncio
import litellm from lmnr import Laminar
from lmnr import Laminar, LaminarLiteLLMCallback, Instruments
from dotenv import load_dotenv from dotenv import load_dotenv
load_dotenv() load_dotenv()
# Initialize Laminar # Initialize Laminar — automatically instruments LiteLLM
# This will automatically trace all Skyvern functions Laminar.initialize()
# Disable OpenAI to avoid double instrumentation of LLM calls
Laminar.initialize(disabled_instruments=set([Instruments.OPENAI]))
# Configure LiteLLM to trace all LLM calls made by Skyvern
litellm.callbacks = [LaminarLiteLLMCallback()]
skyvern = Skyvern(api_key="YOUR_API_KEY") skyvern = Skyvern(api_key="YOUR_API_KEY")
@ -49,6 +43,10 @@ if __name__ == "__main__":
asyncio.run(main()) asyncio.run(main())
``` ```
<Warning>
`LaminarLiteLLMCallback` and manually setting `litellm.callbacks` are deprecated. `Laminar.initialize()` handles LiteLLM instrumentation automatically.
</Warning>
## Viewing Traces ## Viewing Traces
You can view traces in the Laminar UI by navigating to the traces tab in your project. When you select a trace, you can see: You can view traces in the Laminar UI by navigating to the traces tab in your project. When you select a trace, you can see:

View file

@ -3907,6 +3907,69 @@
} }
} }
} }
},
"/v1/sdk/run_action": {
"post": {
"tags": [
"SDK"
],
"summary": "Run an SDK action",
"description": "Execute a single SDK action with the specified parameters",
"operationId": "run_sdk_action_v1_sdk_run_action_post",
"parameters": [
{
"name": "x-api-key",
"in": "header",
"required": false,
"schema": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings.",
"title": "X-Api-Key"
},
"description": "Skyvern API key for authentication. API key can be found at https://app.skyvern.com/settings."
}
],
"requestBody": {
"required": true,
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/RunSdkActionRequest"
}
}
}
},
"responses": {
"200": {
"description": "Successful Response",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/RunSdkActionResponse"
}
}
}
},
"422": {
"description": "Validation Error",
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/HTTPValidationError"
}
}
}
}
},
"x-fern-sdk-method-name": "run_sdk_action"
}
} }
}, },
"components": { "components": {
@ -4015,6 +4078,41 @@
], ],
"title": "AWSSecretParameterYAML" "title": "AWSSecretParameterYAML"
}, },
"ActAction": {
"properties": {
"type": {
"type": "string",
"const": "ai_act",
"title": "Type",
"default": "ai_act"
},
"intention": {
"type": "string",
"title": "Intention",
"description": "Natural language prompt for the action",
"default": ""
},
"data": {
"anyOf": [
{
"type": "string"
},
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Data",
"description": "Additional context data"
}
},
"type": "object",
"title": "ActAction",
"description": "AI act action parameters."
},
"Action": { "Action": {
"properties": { "properties": {
"action_type": { "action_type": {
@ -6353,6 +6451,60 @@
"title": "BrowserSessionResponse", "title": "BrowserSessionResponse",
"description": "Response model for browser session information." "description": "Response model for browser session information."
}, },
"ClickAction": {
"properties": {
"type": {
"type": "string",
"const": "ai_click",
"title": "Type",
"default": "ai_click"
},
"selector": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Selector",
"description": "CSS selector for the element",
"default": ""
},
"intention": {
"type": "string",
"title": "Intention",
"description": "The intention or goal of the click",
"default": ""
},
"data": {
"anyOf": [
{
"type": "string"
},
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Data",
"description": "Additional context data"
},
"timeout": {
"type": "number",
"title": "Timeout",
"description": "Timeout in milliseconds",
"default": 10000
}
},
"type": "object",
"title": "ClickAction",
"description": "Click action parameters."
},
"ClickContext": { "ClickContext": {
"properties": { "properties": {
"thought": { "thought": {
@ -7742,6 +7894,88 @@
], ],
"title": "Extensions" "title": "Extensions"
}, },
"ExtractAction": {
"properties": {
"type": {
"type": "string",
"const": "extract",
"title": "Type",
"default": "extract"
},
"prompt": {
"type": "string",
"title": "Prompt",
"description": "Extraction prompt",
"default": ""
},
"extract_schema": {
"anyOf": [
{
"additionalProperties": true,
"type": "object"
},
{
"items": {},
"type": "array"
},
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Extract Schema",
"description": "Schema for extraction"
},
"error_code_mapping": {
"anyOf": [
{
"additionalProperties": {
"type": "string"
},
"type": "object"
},
{
"type": "null"
}
],
"title": "Error Code Mapping",
"description": "Error code mapping for extraction"
},
"intention": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Intention",
"description": "The intention or goal of the extraction"
},
"data": {
"anyOf": [
{
"type": "string"
},
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Data",
"description": "Additional context data"
}
},
"type": "object",
"title": "ExtractAction",
"description": "Extract data action parameters."
},
"ExtractionBlock": { "ExtractionBlock": {
"properties": { "properties": {
"label": { "label": {
@ -10776,6 +11010,97 @@
"type": "object", "type": "object",
"title": "InputOrSelectContext" "title": "InputOrSelectContext"
}, },
"InputTextAction": {
"properties": {
"type": {
"type": "string",
"const": "ai_input_text",
"title": "Type",
"default": "ai_input_text"
},
"selector": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Selector",
"description": "CSS selector for the element",
"default": ""
},
"value": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Value",
"description": "Value to input",
"default": ""
},
"intention": {
"type": "string",
"title": "Intention",
"description": "The intention or goal of the input",
"default": ""
},
"data": {
"anyOf": [
{
"type": "string"
},
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Data",
"description": "Additional context data"
},
"totp_identifier": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Totp Identifier",
"description": "TOTP identifier for input_text actions"
},
"totp_url": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Totp Url",
"description": "TOTP URL for input_text actions"
},
"timeout": {
"type": "number",
"title": "Timeout",
"description": "Timeout in milliseconds",
"default": 10000
}
},
"type": "object",
"title": "InputTextAction",
"description": "Input text action parameters."
},
"JinjaBranchCriteria": { "JinjaBranchCriteria": {
"properties": { "properties": {
"criteria_type": { "criteria_type": {
@ -10807,6 +11132,25 @@
"title": "JinjaBranchCriteria", "title": "JinjaBranchCriteria",
"description": "Jinja2-templated branch criteria (only supported criteria type for now)." "description": "Jinja2-templated branch criteria (only supported criteria type for now)."
}, },
"LocateElementAction": {
"properties": {
"type": {
"type": "string",
"const": "locate_element",
"title": "Type",
"default": "locate_element"
},
"prompt": {
"type": "string",
"title": "Prompt",
"description": "Natural language prompt to locate an element",
"default": ""
}
},
"type": "object",
"title": "LocateElementAction",
"description": "Locate element action parameters."
},
"LoginBlock": { "LoginBlock": {
"properties": { "properties": {
"label": { "label": {
@ -12859,6 +13203,53 @@
], ],
"title": "PrintPageBlockYAML" "title": "PrintPageBlockYAML"
}, },
"PromptAction": {
"properties": {
"type": {
"type": "string",
"const": "prompt",
"title": "Type",
"default": "prompt"
},
"prompt": {
"type": "string",
"title": "Prompt",
"description": "The prompt to send to the LLM"
},
"response_schema": {
"anyOf": [
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Response Schema",
"description": "Optional JSON schema to structure the response"
},
"model": {
"anyOf": [
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Model",
"description": "Optional model configuration"
}
},
"type": "object",
"required": [
"prompt"
],
"title": "PromptAction",
"description": "Prompt action parameters."
},
"PromptBranchCriteria": { "PromptBranchCriteria": {
"properties": { "properties": {
"criteria_type": { "criteria_type": {
@ -12952,6 +13343,130 @@
], ],
"title": "RunEngine" "title": "RunEngine"
}, },
"RunSdkActionRequest": {
"properties": {
"url": {
"type": "string",
"title": "Url",
"description": "The URL where the action should be executed"
},
"browser_session_id": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Browser Session Id",
"description": "The browser session ID"
},
"browser_address": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Browser Address",
"description": "The browser address"
},
"workflow_run_id": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Workflow Run Id",
"description": "Optional workflow run ID to continue an existing workflow run"
},
"action": {
"oneOf": [
{
"$ref": "#/components/schemas/ClickAction"
},
{
"$ref": "#/components/schemas/InputTextAction"
},
{
"$ref": "#/components/schemas/SelectOptionAction"
},
{
"$ref": "#/components/schemas/UploadFileAction"
},
{
"$ref": "#/components/schemas/ActAction"
},
{
"$ref": "#/components/schemas/ExtractAction"
},
{
"$ref": "#/components/schemas/LocateElementAction"
},
{
"$ref": "#/components/schemas/ValidateAction"
},
{
"$ref": "#/components/schemas/PromptAction"
}
],
"title": "Action",
"description": "The action to execute with its specific parameters",
"discriminator": {
"propertyName": "type",
"mapping": {
"ai_act": "#/components/schemas/ActAction",
"ai_click": "#/components/schemas/ClickAction",
"ai_input_text": "#/components/schemas/InputTextAction",
"ai_select_option": "#/components/schemas/SelectOptionAction",
"ai_upload_file": "#/components/schemas/UploadFileAction",
"extract": "#/components/schemas/ExtractAction",
"locate_element": "#/components/schemas/LocateElementAction",
"prompt": "#/components/schemas/PromptAction",
"validate": "#/components/schemas/ValidateAction"
}
}
}
},
"type": "object",
"required": [
"url",
"action"
],
"title": "RunSdkActionRequest",
"description": "Request to run a single SDK action."
},
"RunSdkActionResponse": {
"properties": {
"workflow_run_id": {
"type": "string",
"title": "Workflow Run Id",
"description": "The workflow run ID used for this action"
},
"result": {
"anyOf": [
{},
{
"type": "null"
}
],
"title": "Result",
"description": "The result from the action (e.g., selector, value, extracted data)"
}
},
"type": "object",
"required": [
"workflow_run_id"
],
"title": "RunSdkActionResponse",
"description": "Response from running an SDK action."
},
"RunStatus": { "RunStatus": {
"type": "string", "type": "string",
"enum": [ "enum": [
@ -13178,6 +13693,73 @@
"type": "object", "type": "object",
"title": "SelectOption" "title": "SelectOption"
}, },
"SelectOptionAction": {
"properties": {
"type": {
"type": "string",
"const": "ai_select_option",
"title": "Type",
"default": "ai_select_option"
},
"selector": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Selector",
"description": "CSS selector for the element",
"default": ""
},
"value": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Value",
"description": "Value to select",
"default": ""
},
"intention": {
"type": "string",
"title": "Intention",
"description": "The intention or goal of the selection",
"default": ""
},
"data": {
"anyOf": [
{
"type": "string"
},
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Data",
"description": "Additional context data"
},
"timeout": {
"type": "number",
"title": "Timeout",
"description": "Timeout in milliseconds",
"default": 10000
}
},
"type": "object",
"title": "SelectOptionAction",
"description": "Select option action parameters."
},
"SendEmailBlock": { "SendEmailBlock": {
"properties": { "properties": {
"label": { "label": {
@ -15609,6 +16191,73 @@
"title": "UpdateWorkflowFolderRequest", "title": "UpdateWorkflowFolderRequest",
"description": "Request model for updating a workflow's folder assignment" "description": "Request model for updating a workflow's folder assignment"
}, },
"UploadFileAction": {
"properties": {
"type": {
"type": "string",
"const": "ai_upload_file",
"title": "Type",
"default": "ai_upload_file"
},
"selector": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "Selector",
"description": "CSS selector for the element",
"default": ""
},
"file_url": {
"anyOf": [
{
"type": "string"
},
{
"type": "null"
}
],
"title": "File Url",
"description": "File URL for upload",
"default": ""
},
"intention": {
"type": "string",
"title": "Intention",
"description": "The intention or goal of the upload",
"default": ""
},
"data": {
"anyOf": [
{
"type": "string"
},
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Data",
"description": "Additional context data"
},
"timeout": {
"type": "number",
"title": "Timeout",
"description": "Timeout in milliseconds",
"default": 10000
}
},
"type": "object",
"title": "UploadFileAction",
"description": "Upload file action parameters."
},
"UploadFileResponse": { "UploadFileResponse": {
"properties": { "properties": {
"s3_uri": { "s3_uri": {
@ -16144,6 +16793,40 @@
], ],
"title": "UserDefinedError" "title": "UserDefinedError"
}, },
"ValidateAction": {
"properties": {
"type": {
"type": "string",
"const": "validate",
"title": "Type",
"default": "validate"
},
"prompt": {
"type": "string",
"title": "Prompt",
"description": "Validation criteria or condition to check"
},
"model": {
"anyOf": [
{
"additionalProperties": true,
"type": "object"
},
{
"type": "null"
}
],
"title": "Model",
"description": "Optional model configuration"
}
},
"type": "object",
"required": [
"prompt"
],
"title": "ValidateAction",
"description": "Validate action parameters."
},
"ValidationBlock": { "ValidationBlock": {
"properties": { "properties": {
"label": { "label": {

View file

@ -1,6 +1,6 @@
[project] [project]
name = "skyvern" name = "skyvern"
version = "1.0.28" version = "1.0.29"
description = "" description = ""
authors = [{ name = "Skyvern AI", email = "info@skyvern.com" }] authors = [{ name = "Skyvern AI", email = "info@skyvern.com" }]
requires-python = ">=3.11,<3.14" requires-python = ">=3.11,<3.14"

View file

@ -753,10 +753,10 @@ function CredentialsModal({
? updateCredentialMutation ? updateCredentialMutation
: createCredentialMutation; : createCredentialMutation;
const handleRenameOnly = (name: string, hasMetadataChanges: boolean) => { const handleRenameOnly = (name: string) => {
if (!editingCredential) return; if (!editingCredential) return;
// Skip the API call if nothing actually changed // Skip the API call if the name hasn't actually changed
if (name === editingCredential.name && !hasMetadataChanges) { if (name === editingCredential.name) {
reset(); reset();
setIsOpen(false); setIsOpen(false);
return; return;
@ -764,9 +764,6 @@ function CredentialsModal({
renameCredentialMutation.mutate({ renameCredentialMutation.mutate({
id: editingCredential.credential_id, id: editingCredential.credential_id,
name, name,
tested_url: testUrl.trim() || undefined,
user_context: userContext.trim() || null,
save_browser_session_intent: testAndSave,
}); });
}; };
@ -788,26 +785,14 @@ function CredentialsModal({
// In edit mode, use editingGroups to determine what changed (type-agnostic) // In edit mode, use editingGroups to determine what changed (type-agnostic)
if (isEditMode && editingCredential) { if (isEditMode && editingCredential) {
const hasMetadataChanges =
testUrl.trim() !== (editingCredential.tested_url ?? "") ||
testAndSave !==
(editingCredential.save_browser_session_intent ??
!!editingCredential.browser_profile_id) ||
userContext.trim() !== (editingCredential.user_context ?? "");
if (!editingGroups.name && !editingGroups.values) { if (!editingGroups.name && !editingGroups.values) {
if (!hasMetadataChanges) { // Nothing was edited — close silently
// Nothing was edited — close silently reset();
reset(); setIsOpen(false);
setIsOpen(false);
return;
}
// Only metadata changed (no auth values edited) — save via PATCH
handleRenameOnly(name, hasMetadataChanges);
return; return;
} }
if (editingGroups.name && !editingGroups.values) { if (editingGroups.name && !editingGroups.values) {
handleRenameOnly(name, hasMetadataChanges); handleRenameOnly(name);
return; return;
} }
} }

View file

@ -20,11 +20,17 @@ from skyvern.cli.core.artifacts import save_artifact
from skyvern.cli.core.browser_ops import ( from skyvern.cli.core.browser_ops import (
do_act, do_act,
do_extract, do_extract,
do_find,
do_frame_list, do_frame_list,
do_frame_main, do_frame_main,
do_frame_switch, do_frame_switch,
do_get_html,
do_get_styles,
do_get_value,
do_navigate, do_navigate,
do_screenshot, do_screenshot,
do_state_load,
do_state_save,
) )
from skyvern.cli.core.client import get_skyvern from skyvern.cli.core.client import get_skyvern
from skyvern.cli.core.guards import ( from skyvern.cli.core.guards import (
@ -42,12 +48,17 @@ from skyvern.cli.core.ngrok import check_ngrok_auth, detect_ngrok, offer_install
from skyvern.cli.core.session_ops import do_session_close, do_session_create, do_session_list from skyvern.cli.core.session_ops import do_session_close, do_session_create, do_session_list
from skyvern.cli.mcp_tools.browser import skyvern_login as tool_login from skyvern.cli.mcp_tools.browser import skyvern_login as tool_login
from skyvern.cli.mcp_tools.browser import skyvern_run_task as tool_run_task from skyvern.cli.mcp_tools.browser import skyvern_run_task as tool_run_task
from skyvern.cli.mcp_tools.inspection import skyvern_har_start, skyvern_har_stop
browser_app = typer.Typer(help="Browser automation commands.", no_args_is_help=True) browser_app = typer.Typer(help="Browser automation commands.", no_args_is_help=True)
session_app = typer.Typer(help="Manage browser sessions.", no_args_is_help=True) session_app = typer.Typer(help="Manage browser sessions.", no_args_is_help=True)
frame_app = typer.Typer(help="Manage iframe context.", no_args_is_help=True) frame_app = typer.Typer(help="Manage iframe context.", no_args_is_help=True)
state_app = typer.Typer(help="Save and load browser auth state.", no_args_is_help=True)
storage_app = typer.Typer(help="Read, write, and clear web storage.", no_args_is_help=True)
browser_app.add_typer(session_app, name="session") browser_app.add_typer(session_app, name="session")
browser_app.add_typer(frame_app, name="frame") browser_app.add_typer(frame_app, name="frame")
browser_app.add_typer(state_app, name="state")
browser_app.add_typer(storage_app, name="storage")
@dataclass(frozen=True) @dataclass(frozen=True)
@ -1519,3 +1530,356 @@ def frame_list_cmd(
output(data, action="frame_list", json_mode=json_output) output(data, action="frame_list", json_mode=json_output)
except Exception as e: except Exception as e:
output_error(str(e), json_mode=json_output) output_error(str(e), json_mode=json_output)
# ── State persistence commands ──────────────────────────────────────
@state_app.command("save")
def state_save_cmd(
    file_path: str = typer.Argument(help="Path to save state file (JSON)."),
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Save browser auth state (cookies + localStorage + sessionStorage) to a file."""
    # Deferred import of the path validator (NOTE(review): presumably kept local
    # to avoid importing mcp_tools at CLI startup — confirm).
    from skyvern.cli.mcp_tools.state import _validate_state_path

    async def _run() -> dict:
        # Validate/resolve the output path before connecting to the browser.
        resolved = _validate_state_path(file_path)
        connection = _resolve_connection(session, cdp)
        browser = await _connect_browser(connection)
        page = await browser.get_working_page()
        result = await do_state_save(page.page, browser, resolved)
        # Summarize what was persisted for the CLI output.
        return {
            "file_path": result.file_path,
            "cookie_count": result.cookie_count,
            "local_storage_count": result.local_storage_count,
            "session_storage_count": result.session_storage_count,
            "url": result.url,
        }

    try:
        data = asyncio.run(_run())
        output(data, action="state_save", json_mode=json_output)
    except Exception as e:
        # Any failure (validation, connection, save) is reported uniformly.
        output_error(str(e), json_mode=json_output)
@state_app.command("load")
def state_load_cmd(
    file_path: str = typer.Argument(help="Path to state file (JSON) from state save."),
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Load browser auth state (cookies + localStorage + sessionStorage) from a file."""
    from urllib.parse import urlparse

    # Deferred import of the path validator, mirroring `state save`.
    from skyvern.cli.mcp_tools.state import _validate_state_path

    async def _run() -> dict:
        # The state file must already exist for a load.
        resolved = _validate_state_path(file_path, must_exist=True)
        connection = _resolve_connection(session, cdp)
        browser = await _connect_browser(connection)
        page = await browser.get_working_page()
        # Hostname of the current page is handed to the loader
        # (NOTE(review): presumably used to decide which cookies apply —
        # see skipped_cookies in the result; confirm in do_state_load).
        current_domain = urlparse(page.page.url).hostname or ""
        result = await do_state_load(page.page, browser, resolved, current_domain)
        return {
            "cookie_count": result.cookie_count,
            "local_storage_count": result.local_storage_count,
            "session_storage_count": result.session_storage_count,
            "source_url": result.source_url,
            "skipped_cookies": result.skipped_cookies,
        }

    try:
        data = asyncio.run(_run())
        output(data, action="state_load", json_mode=json_output)
    except Exception as e:
        output_error(str(e), json_mode=json_output)
# ── Web storage commands ────────────────────────────────────────────
@storage_app.command("get-session")
def storage_get_session_cmd(
    keys: list[str] | None = typer.Argument(None, help="Specific keys to retrieve. Omit for all."),
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Read sessionStorage values from the current page."""

    async def _run() -> dict:
        conn = _resolve_connection(session, cdp)
        browser = await _connect_browser(conn)
        page = await browser.get_working_page()
        if not keys:
            # No keys requested: dump every entry in one evaluate call.
            items = await page.page.evaluate("() => Object.fromEntries(Object.entries(window.sessionStorage))")
        else:
            # Fetch each requested key individually; absent keys are omitted.
            items = {}
            for requested in keys:
                fetched = await page.page.evaluate(f"() => window.sessionStorage.getItem({json.dumps(requested)})")
                if fetched is not None:
                    items[requested] = fetched
        return {"items": items, "count": len(items)}

    try:
        payload = asyncio.run(_run())
        output(payload, action="get_session_storage", json_mode=json_output)
    except Exception as exc:
        output_error(str(exc), json_mode=json_output)
@storage_app.command("set-session")
def storage_set_session_cmd(
    key: str = typer.Argument(help="The key to set."),
    value: str = typer.Argument(help="The value to store."),
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Set a sessionStorage key-value pair."""

    async def _run() -> dict:
        conn = _resolve_connection(session, cdp)
        browser = await _connect_browser(conn)
        page = await browser.get_working_page()
        # Key/value travel as an argument array, so no escaping inside the JS.
        await page.page.evaluate("(args) => window.sessionStorage.setItem(args[0], args[1])", [key, value])
        # Only the value's length is echoed back, not the value itself.
        return {"key": key, "value_length": len(value)}

    try:
        payload = asyncio.run(_run())
        output(payload, action="set_session_storage", json_mode=json_output)
    except Exception as exc:
        output_error(str(exc), json_mode=json_output)
@storage_app.command("clear-session")
def storage_clear_session_cmd(
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Clear all sessionStorage entries."""

    async def _run() -> dict:
        conn = _resolve_connection(session, cdp)
        browser = await _connect_browser(conn)
        page = await browser.get_working_page()
        # The snippet captures the entry count before clearing so we can report it.
        js = "() => { const n = window.sessionStorage.length; window.sessionStorage.clear(); return n; }"
        removed = await page.page.evaluate(js)
        return {"cleared_count": removed}

    try:
        payload = asyncio.run(_run())
        output(payload, action="clear_session_storage", json_mode=json_output)
    except Exception as exc:
        output_error(str(exc), json_mode=json_output)
@storage_app.command("clear-local")
def storage_clear_local_cmd(
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Clear all localStorage entries."""

    async def _run() -> dict:
        conn = _resolve_connection(session, cdp)
        browser = await _connect_browser(conn)
        page = await browser.get_working_page()
        # Capture the entry count before clearing so it can be reported.
        js = "() => { const n = window.localStorage.length; window.localStorage.clear(); return n; }"
        removed = await page.page.evaluate(js)
        return {"cleared_count": removed}

    try:
        payload = asyncio.run(_run())
        output(payload, action="clear_local_storage", json_mode=json_output)
    except Exception as exc:
        output_error(str(exc), json_mode=json_output)
# ── Page JS errors command ───────────────────────────────────────────
@browser_app.command("get-errors")
def get_errors_cmd(
    text: str | None = typer.Option(None, "--text", help="Filter by substring match (case-insensitive)."),
    clear: bool = typer.Option(False, "--clear", help="Clear the buffer after reading."),
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Read uncaught JavaScript errors from the browser page."""
    # Deferred import keeps mcp_tools off the CLI's import path until needed.
    from skyvern.cli.mcp_tools.inspection import skyvern_get_errors

    async def _run() -> dict:
        return await skyvern_get_errors(text=text, clear=clear, session_id=session, cdp_url=cdp)

    try:
        response = asyncio.run(_run())
        # The tool returns an envelope: {"ok": bool, "data": ..., "error": {...}}.
        if not response.get("ok"):
            output_error(response.get("error", {}).get("message", "Unknown error"), json_mode=json_output)
        else:
            output(response["data"], action="get_errors", json_mode=json_output)
    except Exception as exc:
        output_error(str(exc), json_mode=json_output)
# ── HAR recording commands ───────────────────────────────────────────
@browser_app.command("har-start")
def har_start_cmd(
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Start recording network traffic in HAR format."""

    async def _run() -> dict:
        return await skyvern_har_start(session_id=session, cdp_url=cdp)

    try:
        response = asyncio.run(_run())
        # Envelope shape: {"ok": bool, "data": ..., "error": {"message": ...}}.
        if not response.get("ok"):
            output_error(response.get("error", {}).get("message", "Unknown error"), json_mode=json_output)
        else:
            output(response["data"], action="har_start", json_mode=json_output)
    except Exception as exc:
        output_error(str(exc), json_mode=json_output)
@browser_app.command("har-stop")
def har_stop_cmd(
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Stop HAR recording and return captured traffic."""

    async def _run() -> dict:
        return await skyvern_har_stop(session_id=session, cdp_url=cdp)

    try:
        response = asyncio.run(_run())
        # Same envelope contract as har-start.
        if not response.get("ok"):
            output_error(response.get("error", {}).get("message", "Unknown error"), json_mode=json_output)
        else:
            output(response["data"], action="har_stop", json_mode=json_output)
    except Exception as exc:
        output_error(str(exc), json_mode=json_output)
# ── DOM Inspection commands ──────────────────────────────────────────
@browser_app.command("get-html")
def get_html_cmd(
    selector: str = typer.Argument(help="CSS or XPath selector for the element."),
    outer: bool = typer.Option(False, "--outer", help="Return outerHTML instead of innerHTML."),
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Get the HTML content of a DOM element."""

    async def _run() -> dict:
        conn = _resolve_connection(session, cdp)
        browser = await _connect_browser(conn)
        page = await browser.get_working_page()
        markup = await do_get_html(page.page, selector, outer=outer)
        # Include the length so callers can gauge size without parsing.
        return {"html": markup, "selector": selector, "outer": outer, "length": len(markup)}

    try:
        payload = asyncio.run(_run())
        output(payload, action="get_html", json_mode=json_output)
    except Exception as exc:
        output_error(str(exc), json_mode=json_output)
@browser_app.command("get-value")
def get_value_cmd(
    selector: str = typer.Argument(help="CSS or XPath selector for the input element."),
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Get the current value of a form input element."""

    async def _fetch() -> dict:
        # Resolve the target browser, then read the input's current value.
        conn = _resolve_connection(session, cdp)
        browser = await _connect_browser(conn)
        page = await browser.get_working_page()
        current = await do_get_value(page.page, selector)
        return {"value": current, "selector": selector}

    try:
        output(asyncio.run(_fetch()), action="get_value", json_mode=json_output)
    except Exception as e:
        output_error(str(e), json_mode=json_output)
@browser_app.command("get-styles")
def get_styles_cmd(
    selector: str = typer.Argument(help="CSS or XPath selector for the element."),
    properties: list[str] | None = typer.Argument(None, help="Specific CSS properties (e.g. color font-size)."),
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Get computed CSS styles from a DOM element."""

    async def _fetch() -> dict:
        # Resolve the target browser, then query the element's computed styles.
        conn = _resolve_connection(session, cdp)
        browser = await _connect_browser(conn)
        page = await browser.get_working_page()
        computed = await do_get_styles(page.page, selector, properties=properties)
        return {"styles": computed, "selector": selector, "count": len(computed)}

    try:
        output(asyncio.run(_fetch()), action="get_styles", json_mode=json_output)
    except Exception as e:
        output_error(str(e), json_mode=json_output)
# -- Semantic locator command --
@browser_app.command("find")
def find_cmd(
    by: str = typer.Argument(help="Locator type: role, text, label, placeholder, alt, testid."),
    value: str = typer.Argument(help="The text/role/label to match."),
    session: str | None = typer.Option(None, help="Browser session ID."),
    cdp: str | None = typer.Option(None, "--cdp", help="CDP WebSocket URL."),
    json_output: bool = typer.Option(False, "--json", help="Output as JSON."),
) -> None:
    """Find elements using Playwright semantic locators (role, text, label, etc.)."""
    from skyvern.cli.core.browser_ops import LOCATOR_TYPES

    # Validate the locator type up front so we fail before touching the browser.
    if by not in LOCATOR_TYPES:
        allowed = ", ".join(sorted(LOCATOR_TYPES))
        output_error(f"Invalid locator type: {by!r}. Must be one of: {allowed}", json_mode=json_output)
        raise typer.Exit(code=2)

    async def _locate() -> dict:
        conn = _resolve_connection(session, cdp)
        browser = await _connect_browser(conn)
        page = await browser.get_working_page()
        found = await do_find(page, by=by, value=value)
        return asdict(found)

    try:
        output(asyncio.run(_locate()), action="find", json_mode=json_output)
    except Exception as e:
        output_error(str(e), json_mode=json_output)

View file

@ -7,7 +7,10 @@ Session resolution and output formatting are caller responsibilities.
from __future__ import annotations from __future__ import annotations
import json import json
import os
from dataclasses import dataclass from dataclasses import dataclass
from datetime import datetime, timezone
from pathlib import Path
from typing import Any from typing import Any
from .guards import GuardError from .guards import GuardError
@ -87,6 +90,48 @@ async def do_extract(
return ExtractResult(extracted=extracted) return ExtractResult(extracted=extracted)
# -- Semantic locators --
@dataclass
class FindResult:
    """Result of a semantic-locator lookup (see ``do_find``)."""

    selector: str  # Playwright-style call expression, e.g. "get_by_text('Save')"
    count: int  # number of elements matching the locator
    first_text: str | None  # text content of the first match; None when count == 0
    first_visible: bool  # visibility of the first match; False when count == 0
# Maps each public locator-type name to the Playwright page method implementing it.
locator_map: dict[str, str] = {
    "role": "get_by_role",
    "text": "get_by_text",
    "label": "get_by_label",
    "placeholder": "get_by_placeholder",
    "alt": "get_by_alt_text",
    "testid": "get_by_test_id",
}

# Public, immutable set of accepted ``by`` values for do_find and its callers.
LOCATOR_TYPES = frozenset(locator_map.keys())
async def do_find(page: Any, by: str, value: str) -> FindResult:
    """Locate elements using Playwright's semantic locator API.

    ``by`` selects the locator kind (see ``LOCATOR_TYPES``); ``value`` is the
    role/text/label/etc. to match. Raises GuardError for an unknown ``by``.
    """
    method_name = locator_map.get(by)
    if method_name is None:
        allowed = ", ".join(sorted(LOCATOR_TYPES))
        raise GuardError(
            f"Invalid locator type: {by!r}. Must be one of: {allowed}",
            f"Use one of: {allowed}",
        )
    locator = getattr(page, method_name)(value)
    matches = await locator.count()
    # Only probe the first element when at least one match exists.
    text = None
    visible = False
    if matches > 0:
        text = await locator.first.text_content()
        visible = await locator.first.is_visible()
    return FindResult(
        selector=f"{method_name}({value!r})",
        count=matches,
        first_text=text,
        first_visible=visible,
    )
# -- Frame operations -- # -- Frame operations --
@ -131,3 +176,157 @@ def do_frame_main(page: Any) -> None:
async def do_frame_list(page: Any) -> list[FrameInfo]: async def do_frame_list(page: Any) -> list[FrameInfo]:
frames = await page.frame_list() frames = await page.frame_list()
return [FrameInfo(index=f["index"], name=f["name"], url=f["url"], is_main=f["is_main"]) for f in frames] return [FrameInfo(index=f["index"], name=f["name"], url=f["url"], is_main=f["is_main"]) for f in frames]
# -- Auth state persistence --
@dataclass
class StateSaveResult:
    """Summary of a successful ``do_state_save`` call."""

    file_path: str  # path the state JSON was written to
    cookie_count: int  # number of cookies captured from the browser context
    local_storage_count: int  # number of localStorage keys captured
    session_storage_count: int  # number of sessionStorage keys captured
    url: str  # page URL at the time the state was saved
@dataclass
class StateLoadResult:
    """Summary of a successful ``do_state_load`` call."""

    cookie_count: int  # cookies actually applied (after domain filtering)
    local_storage_count: int  # localStorage keys restored
    session_storage_count: int  # sessionStorage keys restored
    source_url: str  # URL recorded in the state file when it was saved
    skipped_cookies: int  # cookies rejected because their domain did not match
def _cookie_domain_matches(cookie_domain: str, page_domain: str) -> bool:
"""Check if a cookie's domain matches the current page domain per RFC 6265.
Handles leading dots (wildcard subdomains).
Rejects suffix attacks: 'evil-example.com' must NOT match 'example.com'.
"""
if not cookie_domain or not page_domain:
return False
cd = cookie_domain.lstrip(".")
if not cd:
return False
return page_domain == cd or page_domain.endswith("." + cd)
async def do_state_save(page: Any, browser: Any, file_path: Path) -> StateSaveResult:
    """Save browser auth state (cookies + web storage) to a JSON file.

    ``page`` is the raw Playwright Page (not SkyvernBrowserPage).
    ``browser`` is a SkyvernBrowser; cookies are read via its private
    ``_browser_context`` attribute.

    The saved JSON holds a schema version, the page URL, a UTC timestamp, all
    context cookies, and the page's localStorage/sessionStorage contents.
    """
    pw_context = browser._browser_context
    cookies = await pw_context.cookies()
    # Snapshot both web-storage areas as plain dicts via in-page JS.
    local_storage = await page.evaluate("() => Object.fromEntries(Object.entries(window.localStorage))")
    session_storage = await page.evaluate("() => Object.fromEntries(Object.entries(window.sessionStorage))")
    state = {
        "version": 1,  # schema version; checked by do_state_load
        "url": page.url,
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "cookies": cookies,
        "local_storage": local_storage,
        "session_storage": session_storage,
    }
    file_path.parent.mkdir(parents=True, exist_ok=True)
    # os.open with an explicit 0o600 mode keeps the file owner-only readable,
    # since it contains live session credentials.
    fd = os.open(str(file_path), os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
    with os.fdopen(fd, "w") as f:
        json.dump(state, f, indent=2)
    return StateSaveResult(
        file_path=str(file_path),
        cookie_count=len(cookies),
        local_storage_count=len(local_storage),
        session_storage_count=len(session_storage),
        url=page.url,
    )
async def do_state_load(
    page: Any,
    browser: Any,
    file_path: Path,
    current_domain: str,
) -> StateLoadResult:
    """Load browser auth state from a JSON file.

    Validates the JSON schema version. Filters cookies to only apply those
    matching ``current_domain`` to prevent cross-domain session injection.

    Raises:
        ValueError: if the file's ``version`` field is not the supported schema.
    """
    raw = file_path.read_text()
    state = json.loads(raw)
    if state.get("version") != 1:
        raise ValueError(f"Unsupported state file version: {state.get('version')}")
    pw_context = browser._browser_context
    all_cookies = state.get("cookies", [])
    # Drop any cookie whose domain does not cover the current page's domain.
    safe_cookies = [c for c in all_cookies if _cookie_domain_matches(c.get("domain", ""), current_domain)]
    skipped = len(all_cookies) - len(safe_cookies)
    if safe_cookies:
        await pw_context.add_cookies(safe_cookies)
    # Restore each storage key via in-page JS; args are passed as [key, value].
    local_storage = state.get("local_storage", {})
    for k, v in local_storage.items():
        await page.evaluate(
            "(args) => window.localStorage.setItem(args[0], args[1])",
            [k, v],
        )
    session_storage = state.get("session_storage", {})
    for k, v in session_storage.items():
        await page.evaluate(
            "(args) => window.sessionStorage.setItem(args[0], args[1])",
            [k, v],
        )
    return StateLoadResult(
        cookie_count=len(safe_cookies),
        local_storage_count=len(local_storage),
        session_storage_count=len(session_storage),
        source_url=state.get("url", ""),
        skipped_cookies=skipped,
    )
# -- DOM inspection --
async def do_get_html(page: Any, selector: str, outer: bool = False) -> str:
    """Return innerHTML (default) or outerHTML of the element matched by ``selector``.

    ``page`` is the raw Playwright Page.
    """
    if outer:
        prop = "outerHTML"
    else:
        prop = "innerHTML"
    element = page.locator(selector)
    return await element.evaluate(f"el => el.{prop}")
async def do_get_value(page: Any, selector: str) -> str | None:
"""Get the current value of a form input element."""
return await page.locator(selector).input_value()
async def do_get_styles(page: Any, selector: str, properties: list[str] | None = None) -> dict[str, str]:
    """Get computed CSS styles from an element.

    When ``properties`` is given, only those properties are returned (an empty
    list short-circuits to ``{}`` without touching the page). When omitted, the
    first 100 computed properties are returned to bound the payload size.
    """
    if properties is not None:
        if not properties:
            return {}
        # Targeted lookup: evaluate only the requested properties in-page.
        return await page.locator(selector).evaluate(
            """(el, props) => {
                const styles = window.getComputedStyle(el);
                return Object.fromEntries(props.map(p => [p, styles.getPropertyValue(p)]));
            }""",
            properties,
        )
    # Full dump, capped at 100 properties.
    return await page.locator(selector).evaluate(
        """el => {
            const styles = window.getComputedStyle(el);
            const result = {};
            for (let i = 0; i < Math.min(styles.length, 100); i++) {
                result[styles[i]] = styles.getPropertyValue(styles[i]);
            }
            return result;
        }"""
    )

View file

@ -31,8 +31,10 @@ class SessionState:
console_messages: deque[dict[str, Any]] = field(default_factory=lambda: deque(maxlen=1000)) console_messages: deque[dict[str, Any]] = field(default_factory=lambda: deque(maxlen=1000))
network_requests: deque[dict[str, Any]] = field(default_factory=lambda: deque(maxlen=1000)) network_requests: deque[dict[str, Any]] = field(default_factory=lambda: deque(maxlen=1000))
dialog_events: deque[dict[str, Any]] = field(default_factory=lambda: deque(maxlen=1000)) dialog_events: deque[dict[str, Any]] = field(default_factory=lambda: deque(maxlen=1000))
page_errors: deque[dict[str, Any]] = field(default_factory=lambda: deque(maxlen=1000))
tracing_active: bool = False tracing_active: bool = False
har_enabled: bool = False har_enabled: bool = False
_har_entries: deque[dict[str, Any]] = field(default_factory=lambda: deque(maxlen=5000))
# -- Active page tracking (tab management) -- # -- Active page tracking (tab management) --
_active_page: Page | None = None _active_page: Page | None = None
# -- Page event buffer for tab_wait_for_new -- # -- Page event buffer for tab_wait_for_new --

View file

@ -19,6 +19,7 @@ from .browser import (
skyvern_evaluate, skyvern_evaluate,
skyvern_extract, skyvern_extract,
skyvern_file_upload, skyvern_file_upload,
skyvern_find,
skyvern_frame_list, skyvern_frame_list,
skyvern_frame_main, skyvern_frame_main,
skyvern_frame_switch, skyvern_frame_switch,
@ -48,7 +49,13 @@ from .folder import (
) )
from .inspection import ( from .inspection import (
skyvern_console_messages, skyvern_console_messages,
skyvern_get_errors,
skyvern_get_html,
skyvern_get_styles,
skyvern_get_value,
skyvern_handle_dialog, skyvern_handle_dialog,
skyvern_har_start,
skyvern_har_stop,
skyvern_network_requests, skyvern_network_requests,
) )
from .prompts import build_workflow, debug_automation, extract_data, qa_test from .prompts import build_workflow, debug_automation, extract_data, qa_test
@ -66,6 +73,13 @@ from .session import (
skyvern_browser_session_get, skyvern_browser_session_get,
skyvern_browser_session_list, skyvern_browser_session_list,
) )
from .state import skyvern_state_load, skyvern_state_save
from .storage import (
skyvern_clear_local_storage,
skyvern_clear_session_storage,
skyvern_get_session_storage,
skyvern_set_session_storage,
)
from .tabs import ( from .tabs import (
skyvern_tab_close, skyvern_tab_close,
skyvern_tab_list, skyvern_tab_list,
@ -157,6 +171,9 @@ targeted test cases, open a browser against the dev server, and report pass/fail
| "Switch to [tab]" / "Go to tab [N]" | skyvern_tab_switch | Change active tab | | "Switch to [tab]" / "Go to tab [N]" | skyvern_tab_switch | Change active tab |
| "Close tab" / "Close this tab" | skyvern_tab_close | Close tab by ID or index | | "Close tab" / "Close this tab" | skyvern_tab_close | Close tab by ID or index |
| "Wait for popup" / "A new tab should open" | skyvern_tab_wait_for_new | Waits for popup/new tab | | "Wait for popup" / "A new tab should open" | skyvern_tab_wait_for_new | Waits for popup/new tab |
| "Save login state" / "Remember this session" | skyvern_state_save | Persists cookies + storage to file |
| "Restore login" / "Load saved state" | skyvern_state_load | Restores cookies + storage from file |
| "Find button" / "Locate element by role/text" | skyvern_find | Semantic locator: find by role, text, label, placeholder, alt, testid |
## Critical Rules ## Critical Rules
1. Use Skyvern for all browser tasks. curl/wget/requests are fine for APIs and file downloads. 1. Use Skyvern for all browser tasks. curl/wget/requests are fine for APIs and file downloads.
@ -378,6 +395,7 @@ mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_scroll)
mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_select_option) mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_select_option)
mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_press_key) mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_press_key)
mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_wait) mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_wait)
mcp.tool(tags={"browser_primitive"}, annotations=_RO)(skyvern_find)
# -- Tab management (multi-tab) -- # -- Tab management (multi-tab) --
mcp.tool(tags={"tab_management"}, annotations=_RO)(skyvern_tab_list) mcp.tool(tags={"tab_management"}, annotations=_RO)(skyvern_tab_list)
@ -391,10 +409,26 @@ mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_frame_switch)
mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_frame_main) mcp.tool(tags={"browser_primitive"}, annotations=_MUT)(skyvern_frame_main)
mcp.tool(tags={"browser_primitive"}, annotations=_RO)(skyvern_frame_list) mcp.tool(tags={"browser_primitive"}, annotations=_RO)(skyvern_frame_list)
# -- Inspection tools (console, network, dialog) -- # -- Auth state persistence --
mcp.tool(tags={"state"}, annotations=_MUT)(skyvern_state_save)
mcp.tool(tags={"state"}, annotations=_MUT)(skyvern_state_load)
# -- Inspection tools (console, network, dialog, page errors, DOM) --
mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_console_messages) mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_console_messages)
mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_network_requests) mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_network_requests)
mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_handle_dialog) mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_handle_dialog)
mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_get_errors)
mcp.tool(tags={"inspection"}, annotations=_MUT)(skyvern_har_start)
mcp.tool(tags={"inspection"}, annotations=_MUT)(skyvern_har_stop)
mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_get_html)
mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_get_value)
mcp.tool(tags={"inspection"}, annotations=_RO)(skyvern_get_styles)
# -- Web storage (sessionStorage + localStorage) --
mcp.tool(tags={"storage"}, annotations=_RO)(skyvern_get_session_storage)
mcp.tool(tags={"storage"}, annotations=_MUT)(skyvern_set_session_storage)
mcp.tool(tags={"storage"}, annotations=_DEST)(skyvern_clear_session_storage)
mcp.tool(tags={"storage"}, annotations=_DEST)(skyvern_clear_local_storage)
# -- Block discovery + validation (no browser needed) -- # -- Block discovery + validation (no browser needed) --
mcp.tool(tags={"block_discovery"}, annotations=_RO)(skyvern_block_schema) mcp.tool(tags={"block_discovery"}, annotations=_RO)(skyvern_block_schema)
@ -463,6 +497,7 @@ __all__ = [
"skyvern_select_option", "skyvern_select_option",
"skyvern_press_key", "skyvern_press_key",
"skyvern_wait", "skyvern_wait",
"skyvern_find",
# Tab management # Tab management
"skyvern_tab_list", "skyvern_tab_list",
"skyvern_tab_new", "skyvern_tab_new",
@ -473,10 +508,21 @@ __all__ = [
"skyvern_frame_switch", "skyvern_frame_switch",
"skyvern_frame_main", "skyvern_frame_main",
"skyvern_frame_list", "skyvern_frame_list",
# Inspection (console, network, dialog) # Inspection (console, network, dialog, page errors, DOM)
"skyvern_console_messages", "skyvern_console_messages",
"skyvern_network_requests", "skyvern_network_requests",
"skyvern_handle_dialog", "skyvern_handle_dialog",
"skyvern_get_errors",
"skyvern_har_start",
"skyvern_har_stop",
"skyvern_get_html",
"skyvern_get_value",
"skyvern_get_styles",
# Web storage
"skyvern_get_session_storage",
"skyvern_set_session_storage",
"skyvern_clear_session_storage",
"skyvern_clear_local_storage",
# Block discovery + validation # Block discovery + validation
"skyvern_block_schema", "skyvern_block_schema",
"skyvern_block_validate", "skyvern_block_validate",
@ -506,6 +552,9 @@ __all__ = [
"skyvern_script_versions", "skyvern_script_versions",
"skyvern_script_fallback_episodes", "skyvern_script_fallback_episodes",
"skyvern_script_deploy", "skyvern_script_deploy",
# Auth state persistence
"skyvern_state_save",
"skyvern_state_load",
# Prompts # Prompts
"build_workflow", "build_workflow",
"debug_automation", "debug_automation",

View file

@ -14,6 +14,7 @@ from pydantic import Field
from skyvern.cli.core.browser_ops import ( from skyvern.cli.core.browser_ops import (
do_act, do_act,
do_extract, do_extract,
do_find,
do_frame_list, do_frame_list,
do_frame_main, do_frame_main,
do_frame_switch, do_frame_switch,
@ -1795,3 +1796,69 @@ async def skyvern_frame_list(
}, },
timing_ms=timer.timing_ms, timing_ms=timer.timing_ms,
) )
async def skyvern_find(
    by: Annotated[
        str,
        Field(description="Locator type: role, text, label, placeholder, alt, testid"),
    ],
    value: Annotated[
        str,
        Field(description="The text, role, label, placeholder, alt text, or test ID to match"),
    ],
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...)")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL")] = None,
) -> dict[str, Any]:
    """Find elements using Playwright's semantic locator API.

    Locates elements by accessibility role, visible text, label, placeholder, alt text, or test ID.
    Returns the match count, first element's text content, and visibility status.
    Use this to verify elements exist before interacting with them, or to inspect element state.

    Locator types:
    - role: ARIA role (button, link, heading, textbox, etc.)
    - text: Visible text content
    - label: Associated label text (for form inputs)
    - placeholder: Placeholder attribute text
    - alt: Alt text (for images)
    - testid: data-testid attribute value
    """
    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("skyvern_find", ok=False, error=no_browser_error())
    with Timer() as timer:
        try:
            result = await do_find(page, by=by, value=value)
            timer.mark("find")
        except GuardError as e:
            # Unknown locator type: report as bad input, not an action failure.
            return make_result(
                "skyvern_find",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.INVALID_INPUT, str(e), e.hint),
            )
        except Exception as e:
            return make_result(
                "skyvern_find",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.ACTION_FAILED, str(e), "Check the locator type and value"),
            )
    return make_result(
        "skyvern_find",
        browser_context=ctx,
        data={
            "selector": result.selector,
            "count": result.count,
            "first_text": result.first_text,
            "first_visible": result.first_visible,
            # Copy-pasteable Playwright SDK call equivalent to this lookup.
            "sdk_equivalent": f"page.{result.selector}",
        },
        timing_ms=timer.timing_ms,
    )

View file

@ -3,7 +3,9 @@ from __future__ import annotations
import asyncio import asyncio
import re import re
import time import time
from datetime import datetime, timedelta, timezone
from typing import Annotated, Any from typing import Annotated, Any
from urllib.parse import parse_qsl, urlparse
import structlog import structlog
from pydantic import Field from pydantic import Field
@ -32,6 +34,9 @@ _SECRET_QUERY_PARAMS = frozenset(
} }
) )
_REDACTED_HEADERS = frozenset({"authorization", "cookie", "set-cookie", "proxy-authorization"})
_SECRET_QS_NAMES = frozenset(p.lower() for p in _SECRET_QUERY_PARAMS)
_STATELESS_ERROR_MSG = ( _STATELESS_ERROR_MSG = (
"Inspection tools are not supported in stateless HTTP mode. " "Inspection tools are not supported in stateless HTTP mode. "
"Event buffers are not persisted across requests in this transport. " "Event buffers are not persisted across requests in this transport. "
@ -74,7 +79,7 @@ def _redact_url(url: str) -> str:
def _make_page_handlers(state: Any, raw_page: Any) -> dict[str, Any]: def _make_page_handlers(state: Any, raw_page: Any) -> dict[str, Any]:
"""Create console/network/dialog handlers bound to a specific page.""" """Create console/network/dialog/pageerror handlers bound to a specific page."""
def _on_console(msg: Any) -> None: def _on_console(msg: Any) -> None:
try: try:
@ -105,6 +110,10 @@ def _make_page_handlers(state: Any, raw_page: Any) -> dict[str, Any]:
pass pass
content_length = response.headers.get("content-length") content_length = response.headers.get("content-length")
try:
response_size = int(content_length) if content_length is not None else None
except (ValueError, TypeError):
response_size = None
state.network_requests.append( state.network_requests.append(
{ {
"url": _redact_url(response.url), "url": _redact_url(response.url),
@ -112,11 +121,72 @@ def _make_page_handlers(state: Any, raw_page: Any) -> dict[str, Any]:
"status": response.status, "status": response.status,
"content_type": response.headers.get("content-type", ""), "content_type": response.headers.get("content-type", ""),
"timing_ms": round(timing, 1), "timing_ms": round(timing, 1),
"response_size": int(content_length) if content_length is not None else None, "response_size": response_size,
"page_url": raw_page.url, "page_url": raw_page.url,
"tab_id": str(id(raw_page)), "tab_id": str(id(raw_page)),
} }
) )
# HAR recording: capture enhanced entry when enabled
if state.har_enabled:
req_headers = []
try:
for k, v in response.request.headers.items():
if k.lower() not in _REDACTED_HEADERS:
req_headers.append({"name": k, "value": v})
except Exception:
pass
resp_headers = []
try:
for k, v in response.headers.items():
if k.lower() not in _REDACTED_HEADERS:
resp_headers.append({"name": k, "value": v})
except Exception:
pass
# Approximate request start from response time minus elapsed
started = datetime.now(timezone.utc) - timedelta(milliseconds=timing)
qs = [
{"name": n, "value": "REDACTED" if n.lower() in _SECRET_QS_NAMES else v}
for n, v in parse_qsl(urlparse(response.url).query)
]
state._har_entries.append(
{
"startedDateTime": started.isoformat(),
"time": round(timing, 1),
"request": {
"method": response.request.method,
"url": _redact_url(response.url),
"httpVersion": "HTTP/1.1",
"headers": req_headers,
"queryString": qs,
"cookies": [],
"headersSize": -1,
"bodySize": -1,
},
"response": {
"status": response.status,
"statusText": response.status_text if hasattr(response, "status_text") else "",
"httpVersion": "HTTP/1.1",
"headers": resp_headers,
"content": {
"size": response_size if response_size is not None else -1,
"mimeType": response.headers.get("content-type", ""),
},
"redirectURL": "",
"headersSize": -1,
"bodySize": -1,
"cookies": [],
},
"timings": {
"send": -1,
"wait": round(timing, 1),
"receive": -1,
},
}
)
except Exception: except Exception:
pass pass
@ -146,7 +216,24 @@ def _make_page_handlers(state: Any, raw_page: Any) -> dict[str, Any]:
else: else:
event_record["action_taken"] = "dismissed" event_record["action_taken"] = "dismissed"
return {"console": _on_console, "response": _on_response, "dialog": _on_dialog} def _on_pageerror(error: Any) -> None:
try:
try:
message = str(error)
except Exception:
message = "<unserializable error>"
state.page_errors.append(
{
"message": message,
"timestamp": time.time(),
"page_url": raw_page.url,
"tab_id": str(id(raw_page)),
}
)
except Exception:
pass
return {"console": _on_console, "response": _on_response, "dialog": _on_dialog, "pageerror": _on_pageerror}
def _register_hooks_on_page(state: Any, raw_page: Any) -> None: def _register_hooks_on_page(state: Any, raw_page: Any) -> None:
@ -159,6 +246,7 @@ def _register_hooks_on_page(state: Any, raw_page: Any) -> None:
raw_page.on("console", handlers["console"]) raw_page.on("console", handlers["console"])
raw_page.on("response", handlers["response"]) raw_page.on("response", handlers["response"])
raw_page.on("dialog", handlers["dialog"]) raw_page.on("dialog", handlers["dialog"])
raw_page.on("pageerror", handlers["pageerror"])
state._hooked_page_ids.add(page_id) state._hooked_page_ids.add(page_id)
state._hooked_handlers_map[page_id] = handlers state._hooked_handlers_map[page_id] = handlers
@ -386,3 +474,306 @@ async def skyvern_handle_dialog(
"count": len(entries), "count": len(entries),
}, },
) )
# -- Page JS error tool --
async def skyvern_get_errors(
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...)")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL")] = None,
    text: Annotated[
        str | None,
        Field(description="Filter by substring match in error message. Case-insensitive."),
    ] = None,
    clear: Annotated[
        bool,
        Field(description="Clear the buffer after reading. Default false."),
    ] = False,
) -> dict[str, Any]:
    """Read uncaught JavaScript errors (exceptions) from the browser page.

    Captures unhandled errors thrown by page scripts (window onerror / unhandledrejection).
    These are distinct from console.error() messages — use skyvern_console_messages(level='error') for those.
    Use text='...' to search for specific error messages.
    """
    # NOTE(review): local import — presumably avoids a module-load cycle; confirm.
    from skyvern.cli.core.session_manager import is_stateless_http_mode

    if is_stateless_http_mode():
        return make_result(
            "skyvern_get_errors",
            ok=False,
            error=make_error(ErrorCode.ACTION_FAILED, _STATELESS_ERROR_MSG, _STATELESS_HINT),
        )
    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("skyvern_get_errors", ok=False, error=no_browser_error())
    state = get_current_session()
    has_filter = text is not None
    entries = list(state.page_errors)
    if text:
        text_lower = text.lower()
        entries = [e for e in entries if text_lower in e.get("message", "").lower()]
    if clear:
        if has_filter:
            # With a filter, remove only the entries returned to the caller and
            # rebuild the deque (same maxlen) so unmatched errors are kept.
            matched = {id(e) for e in entries}
            state.page_errors = type(state.page_errors)(
                (e for e in state.page_errors if id(e) not in matched),
                maxlen=state.page_errors.maxlen,
            )
        else:
            state.page_errors.clear()
    return make_result(
        "skyvern_get_errors",
        browser_context=ctx,
        data={
            "errors": entries,
            "count": len(entries),
            "buffer_size": len(state.page_errors),  # size after any clearing
        },
    )
# -- HAR recording tools --
async def skyvern_har_start(
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...)")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL")] = None,
) -> dict[str, Any]:
    """Start recording network traffic in HAR format.

    All HTTP requests/responses will be captured until skyvern_har_stop is called.
    The HAR buffer is cleared on start. Only one recording can be active at a time.
    Use skyvern_har_stop to retrieve the HAR data.
    """
    # NOTE(review): local import — presumably avoids a module-load cycle; confirm.
    from skyvern.cli.core.session_manager import is_stateless_http_mode

    if is_stateless_http_mode():
        return make_result(
            "skyvern_har_start",
            ok=False,
            error=make_error(ErrorCode.ACTION_FAILED, _STATELESS_ERROR_MSG, _STATELESS_HINT),
        )
    try:
        _, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("skyvern_har_start", ok=False, error=no_browser_error())
    state = get_current_session()
    if state.har_enabled:
        # Enforce the single-active-recording invariant.
        return make_result(
            "skyvern_har_start",
            ok=False,
            browser_context=ctx,
            error=make_error(
                ErrorCode.ACTION_FAILED,
                "HAR recording is already active",
                "Call skyvern_har_stop first to stop the current recording",
            ),
        )
    state._har_entries.clear()  # drop entries from any previous recording
    state.har_enabled = True
    return make_result(
        "skyvern_har_start",
        browser_context=ctx,
        data={
            "recording": True,
            "message": "HAR recording started. Network traffic is being captured.",
        },
    )
async def skyvern_har_stop(
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...)")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL")] = None,
) -> dict[str, Any]:
    """Stop HAR recording and return the captured traffic as HAR 1.2 JSON.

    Returns a complete HAR archive with all HTTP requests/responses captured since skyvern_har_start.
    The HAR data can be imported into browser DevTools, Charles Proxy, or other HTTP analysis tools.
    """
    # NOTE(review): local import — presumably avoids a module-load cycle; confirm.
    from skyvern.cli.core.session_manager import is_stateless_http_mode

    if is_stateless_http_mode():
        return make_result(
            "skyvern_har_stop",
            ok=False,
            error=make_error(ErrorCode.ACTION_FAILED, _STATELESS_ERROR_MSG, _STATELESS_HINT),
        )
    try:
        _, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("skyvern_har_stop", ok=False, error=no_browser_error())
    state = get_current_session()
    if not state.har_enabled:
        return make_result(
            "skyvern_har_stop",
            ok=False,
            browser_context=ctx,
            error=make_error(
                ErrorCode.ACTION_FAILED,
                "No active HAR recording",
                "Call skyvern_har_start first to begin recording",
            ),
        )
    # Snapshot the entries before resetting the recording state.
    entries = list(state._har_entries)
    state.har_enabled = False
    state._har_entries.clear()
    har = {
        "log": {
            "version": "1.2",
            "creator": {"name": "Skyvern", "version": "1.0"},
            "pages": [],  # no per-page records are captured
            "entries": entries,
        },
    }
    return make_result(
        "skyvern_har_stop",
        browser_context=ctx,
        data={
            "har": har,
            "entry_count": len(entries),
        },
    )
# -- DOM inspection tools --
async def skyvern_get_html(
    selector: Annotated[str, Field(description="CSS or XPath selector for the element.")],
    outer: Annotated[
        bool,
        Field(description="If true, return outerHTML (includes the element itself). Default false (innerHTML)."),
    ] = False,
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...)")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL")] = None,
) -> dict[str, Any]:
    """Get the HTML content of a DOM element.

    Returns innerHTML by default (children only). Set outer=true for outerHTML (includes the element tag).
    Useful for inspecting page structure, checking rendered content, or debugging element contents.
    """
    # NOTE(review): local import — presumably avoids a module-load cycle; confirm.
    from skyvern.cli.core.browser_ops import do_get_html

    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("skyvern_get_html", ok=False, error=no_browser_error())
    try:
        html = await do_get_html(page, selector, outer=outer)
        return make_result(
            "skyvern_get_html",
            browser_context=ctx,
            data={
                "html": html,
                "selector": selector,
                "outer": outer,
                "length": len(html),  # character count of the returned markup
            },
        )
    except Exception as e:
        return make_result(
            "skyvern_get_html",
            ok=False,
            browser_context=ctx,
            error=make_error(ErrorCode.ACTION_FAILED, str(e), "Check that the selector matches an element on the page"),
        )
async def skyvern_get_value(
    selector: Annotated[str, Field(description="CSS or XPath selector for the input element.")],
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...)")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL")] = None,
) -> dict[str, Any]:
    """Get the current value of a form input element.

    Works with <input>, <textarea>, and <select> elements.
    Returns the current value (what the user typed or selected), not the placeholder or label.
    """
    # NOTE(review): local import — presumably avoids a module-load cycle; confirm.
    from skyvern.cli.core.browser_ops import do_get_value

    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("skyvern_get_value", ok=False, error=no_browser_error())
    try:
        value = await do_get_value(page, selector)
        return make_result(
            "skyvern_get_value",
            browser_context=ctx,
            data={
                "value": value,
                "selector": selector,
            },
        )
    except Exception as e:
        return make_result(
            "skyvern_get_value",
            ok=False,
            browser_context=ctx,
            error=make_error(
                ErrorCode.ACTION_FAILED, str(e), "Check that the selector matches an input/textarea/select element"
            ),
        )
async def skyvern_get_styles(
    selector: Annotated[str, Field(description="CSS or XPath selector for the element.")],
    properties: Annotated[
        list[str] | None,
        Field(description="Specific CSS properties to retrieve (e.g. ['color', 'font-size']). Omit for all (max 100)."),
    ] = None,
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...)")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL")] = None,
) -> dict[str, Any]:
    """Get computed CSS styles from a DOM element.

    Returns the browser's computed style values (after CSS cascade + inheritance).
    Pass ``properties`` for a targeted lookup, or omit it to receive the first 100
    computed properties. Useful for verifying visual styling, checking visibility,
    or debugging layout issues.
    """
    from skyvern.cli.core.browser_ops import do_get_styles

    # Resolve the target page first; report a structured error when no browser exists.
    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("skyvern_get_styles", ok=False, error=no_browser_error())
    try:
        computed = await do_get_styles(page, selector, properties=properties)
        payload = {
            "styles": computed,
            "selector": selector,
            "count": len(computed),
        }
        return make_result("skyvern_get_styles", browser_context=ctx, data=payload)
    except Exception as exc:
        # Selector misses and evaluation failures become structured tool errors.
        hint = "Check that the selector matches an element on the page"
        return make_result(
            "skyvern_get_styles",
            ok=False,
            browser_context=ctx,
            error=make_error(ErrorCode.ACTION_FAILED, str(exc), hint),
        )

View file

@ -0,0 +1,182 @@
"""MCP tools for browser auth state persistence (save/load).
Save and restore cookies, localStorage, and sessionStorage across sessions.
"""
from __future__ import annotations
import json
from pathlib import Path
from typing import Annotated, Any
from urllib.parse import urlparse
import structlog
from pydantic import Field
from ._common import ErrorCode, Timer, make_error, make_result
from ._session import BrowserNotAvailableError, get_current_session, get_page, no_browser_error
LOG = structlog.get_logger(__name__)
def _validate_state_path(file_path: str, *, must_exist: bool = False) -> Path:
    """Validate and resolve a state file path. Prevents path traversal.

    Restricts paths to the current working directory or ~/.skyvern/.
    Rejects symlinks to prevent TOCTOU attacks.

    :param file_path: User-supplied path to a state file.
    :param must_exist: When True, raise FileNotFoundError if the resolved
        path does not exist (checked before the extension rule, so a missing
        file reports "not found" rather than "bad extension").
    :returns: The fully resolved ``Path``.
    :raises ValueError: symlink, escape from the allowed roots, or bad extension.
    :raises FileNotFoundError: ``must_exist`` is True and the file is absent.
    """
    raw = Path(file_path)
    # Refuse symlinked state files outright: resolving one could redirect
    # reads/writes outside the allowed roots after validation (TOCTOU).
    if raw.is_symlink():
        raise ValueError(f"Symlinks not allowed for state files: {raw}")
    resolved = raw.resolve()
    allowed_roots = [Path.cwd().resolve(), (Path.home() / ".skyvern").resolve()]
    # Path.is_relative_to is separator-aware (the previous string-prefix check
    # hard-coded "/", which breaks containment on Windows) and already treats
    # the root itself as contained.
    if not any(resolved.is_relative_to(root) for root in allowed_roots):
        raise ValueError(f"State file must be under working directory or ~/.skyvern/: {resolved}")
    if must_exist and not resolved.exists():
        raise FileNotFoundError(f"State file not found: {resolved}")
    if resolved.suffix not in (".json", ""):
        raise ValueError(f"State file must have .json extension or no extension: {resolved}")
    return resolved
async def skyvern_state_save(
    file_path: Annotated[
        str,
        Field(description="Path to save state file (JSON). Must be under cwd or ~/.skyvern/."),
    ],
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...).")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL.")] = None,
) -> dict[str, Any]:
    """Save browser auth state (cookies + localStorage + sessionStorage) to a JSON file.

    Use this to persist login sessions so you can restore them later with state_load,
    avoiding repeated login flows. The state file is saved on the MCP server's local disk.
    Common workflow: log in -> state_save -> close session -> create new session ->
    navigate -> state_load.
    """
    # Resolve the target page first; without a live browser there is nothing to save.
    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("state_save", ok=False, error=no_browser_error())
    with Timer() as timer:
        try:
            # Path validation (traversal/symlink/extension rules) happens before
            # any directory creation or disk write.
            resolved = _validate_state_path(file_path)
            resolved.parent.mkdir(parents=True, exist_ok=True)
            session = get_current_session()
            browser = session.browser
            if browser is None:
                # A page can exist via cdp_url while the session has no managed
                # browser object; state extraction needs the browser handle.
                return make_result(
                    "state_save",
                    ok=False,
                    browser_context=ctx,
                    error=make_error(ErrorCode.NO_ACTIVE_BROWSER, "No browser available", "Create a session first"),
                )
            from skyvern.cli.core.browser_ops import do_state_save
            result = await do_state_save(page.page, browser, resolved)
            timer.mark("sdk")
            # Success payload mirrors the counts reported by do_state_save.
            return make_result(
                "state_save",
                browser_context=ctx,
                data={
                    "file_path": result.file_path,
                    "cookie_count": result.cookie_count,
                    "local_storage_count": result.local_storage_count,
                    "session_storage_count": result.session_storage_count,
                    "url": result.url,
                },
                timing_ms=timer.timing_ms,
            )
        except (ValueError, OSError) as e:
            # Expected failures: path validation errors and filesystem errors.
            return make_result(
                "state_save",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.ACTION_FAILED, str(e), "Check file path is valid and writable"),
            )
        except Exception as e:
            # Unexpected failures are logged with traceback before being
            # converted into a structured tool error.
            LOG.exception("state_save failed", error=str(e))
            return make_result(
                "state_save",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.ACTION_FAILED, str(e), "Unexpected error during state save"),
            )
async def skyvern_state_load(
    file_path: Annotated[
        str,
        Field(description="Path to state file (JSON) previously created by state_save."),
    ],
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...).")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL.")] = None,
) -> dict[str, Any]:
    """Load browser auth state (cookies + localStorage + sessionStorage) from a JSON file.

    Restores a previously saved login session. Cookies are filtered to only apply those
    matching the current page's domain (prevents cross-domain injection).
    Navigate to the target site BEFORE calling state_load so cookie domain filtering
    works correctly.
    Common workflow: create session -> navigate to site -> state_load -> refresh page ->
    you're logged in.
    """
    # Resolve the target page first; without a live browser there is nothing to restore.
    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("state_load", ok=False, error=no_browser_error())
    with Timer() as timer:
        try:
            # must_exist=True: a missing state file is reported as FileNotFoundError
            # and routed to the "check file path" error branch below.
            resolved = _validate_state_path(file_path, must_exist=True)
            session = get_current_session()
            browser = session.browser
            if browser is None:
                # A page can exist via cdp_url while the session has no managed
                # browser object; cookie injection needs the browser handle.
                return make_result(
                    "state_load",
                    ok=False,
                    browser_context=ctx,
                    error=make_error(ErrorCode.NO_ACTIVE_BROWSER, "No browser available", "Create a session first"),
                )
            from skyvern.cli.core.browser_ops import do_state_load
            # The current hostname drives cookie domain filtering inside do_state_load.
            current_domain = urlparse(page.page.url).hostname or ""
            result = await do_state_load(page.page, browser, resolved, current_domain)
            timer.mark("sdk")
            # skipped_cookies reports cookies rejected by the domain filter.
            return make_result(
                "state_load",
                browser_context=ctx,
                data={
                    "cookie_count": result.cookie_count,
                    "local_storage_count": result.local_storage_count,
                    "session_storage_count": result.session_storage_count,
                    "source_url": result.source_url,
                    "skipped_cookies": result.skipped_cookies,
                },
                timing_ms=timer.timing_ms,
            )
        except (ValueError, FileNotFoundError, json.JSONDecodeError) as e:
            # Expected failures: path validation, missing file, malformed JSON.
            return make_result(
                "state_load",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.ACTION_FAILED, str(e), "Check file path and file format"),
            )
        except Exception as e:
            # Unexpected failures are logged with traceback before being
            # converted into a structured tool error.
            LOG.exception("state_load failed", error=str(e))
            return make_result(
                "state_load",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.ACTION_FAILED, str(e), "Unexpected error during state load"),
            )

View file

@ -0,0 +1,168 @@
"""MCP tools for web storage management (sessionStorage + localStorage clear).
Inline pattern — trivial page.evaluate wrappers, no do_* functions.
"""
from __future__ import annotations
import json
from typing import Annotated, Any
import structlog
from pydantic import Field
from ._common import ErrorCode, Timer, make_error, make_result
from ._session import BrowserNotAvailableError, get_page, no_browser_error
LOG = structlog.get_logger(__name__)
async def skyvern_get_session_storage(
    keys: Annotated[list[str] | None, Field(description="Specific keys to retrieve. Omit to get all.")] = None,
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...).")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL.")] = None,
) -> dict[str, Any]:
    """Read sessionStorage values from the current page.

    Returns every key-value pair, or only the requested keys when ``keys`` is given.
    Useful for reading auth tokens, user preferences, or temporary state stored by web apps.
    """
    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("get_session_storage", ok=False, error=no_browser_error())
    with Timer() as timer:
        try:
            if not keys:
                # No filter: snapshot the whole storage object in one evaluate call.
                items = await page.evaluate("() => Object.fromEntries(Object.entries(window.sessionStorage))")
            else:
                items = {}
                for storage_key in keys:
                    # json.dumps safely quotes the key for embedding in the JS source.
                    stored = await page.evaluate(f"() => window.sessionStorage.getItem({json.dumps(storage_key)})")
                    if stored is not None:
                        items[storage_key] = stored
            timer.mark("sdk")
            return make_result(
                "get_session_storage",
                browser_context=ctx,
                data={"items": items, "count": len(items)},
                timing_ms=timer.timing_ms,
            )
        except Exception as exc:
            return make_result(
                "get_session_storage",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.ACTION_FAILED, str(exc), "Check page has loaded"),
            )
async def skyvern_set_session_storage(
    key: Annotated[str, Field(description="The key to set.")],
    value: Annotated[str, Field(description="The value to store.")],
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...).")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL.")] = None,
) -> dict[str, Any]:
    """Set a sessionStorage key-value pair on the current page.

    sessionStorage persists only for the current tab/session and is cleared
    when the tab closes.
    """
    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("set_session_storage", ok=False, error=no_browser_error())
    with Timer() as timer:
        try:
            # Pass key/value as an evaluate argument list rather than interpolating
            # them into the script source.
            js_setter = "(args) => window.sessionStorage.setItem(args[0], args[1])"
            await page.evaluate(js_setter, [key, value])
            timer.mark("sdk")
            # Echo the key plus value length (not the value itself) in the result.
            payload = {"key": key, "value_length": len(value)}
            return make_result(
                "set_session_storage",
                browser_context=ctx,
                data=payload,
                timing_ms=timer.timing_ms,
            )
        except Exception as exc:
            return make_result(
                "set_session_storage",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.ACTION_FAILED, str(exc), "Check page has loaded"),
            )
async def skyvern_clear_session_storage(
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...).")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL.")] = None,
) -> dict[str, Any]:
    """Clear all sessionStorage entries on the current page.

    Removes every key-value pair from sessionStorage. Cannot be undone.
    """
    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("clear_session_storage", ok=False, error=no_browser_error())
    with Timer() as timer:
        try:
            # Capture the entry count before clearing so the result can report
            # how many pairs were removed.
            js_clear = "() => { const n = window.sessionStorage.length; window.sessionStorage.clear(); return n; }"
            removed = await page.evaluate(js_clear)
            timer.mark("sdk")
            return make_result(
                "clear_session_storage",
                browser_context=ctx,
                data={"cleared_count": removed},
                timing_ms=timer.timing_ms,
            )
        except Exception as exc:
            return make_result(
                "clear_session_storage",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.ACTION_FAILED, str(exc), "Check page has loaded"),
            )
async def skyvern_clear_local_storage(
    session_id: Annotated[str | None, Field(description="Browser session ID (pbs_...).")] = None,
    cdp_url: Annotated[str | None, Field(description="CDP WebSocket URL.")] = None,
) -> dict[str, Any]:
    """Clear all localStorage entries on the current page.

    Removes every key-value pair from localStorage. Cannot be undone.
    Use with caution — localStorage often contains login tokens and user preferences.
    """
    try:
        page, ctx = await get_page(session_id=session_id, cdp_url=cdp_url)
    except BrowserNotAvailableError:
        return make_result("clear_local_storage", ok=False, error=no_browser_error())
    with Timer() as timer:
        try:
            # Capture the entry count before clearing so the result can report
            # how many pairs were removed.
            js_clear = "() => { const n = window.localStorage.length; window.localStorage.clear(); return n; }"
            removed = await page.evaluate(js_clear)
            timer.mark("sdk")
            return make_result(
                "clear_local_storage",
                browser_context=ctx,
                data={"cleared_count": removed},
                timing_ms=timer.timing_ms,
            )
        except Exception as exc:
            return make_result(
                "clear_local_storage",
                ok=False,
                browser_context=ctx,
                timing_ms=timer.timing_ms,
                error=make_error(ErrorCode.ACTION_FAILED, str(exc), "Check page has loaded"),
            )

View file

@ -34,7 +34,6 @@ if typing.TYPE_CHECKING:
AzureSecretParameter, AzureSecretParameter,
AzureVaultCredentialParameter, AzureVaultCredentialParameter,
AzureVaultCredentialParameterYaml, AzureVaultCredentialParameterYaml,
BillingStateResponse,
BitwardenCreditCardDataParameter, BitwardenCreditCardDataParameter,
BitwardenCreditCardDataParameterYaml, BitwardenCreditCardDataParameterYaml,
BitwardenLoginCredentialParameter, BitwardenLoginCredentialParameter,
@ -51,8 +50,6 @@ if typing.TYPE_CHECKING:
BranchCriteriaYamlCriteriaType, BranchCriteriaYamlCriteriaType,
BrowserProfile, BrowserProfile,
BrowserSessionResponse, BrowserSessionResponse,
ChangeTierResponse,
CheckoutSessionResponse,
ClickAction, ClickAction,
ClickActionData, ClickActionData,
ClickContext, ClickContext,
@ -95,6 +92,7 @@ if typing.TYPE_CHECKING:
CredentialResponse, CredentialResponse,
CredentialResponseCredential, CredentialResponseCredential,
CredentialTypeOutput, CredentialTypeOutput,
CredentialVaultType,
CreditCardCredentialResponse, CreditCardCredentialResponse,
DownloadToS3Block, DownloadToS3Block,
DownloadToS3BlockYaml, DownloadToS3BlockYaml,
@ -286,8 +284,6 @@ if typing.TYPE_CHECKING:
NonEmptyPasswordCredential, NonEmptyPasswordCredential,
OnePasswordCredentialParameter, OnePasswordCredentialParameter,
OnePasswordCredentialParameterYaml, OnePasswordCredentialParameterYaml,
OrganizationScheduleItem,
OrganizationScheduleListResponse,
OtpType, OtpType,
OutputParameter, OutputParameter,
OutputParameterYaml, OutputParameterYaml,
@ -296,8 +292,6 @@ if typing.TYPE_CHECKING:
PdfParserBlock, PdfParserBlock,
PdfParserBlockYaml, PdfParserBlockYaml,
PersistentBrowserType, PersistentBrowserType,
PlanTier,
PortalSessionResponse,
PrintPageBlock, PrintPageBlock,
PrintPageBlockParametersItem, PrintPageBlockParametersItem,
PrintPageBlockParametersItem_AwsSecret, PrintPageBlockParametersItem_AwsSecret,
@ -357,6 +351,7 @@ if typing.TYPE_CHECKING:
TaskBlockParametersItem_Workflow, TaskBlockParametersItem_Workflow,
TaskBlockYaml, TaskBlockYaml,
TaskBlockYamlDataSchema, TaskBlockYamlDataSchema,
TaskRunListItem,
TaskRunRequest, TaskRunRequest,
TaskRunRequestDataExtractionSchema, TaskRunRequestDataExtractionSchema,
TaskRunRequestProxyLocation, TaskRunRequestProxyLocation,
@ -535,10 +530,6 @@ if typing.TYPE_CHECKING:
WorkflowRunTimeline, WorkflowRunTimeline,
WorkflowRunTimelineType, WorkflowRunTimelineType,
WorkflowRunTriggerType, WorkflowRunTriggerType,
WorkflowSchedule,
WorkflowScheduleListResponse,
WorkflowScheduleResponse,
WorkflowScheduleUpsertRequest,
WorkflowStatus, WorkflowStatus,
WorkflowTriggerBlock, WorkflowTriggerBlock,
WorkflowTriggerBlockParametersItem, WorkflowTriggerBlockParametersItem,
@ -556,8 +547,7 @@ if typing.TYPE_CHECKING:
WorkflowTriggerBlockYaml, WorkflowTriggerBlockYaml,
) )
from .errors import BadRequestError, ConflictError, ForbiddenError, NotFoundError, UnprocessableEntityError from .errors import BadRequestError, ConflictError, ForbiddenError, NotFoundError, UnprocessableEntityError
from . import agent, artifacts, scripts from . import artifacts, scripts
from .agent import ListOrganizationSchedulesApiV1SchedulesGetRequestStatus
from .client import AsyncSkyvern, Skyvern from .client import AsyncSkyvern, Skyvern
from .environment import SkyvernEnvironment from .environment import SkyvernEnvironment
from .version import __version__ from .version import __version__
@ -591,7 +581,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"AzureVaultCredentialParameter": ".types", "AzureVaultCredentialParameter": ".types",
"AzureVaultCredentialParameterYaml": ".types", "AzureVaultCredentialParameterYaml": ".types",
"BadRequestError": ".errors", "BadRequestError": ".errors",
"BillingStateResponse": ".types",
"BitwardenCreditCardDataParameter": ".types", "BitwardenCreditCardDataParameter": ".types",
"BitwardenCreditCardDataParameterYaml": ".types", "BitwardenCreditCardDataParameterYaml": ".types",
"BitwardenLoginCredentialParameter": ".types", "BitwardenLoginCredentialParameter": ".types",
@ -608,8 +597,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"BranchCriteriaYamlCriteriaType": ".types", "BranchCriteriaYamlCriteriaType": ".types",
"BrowserProfile": ".types", "BrowserProfile": ".types",
"BrowserSessionResponse": ".types", "BrowserSessionResponse": ".types",
"ChangeTierResponse": ".types",
"CheckoutSessionResponse": ".types",
"ClickAction": ".types", "ClickAction": ".types",
"ClickActionData": ".types", "ClickActionData": ".types",
"ClickContext": ".types", "ClickContext": ".types",
@ -653,6 +640,7 @@ _dynamic_imports: typing.Dict[str, str] = {
"CredentialResponse": ".types", "CredentialResponse": ".types",
"CredentialResponseCredential": ".types", "CredentialResponseCredential": ".types",
"CredentialTypeOutput": ".types", "CredentialTypeOutput": ".types",
"CredentialVaultType": ".types",
"CreditCardCredentialResponse": ".types", "CreditCardCredentialResponse": ".types",
"DownloadToS3Block": ".types", "DownloadToS3Block": ".types",
"DownloadToS3BlockYaml": ".types", "DownloadToS3BlockYaml": ".types",
@ -810,7 +798,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"InputTextAction": ".types", "InputTextAction": ".types",
"InputTextActionData": ".types", "InputTextActionData": ".types",
"JinjaBranchCriteria": ".types", "JinjaBranchCriteria": ".types",
"ListOrganizationSchedulesApiV1SchedulesGetRequestStatus": ".agent",
"LocateElementAction": ".types", "LocateElementAction": ".types",
"LoginBlock": ".types", "LoginBlock": ".types",
"LoginBlockDataSchema": ".types", "LoginBlockDataSchema": ".types",
@ -847,8 +834,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"NotFoundError": ".errors", "NotFoundError": ".errors",
"OnePasswordCredentialParameter": ".types", "OnePasswordCredentialParameter": ".types",
"OnePasswordCredentialParameterYaml": ".types", "OnePasswordCredentialParameterYaml": ".types",
"OrganizationScheduleItem": ".types",
"OrganizationScheduleListResponse": ".types",
"OtpType": ".types", "OtpType": ".types",
"OutputParameter": ".types", "OutputParameter": ".types",
"OutputParameterYaml": ".types", "OutputParameterYaml": ".types",
@ -857,8 +842,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"PdfParserBlock": ".types", "PdfParserBlock": ".types",
"PdfParserBlockYaml": ".types", "PdfParserBlockYaml": ".types",
"PersistentBrowserType": ".types", "PersistentBrowserType": ".types",
"PlanTier": ".types",
"PortalSessionResponse": ".types",
"PrintPageBlock": ".types", "PrintPageBlock": ".types",
"PrintPageBlockParametersItem": ".types", "PrintPageBlockParametersItem": ".types",
"PrintPageBlockParametersItem_AwsSecret": ".types", "PrintPageBlockParametersItem_AwsSecret": ".types",
@ -920,6 +903,7 @@ _dynamic_imports: typing.Dict[str, str] = {
"TaskBlockParametersItem_Workflow": ".types", "TaskBlockParametersItem_Workflow": ".types",
"TaskBlockYaml": ".types", "TaskBlockYaml": ".types",
"TaskBlockYamlDataSchema": ".types", "TaskBlockYamlDataSchema": ".types",
"TaskRunListItem": ".types",
"TaskRunRequest": ".types", "TaskRunRequest": ".types",
"TaskRunRequestDataExtractionSchema": ".types", "TaskRunRequestDataExtractionSchema": ".types",
"TaskRunRequestProxyLocation": ".types", "TaskRunRequestProxyLocation": ".types",
@ -1099,10 +1083,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"WorkflowRunTimeline": ".types", "WorkflowRunTimeline": ".types",
"WorkflowRunTimelineType": ".types", "WorkflowRunTimelineType": ".types",
"WorkflowRunTriggerType": ".types", "WorkflowRunTriggerType": ".types",
"WorkflowSchedule": ".types",
"WorkflowScheduleListResponse": ".types",
"WorkflowScheduleResponse": ".types",
"WorkflowScheduleUpsertRequest": ".types",
"WorkflowStatus": ".types", "WorkflowStatus": ".types",
"WorkflowTriggerBlock": ".types", "WorkflowTriggerBlock": ".types",
"WorkflowTriggerBlockParametersItem": ".types", "WorkflowTriggerBlockParametersItem": ".types",
@ -1119,7 +1099,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"WorkflowTriggerBlockParametersItem_Workflow": ".types", "WorkflowTriggerBlockParametersItem_Workflow": ".types",
"WorkflowTriggerBlockYaml": ".types", "WorkflowTriggerBlockYaml": ".types",
"__version__": ".version", "__version__": ".version",
"agent": ".agent",
"artifacts": ".artifacts", "artifacts": ".artifacts",
"scripts": ".scripts", "scripts": ".scripts",
} }
@ -1176,7 +1155,6 @@ __all__ = [
"AzureVaultCredentialParameter", "AzureVaultCredentialParameter",
"AzureVaultCredentialParameterYaml", "AzureVaultCredentialParameterYaml",
"BadRequestError", "BadRequestError",
"BillingStateResponse",
"BitwardenCreditCardDataParameter", "BitwardenCreditCardDataParameter",
"BitwardenCreditCardDataParameterYaml", "BitwardenCreditCardDataParameterYaml",
"BitwardenLoginCredentialParameter", "BitwardenLoginCredentialParameter",
@ -1193,8 +1171,6 @@ __all__ = [
"BranchCriteriaYamlCriteriaType", "BranchCriteriaYamlCriteriaType",
"BrowserProfile", "BrowserProfile",
"BrowserSessionResponse", "BrowserSessionResponse",
"ChangeTierResponse",
"CheckoutSessionResponse",
"ClickAction", "ClickAction",
"ClickActionData", "ClickActionData",
"ClickContext", "ClickContext",
@ -1238,6 +1214,7 @@ __all__ = [
"CredentialResponse", "CredentialResponse",
"CredentialResponseCredential", "CredentialResponseCredential",
"CredentialTypeOutput", "CredentialTypeOutput",
"CredentialVaultType",
"CreditCardCredentialResponse", "CreditCardCredentialResponse",
"DownloadToS3Block", "DownloadToS3Block",
"DownloadToS3BlockYaml", "DownloadToS3BlockYaml",
@ -1395,7 +1372,6 @@ __all__ = [
"InputTextAction", "InputTextAction",
"InputTextActionData", "InputTextActionData",
"JinjaBranchCriteria", "JinjaBranchCriteria",
"ListOrganizationSchedulesApiV1SchedulesGetRequestStatus",
"LocateElementAction", "LocateElementAction",
"LoginBlock", "LoginBlock",
"LoginBlockDataSchema", "LoginBlockDataSchema",
@ -1432,8 +1408,6 @@ __all__ = [
"NotFoundError", "NotFoundError",
"OnePasswordCredentialParameter", "OnePasswordCredentialParameter",
"OnePasswordCredentialParameterYaml", "OnePasswordCredentialParameterYaml",
"OrganizationScheduleItem",
"OrganizationScheduleListResponse",
"OtpType", "OtpType",
"OutputParameter", "OutputParameter",
"OutputParameterYaml", "OutputParameterYaml",
@ -1442,8 +1416,6 @@ __all__ = [
"PdfParserBlock", "PdfParserBlock",
"PdfParserBlockYaml", "PdfParserBlockYaml",
"PersistentBrowserType", "PersistentBrowserType",
"PlanTier",
"PortalSessionResponse",
"PrintPageBlock", "PrintPageBlock",
"PrintPageBlockParametersItem", "PrintPageBlockParametersItem",
"PrintPageBlockParametersItem_AwsSecret", "PrintPageBlockParametersItem_AwsSecret",
@ -1505,6 +1477,7 @@ __all__ = [
"TaskBlockParametersItem_Workflow", "TaskBlockParametersItem_Workflow",
"TaskBlockYaml", "TaskBlockYaml",
"TaskBlockYamlDataSchema", "TaskBlockYamlDataSchema",
"TaskRunListItem",
"TaskRunRequest", "TaskRunRequest",
"TaskRunRequestDataExtractionSchema", "TaskRunRequestDataExtractionSchema",
"TaskRunRequestProxyLocation", "TaskRunRequestProxyLocation",
@ -1684,10 +1657,6 @@ __all__ = [
"WorkflowRunTimeline", "WorkflowRunTimeline",
"WorkflowRunTimelineType", "WorkflowRunTimelineType",
"WorkflowRunTriggerType", "WorkflowRunTriggerType",
"WorkflowSchedule",
"WorkflowScheduleListResponse",
"WorkflowScheduleResponse",
"WorkflowScheduleUpsertRequest",
"WorkflowStatus", "WorkflowStatus",
"WorkflowTriggerBlock", "WorkflowTriggerBlock",
"WorkflowTriggerBlockParametersItem", "WorkflowTriggerBlockParametersItem",
@ -1704,7 +1673,6 @@ __all__ = [
"WorkflowTriggerBlockParametersItem_Workflow", "WorkflowTriggerBlockParametersItem_Workflow",
"WorkflowTriggerBlockYaml", "WorkflowTriggerBlockYaml",
"__version__", "__version__",
"agent",
"artifacts", "artifacts",
"scripts", "scripts",
] ]

View file

@ -1,34 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
# isort: skip_file
import typing
from importlib import import_module
if typing.TYPE_CHECKING:
from .types import ListOrganizationSchedulesApiV1SchedulesGetRequestStatus
_dynamic_imports: typing.Dict[str, str] = {"ListOrganizationSchedulesApiV1SchedulesGetRequestStatus": ".types"}
def __getattr__(attr_name: str) -> typing.Any:
    """Lazily resolve public names via module-level __getattr__ (PEP 562).

    Each entry in _dynamic_imports maps an attribute name to the relative
    module that defines it; the submodule is imported on first access.
    """
    module_name = _dynamic_imports.get(attr_name)
    if module_name is None:
        raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}")
    try:
        module = import_module(module_name, __package__)
        # When the mapping is ".name" -> the attribute "name", the target is
        # the submodule itself rather than a name defined inside it.
        if module_name == f".{attr_name}":
            return module
        else:
            return getattr(module, attr_name)
    except ImportError as e:
        # Re-raise with the attribute/module context while chaining the cause.
        raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e
    except AttributeError as e:
        raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e
def __dir__():
    """Expose the lazily importable names to dir() and tab completion."""
    lazy_attrs = list(_dynamic_imports.keys())
    return sorted(lazy_attrs)
__all__ = ["ListOrganizationSchedulesApiV1SchedulesGetRequestStatus"]

View file

@ -1,842 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.request_options import RequestOptions
from ..types.organization_schedule_list_response import OrganizationScheduleListResponse
from ..types.workflow_schedule_list_response import WorkflowScheduleListResponse
from ..types.workflow_schedule_response import WorkflowScheduleResponse
from .raw_client import AsyncRawAgentClient, RawAgentClient
from .types.list_organization_schedules_api_v1schedules_get_request_status import (
ListOrganizationSchedulesApiV1SchedulesGetRequestStatus,
)
# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)
class AgentClient:
def __init__(self, *, client_wrapper: SyncClientWrapper):
self._raw_client = RawAgentClient(client_wrapper=client_wrapper)
@property
def with_raw_response(self) -> RawAgentClient:
"""
Retrieves a raw implementation of this client that returns raw responses.
Returns
-------
RawAgentClient
"""
return self._raw_client
def list_organization_schedules(
self,
*,
page: typing.Optional[int] = None,
page_size: typing.Optional[int] = None,
status: typing.Optional[ListOrganizationSchedulesApiV1SchedulesGetRequestStatus] = None,
search: typing.Optional[str] = None,
request_options: typing.Optional[RequestOptions] = None,
) -> OrganizationScheduleListResponse:
"""
Parameters
----------
page : typing.Optional[int]
page_size : typing.Optional[int]
status : typing.Optional[ListOrganizationSchedulesApiV1SchedulesGetRequestStatus]
Filter by status: 'active' or 'paused'
search : typing.Optional[str]
Search by workflow title or schedule name
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
OrganizationScheduleListResponse
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.agent.list_organization_schedules(
page=1,
page_size=1,
status="active",
search="search",
)
"""
_response = self._raw_client.list_organization_schedules(
page=page, page_size=page_size, status=status, search=search, request_options=request_options
)
return _response.data
def list_workflow_schedules(
self, workflow_permanent_id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> WorkflowScheduleListResponse:
"""
Parameters
----------
workflow_permanent_id : str
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
WorkflowScheduleListResponse
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.agent.list_workflow_schedules(
workflow_permanent_id="workflow_permanent_id",
)
"""
_response = self._raw_client.list_workflow_schedules(workflow_permanent_id, request_options=request_options)
return _response.data
def create_workflow_schedule(
self,
workflow_permanent_id: str,
*,
cron_expression: str,
timezone: str,
enabled: typing.Optional[bool] = OMIT,
parameters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
name: typing.Optional[str] = OMIT,
description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> WorkflowScheduleResponse:
"""
Parameters
----------
workflow_permanent_id : str
cron_expression : str
timezone : str
enabled : typing.Optional[bool]
parameters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
name : typing.Optional[str]
description : typing.Optional[str]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
WorkflowScheduleResponse
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.agent.create_workflow_schedule(
workflow_permanent_id="workflow_permanent_id",
cron_expression="cron_expression",
timezone="timezone",
)
"""
_response = self._raw_client.create_workflow_schedule(
workflow_permanent_id,
cron_expression=cron_expression,
timezone=timezone,
enabled=enabled,
parameters=parameters,
name=name,
description=description,
request_options=request_options,
)
return _response.data
def get_workflow_schedule(
self,
workflow_permanent_id: str,
workflow_schedule_id: str,
*,
request_options: typing.Optional[RequestOptions] = None,
) -> WorkflowScheduleResponse:
"""
Parameters
----------
workflow_permanent_id : str
workflow_schedule_id : str
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
WorkflowScheduleResponse
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.agent.get_workflow_schedule(
workflow_permanent_id="workflow_permanent_id",
workflow_schedule_id="workflow_schedule_id",
)
"""
_response = self._raw_client.get_workflow_schedule(
workflow_permanent_id, workflow_schedule_id, request_options=request_options
)
return _response.data
def update_workflow_schedule(
self,
workflow_permanent_id: str,
workflow_schedule_id: str,
*,
cron_expression: str,
timezone: str,
enabled: typing.Optional[bool] = OMIT,
parameters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
name: typing.Optional[str] = OMIT,
description: typing.Optional[str] = OMIT,
request_options: typing.Optional[RequestOptions] = None,
) -> WorkflowScheduleResponse:
"""
Parameters
----------
workflow_permanent_id : str
workflow_schedule_id : str
cron_expression : str
timezone : str
enabled : typing.Optional[bool]
parameters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
name : typing.Optional[str]
description : typing.Optional[str]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
WorkflowScheduleResponse
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.agent.update_workflow_schedule(
workflow_permanent_id="workflow_permanent_id",
workflow_schedule_id="workflow_schedule_id",
cron_expression="cron_expression",
timezone="timezone",
)
"""
_response = self._raw_client.update_workflow_schedule(
workflow_permanent_id,
workflow_schedule_id,
cron_expression=cron_expression,
timezone=timezone,
enabled=enabled,
parameters=parameters,
name=name,
description=description,
request_options=request_options,
)
return _response.data
def delete_workflow_schedule_route(
    self,
    workflow_permanent_id: str,
    workflow_schedule_id: str,
    *,
    request_options: typing.Optional[RequestOptions] = None,
) -> typing.Dict[str, bool]:
    """Delete a workflow schedule; returns a confirmation mapping.

    Parameters
    ----------
    workflow_permanent_id : str
    workflow_schedule_id : str
    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    typing.Dict[str, bool]
        Successful Response
    """
    # Delegate to the raw client, then unwrap the parsed payload.
    raw = self._raw_client.delete_workflow_schedule_route(
        workflow_permanent_id, workflow_schedule_id, request_options=request_options
    )
    return raw.data
def enable_workflow_schedule(
    self,
    workflow_permanent_id: str,
    workflow_schedule_id: str,
    *,
    request_options: typing.Optional[RequestOptions] = None,
) -> WorkflowScheduleResponse:
    """Turn a workflow schedule on and return the updated schedule.

    Parameters
    ----------
    workflow_permanent_id : str
    workflow_schedule_id : str
    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    WorkflowScheduleResponse
        Successful Response
    """
    # Delegate to the raw client, then unwrap the parsed payload.
    raw = self._raw_client.enable_workflow_schedule(
        workflow_permanent_id, workflow_schedule_id, request_options=request_options
    )
    return raw.data
def disable_workflow_schedule(
    self,
    workflow_permanent_id: str,
    workflow_schedule_id: str,
    *,
    request_options: typing.Optional[RequestOptions] = None,
) -> WorkflowScheduleResponse:
    """Turn a workflow schedule off and return the updated schedule.

    Parameters
    ----------
    workflow_permanent_id : str
    workflow_schedule_id : str
    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    WorkflowScheduleResponse
        Successful Response
    """
    # Delegate to the raw client, then unwrap the parsed payload.
    raw = self._raw_client.disable_workflow_schedule(
        workflow_permanent_id, workflow_schedule_id, request_options=request_options
    )
    return raw.data
class AsyncAgentClient:
    """Asynchronous client for the workflow-schedule endpoints.

    A thin wrapper over :class:`AsyncRawAgentClient` that unwraps each raw
    HTTP response into its parsed ``.data`` payload.
    """

    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._raw_client = AsyncRawAgentClient(client_wrapper=client_wrapper)

    @property
    def with_raw_response(self) -> AsyncRawAgentClient:
        """Return the underlying client implementation that yields raw responses.

        Returns
        -------
        AsyncRawAgentClient
        """
        return self._raw_client

    async def list_organization_schedules(
        self,
        *,
        page: typing.Optional[int] = None,
        page_size: typing.Optional[int] = None,
        status: typing.Optional[ListOrganizationSchedulesApiV1SchedulesGetRequestStatus] = None,
        search: typing.Optional[str] = None,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> OrganizationScheduleListResponse:
        """List schedules across the organization, with optional filtering.

        Parameters
        ----------
        page : typing.Optional[int]
        page_size : typing.Optional[int]
        status : typing.Optional[ListOrganizationSchedulesApiV1SchedulesGetRequestStatus]
            Filter by status: 'active' or 'paused'
        search : typing.Optional[str]
            Search by workflow title or schedule name
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        OrganizationScheduleListResponse
            Successful Response
        """
        response = await self._raw_client.list_organization_schedules(
            page=page, page_size=page_size, status=status, search=search, request_options=request_options
        )
        return response.data

    async def list_workflow_schedules(
        self, workflow_permanent_id: str, *, request_options: typing.Optional[RequestOptions] = None
    ) -> WorkflowScheduleListResponse:
        """List every schedule attached to one workflow.

        Parameters
        ----------
        workflow_permanent_id : str
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        WorkflowScheduleListResponse
            Successful Response
        """
        response = await self._raw_client.list_workflow_schedules(
            workflow_permanent_id, request_options=request_options
        )
        return response.data

    async def create_workflow_schedule(
        self,
        workflow_permanent_id: str,
        *,
        cron_expression: str,
        timezone: str,
        enabled: typing.Optional[bool] = OMIT,
        parameters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        name: typing.Optional[str] = OMIT,
        description: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> WorkflowScheduleResponse:
        """Create a new schedule for a workflow.

        Parameters
        ----------
        workflow_permanent_id : str
        cron_expression : str
        timezone : str
        enabled : typing.Optional[bool]
        parameters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
        name : typing.Optional[str]
        description : typing.Optional[str]
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        WorkflowScheduleResponse
            Successful Response
        """
        response = await self._raw_client.create_workflow_schedule(
            workflow_permanent_id,
            cron_expression=cron_expression,
            timezone=timezone,
            enabled=enabled,
            parameters=parameters,
            name=name,
            description=description,
            request_options=request_options,
        )
        return response.data

    async def get_workflow_schedule(
        self,
        workflow_permanent_id: str,
        workflow_schedule_id: str,
        *,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> WorkflowScheduleResponse:
        """Fetch a single workflow schedule by id.

        Parameters
        ----------
        workflow_permanent_id : str
        workflow_schedule_id : str
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        WorkflowScheduleResponse
            Successful Response
        """
        response = await self._raw_client.get_workflow_schedule(
            workflow_permanent_id, workflow_schedule_id, request_options=request_options
        )
        return response.data

    async def update_workflow_schedule(
        self,
        workflow_permanent_id: str,
        workflow_schedule_id: str,
        *,
        cron_expression: str,
        timezone: str,
        enabled: typing.Optional[bool] = OMIT,
        parameters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = OMIT,
        name: typing.Optional[str] = OMIT,
        description: typing.Optional[str] = OMIT,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> WorkflowScheduleResponse:
        """Update an existing schedule on a workflow and return the result.

        Parameters
        ----------
        workflow_permanent_id : str
        workflow_schedule_id : str
        cron_expression : str
        timezone : str
        enabled : typing.Optional[bool]
        parameters : typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]]
        name : typing.Optional[str]
        description : typing.Optional[str]
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        WorkflowScheduleResponse
            Successful Response
        """
        response = await self._raw_client.update_workflow_schedule(
            workflow_permanent_id,
            workflow_schedule_id,
            cron_expression=cron_expression,
            timezone=timezone,
            enabled=enabled,
            parameters=parameters,
            name=name,
            description=description,
            request_options=request_options,
        )
        return response.data

    async def delete_workflow_schedule_route(
        self,
        workflow_permanent_id: str,
        workflow_schedule_id: str,
        *,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> typing.Dict[str, bool]:
        """Delete a workflow schedule; returns a confirmation mapping.

        Parameters
        ----------
        workflow_permanent_id : str
        workflow_schedule_id : str
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.Dict[str, bool]
            Successful Response
        """
        response = await self._raw_client.delete_workflow_schedule_route(
            workflow_permanent_id, workflow_schedule_id, request_options=request_options
        )
        return response.data

    async def enable_workflow_schedule(
        self,
        workflow_permanent_id: str,
        workflow_schedule_id: str,
        *,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> WorkflowScheduleResponse:
        """Turn a workflow schedule on and return the updated schedule.

        Parameters
        ----------
        workflow_permanent_id : str
        workflow_schedule_id : str
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        WorkflowScheduleResponse
            Successful Response
        """
        response = await self._raw_client.enable_workflow_schedule(
            workflow_permanent_id, workflow_schedule_id, request_options=request_options
        )
        return response.data

    async def disable_workflow_schedule(
        self,
        workflow_permanent_id: str,
        workflow_schedule_id: str,
        *,
        request_options: typing.Optional[RequestOptions] = None,
    ) -> WorkflowScheduleResponse:
        """Turn a workflow schedule off and return the updated schedule.

        Parameters
        ----------
        workflow_permanent_id : str
        workflow_schedule_id : str
        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        WorkflowScheduleResponse
            Successful Response
        """
        response = await self._raw_client.disable_workflow_schedule(
            workflow_permanent_id, workflow_schedule_id, request_options=request_options
        )
        return response.data

File diff suppressed because it is too large Load diff

View file

@ -1,38 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
# isort: skip_file
import typing
from importlib import import_module
if typing.TYPE_CHECKING:
from .list_organization_schedules_api_v1schedules_get_request_status import (
ListOrganizationSchedulesApiV1SchedulesGetRequestStatus,
)
_dynamic_imports: typing.Dict[str, str] = {
"ListOrganizationSchedulesApiV1SchedulesGetRequestStatus": ".list_organization_schedules_api_v1schedules_get_request_status"
}
def __getattr__(attr_name: str) -> typing.Any:
    """Resolve a public name lazily via its registered submodule path (PEP 562)."""
    module_path = _dynamic_imports.get(attr_name)
    if module_path is None:
        raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}")
    try:
        loaded = import_module(module_path, __package__)
        # When the entry maps a name to its same-named submodule, hand back
        # the module itself; otherwise pull the attribute out of it.
        return loaded if module_path == f".{attr_name}" else getattr(loaded, attr_name)
    except ImportError as e:
        raise ImportError(f"Failed to import {attr_name} from {module_path}: {e}") from e
    except AttributeError as e:
        raise AttributeError(f"Failed to get {attr_name} from {module_path}: {e}") from e
def __dir__():
    """Expose the lazily importable names to dir()/autocomplete."""
    # Iterating the dict yields its keys, so no intermediate list is needed.
    return sorted(_dynamic_imports)
__all__ = ["ListOrganizationSchedulesApiV1SchedulesGetRequestStatus"]

View file

@ -1,5 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing

# Status filter accepted by the list-organization-schedules endpoint:
# "active" or "paused". typing.Any is unioned in so values outside the
# known literals still pass type checking (forward compatibility).
ListOrganizationSchedulesApiV1SchedulesGetRequestStatus = typing.Union[typing.Literal["active", "paused"], typing.Any]

View file

@ -13,30 +13,28 @@ from .environment import SkyvernEnvironment
from .raw_client import AsyncRawSkyvern, RawSkyvern from .raw_client import AsyncRawSkyvern, RawSkyvern
from .types.artifact import Artifact from .types.artifact import Artifact
from .types.artifact_type import ArtifactType from .types.artifact_type import ArtifactType
from .types.billing_state_response import BillingStateResponse
from .types.browser_profile import BrowserProfile from .types.browser_profile import BrowserProfile
from .types.browser_session_response import BrowserSessionResponse from .types.browser_session_response import BrowserSessionResponse
from .types.change_tier_response import ChangeTierResponse
from .types.checkout_session_response import CheckoutSessionResponse
from .types.create_credential_request_credential import CreateCredentialRequestCredential from .types.create_credential_request_credential import CreateCredentialRequestCredential
from .types.create_script_response import CreateScriptResponse from .types.create_script_response import CreateScriptResponse
from .types.credential_response import CredentialResponse from .types.credential_response import CredentialResponse
from .types.credential_vault_type import CredentialVaultType
from .types.extensions import Extensions from .types.extensions import Extensions
from .types.folder import Folder from .types.folder import Folder
from .types.get_run_response import GetRunResponse from .types.get_run_response import GetRunResponse
from .types.otp_type import OtpType from .types.otp_type import OtpType
from .types.persistent_browser_type import PersistentBrowserType from .types.persistent_browser_type import PersistentBrowserType
from .types.plan_tier import PlanTier
from .types.portal_session_response import PortalSessionResponse
from .types.proxy_location import ProxyLocation from .types.proxy_location import ProxyLocation
from .types.retry_run_webhook_request import RetryRunWebhookRequest from .types.retry_run_webhook_request import RetryRunWebhookRequest
from .types.run_engine import RunEngine from .types.run_engine import RunEngine
from .types.run_sdk_action_request_action import RunSdkActionRequestAction from .types.run_sdk_action_request_action import RunSdkActionRequestAction
from .types.run_sdk_action_response import RunSdkActionResponse from .types.run_sdk_action_response import RunSdkActionResponse
from .types.run_status import RunStatus
from .types.script import Script from .types.script import Script
from .types.script_file_create import ScriptFileCreate from .types.script_file_create import ScriptFileCreate
from .types.skyvern_forge_sdk_schemas_credentials_credential_type import SkyvernForgeSdkSchemasCredentialsCredentialType from .types.skyvern_forge_sdk_schemas_credentials_credential_type import SkyvernForgeSdkSchemasCredentialsCredentialType
from .types.skyvern_schemas_run_blocks_credential_type import SkyvernSchemasRunBlocksCredentialType from .types.skyvern_schemas_run_blocks_credential_type import SkyvernSchemasRunBlocksCredentialType
from .types.task_run_list_item import TaskRunListItem
from .types.task_run_request_data_extraction_schema import TaskRunRequestDataExtractionSchema from .types.task_run_request_data_extraction_schema import TaskRunRequestDataExtractionSchema
from .types.task_run_request_proxy_location import TaskRunRequestProxyLocation from .types.task_run_request_proxy_location import TaskRunRequestProxyLocation
from .types.task_run_response import TaskRunResponse from .types.task_run_response import TaskRunResponse
@ -52,7 +50,6 @@ from .types.workflow_run_timeline import WorkflowRunTimeline
from .types.workflow_status import WorkflowStatus from .types.workflow_status import WorkflowStatus
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
from .agent.client import AgentClient, AsyncAgentClient
from .artifacts.client import ArtifactsClient, AsyncArtifactsClient from .artifacts.client import ArtifactsClient, AsyncArtifactsClient
from .scripts.client import AsyncScriptsClient, ScriptsClient from .scripts.client import AsyncScriptsClient, ScriptsClient
# this is used as the default value for optional parameters # this is used as the default value for optional parameters
@ -127,7 +124,6 @@ class Skyvern:
self._raw_client = RawSkyvern(client_wrapper=self._client_wrapper) self._raw_client = RawSkyvern(client_wrapper=self._client_wrapper)
self._artifacts: typing.Optional[ArtifactsClient] = None self._artifacts: typing.Optional[ArtifactsClient] = None
self._scripts: typing.Optional[ScriptsClient] = None self._scripts: typing.Optional[ScriptsClient] = None
self._agent: typing.Optional[AgentClient] = None
@property @property
def with_raw_response(self) -> RawSkyvern: def with_raw_response(self) -> RawSkyvern:
@ -269,7 +265,7 @@ class Skyvern:
The CDP address for the task. The CDP address for the task.
run_with : typing.Optional[str] run_with : typing.Optional[str]
Whether to run the task with agent or code. Whether to run the task with agent or code. Null means use the default.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -422,7 +418,7 @@ class Skyvern:
Whether to fallback to AI if the workflow run fails. Whether to fallback to AI if the workflow run fails.
run_with : typing.Optional[str] run_with : typing.Optional[str]
Whether to run the workflow with agent, code, or code_v2 (adaptive caching). Whether to run the workflow with agent or code. Null inherits from the workflow setting.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -1007,36 +1003,6 @@ class Skyvern:
) )
return _response.data return _response.data
def get_artifact(self, artifact_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> Artifact:
"""
Get an artifact
Parameters
----------
artifact_id : str
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
Artifact
Successfully retrieved artifact
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.get_artifact(
artifact_id="artifact_id",
)
"""
_response = self._raw_client.get_artifact(artifact_id, request_options=request_options)
return _response.data
def get_run_artifacts( def get_run_artifacts(
self, self,
run_id: str, run_id: str,
@ -1151,6 +1117,53 @@ class Skyvern:
_response = self._raw_client.get_run_timeline(run_id, request_options=request_options) _response = self._raw_client.get_run_timeline(run_id, request_options=request_options)
return _response.data return _response.data
def get_runs_v2(
self,
*,
page: typing.Optional[int] = None,
page_size: typing.Optional[int] = None,
status: typing.Optional[typing.Union[RunStatus, typing.Sequence[RunStatus]]] = None,
search_key: typing.Optional[str] = None,
request_options: typing.Optional[RequestOptions] = None,
) -> typing.List[TaskRunListItem]:
"""
Parameters
----------
page : typing.Optional[int]
page_size : typing.Optional[int]
status : typing.Optional[typing.Union[RunStatus, typing.Sequence[RunStatus]]]
search_key : typing.Optional[str]
Case-insensitive substring search (min 3 chars for trigram index).
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
typing.List[TaskRunListItem]
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.get_runs_v2(
page=1,
page_size=1,
search_key="search_key",
)
"""
_response = self._raw_client.get_runs_v2(
page=page, page_size=page_size, status=status, search_key=search_key, request_options=request_options
)
return _response.data
def get_workflow_runs( def get_workflow_runs(
self, self,
*, *,
@ -1768,6 +1781,7 @@ class Skyvern:
*, *,
page: typing.Optional[int] = None, page: typing.Optional[int] = None,
page_size: typing.Optional[int] = None, page_size: typing.Optional[int] = None,
vault_type: typing.Optional[CredentialVaultType] = None,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> typing.List[CredentialResponse]: ) -> typing.List[CredentialResponse]:
""" """
@ -1781,6 +1795,9 @@ class Skyvern:
page_size : typing.Optional[int] page_size : typing.Optional[int]
Number of items per page Number of items per page
vault_type : typing.Optional[CredentialVaultType]
Filter credentials by vault type (e.g. 'custom', 'bitwarden', 'azure_vault')
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -1799,9 +1816,12 @@ class Skyvern:
client.get_credentials( client.get_credentials(
page=1, page=1,
page_size=10, page_size=10,
vault_type="bitwarden",
) )
""" """
_response = self._raw_client.get_credentials(page=page, page_size=page_size, request_options=request_options) _response = self._raw_client.get_credentials(
page=page, page_size=page_size, vault_type=vault_type, request_options=request_options
)
return _response.data return _response.data
def create_credential( def create_credential(
@ -1810,6 +1830,7 @@ class Skyvern:
name: str, name: str,
credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType, credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType,
credential: CreateCredentialRequestCredential, credential: CreateCredentialRequestCredential,
vault_type: typing.Optional[CredentialVaultType] = OMIT,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> CredentialResponse: ) -> CredentialResponse:
""" """
@ -1826,6 +1847,9 @@ class Skyvern:
credential : CreateCredentialRequestCredential credential : CreateCredentialRequestCredential
The credential data to store The credential data to store
vault_type : typing.Optional[CredentialVaultType]
Which vault to store this credential in. If omitted, uses the instance default. Use this to mix Skyvern-hosted and custom credentials within the same organization.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -1851,7 +1875,11 @@ class Skyvern:
) )
""" """
_response = self._raw_client.create_credential( _response = self._raw_client.create_credential(
name=name, credential_type=credential_type, credential=credential, request_options=request_options name=name,
credential_type=credential_type,
credential=credential,
vault_type=vault_type,
request_options=request_options,
) )
return _response.data return _response.data
@ -1862,6 +1890,7 @@ class Skyvern:
name: str, name: str,
credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType, credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType,
credential: CreateCredentialRequestCredential, credential: CreateCredentialRequestCredential,
vault_type: typing.Optional[CredentialVaultType] = OMIT,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> CredentialResponse: ) -> CredentialResponse:
""" """
@ -1881,6 +1910,9 @@ class Skyvern:
credential : CreateCredentialRequestCredential credential : CreateCredentialRequestCredential
The credential data to store The credential data to store
vault_type : typing.Optional[CredentialVaultType]
Which vault to store this credential in. If omitted, uses the instance default. Use this to mix Skyvern-hosted and custom credentials within the same organization.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -1911,6 +1943,7 @@ class Skyvern:
name=name, name=name,
credential_type=credential_type, credential_type=credential_type,
credential=credential, credential=credential,
vault_type=vault_type,
request_options=request_options, request_options=request_options,
) )
return _response.data return _response.data
@ -2443,144 +2476,6 @@ class Skyvern:
) )
return _response.data return _response.data
def create_checkout_session_api_v1billing_checkout_post(
self, *, tier: PlanTier, request_options: typing.Optional[RequestOptions] = None
) -> CheckoutSessionResponse:
"""
Create a Stripe Checkout Session for subscribing to a tier.
Frontend should redirect the user to the returned URL.
After successful checkout, Stripe will send a webhook that we handle
to store the subscription and initialize billing state.
Returns 400 if org already has an active subscription (use portal instead).
Parameters
----------
tier : PlanTier
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
CheckoutSessionResponse
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.create_checkout_session_api_v1billing_checkout_post(
tier="free",
)
"""
_response = self._raw_client.create_checkout_session_api_v1billing_checkout_post(
tier=tier, request_options=request_options
)
return _response.data
def create_portal_session_api_v1billing_portal_post(
self, *, request_options: typing.Optional[RequestOptions] = None
) -> PortalSessionResponse:
"""
Create a Stripe Customer Portal session for managing subscription.
Frontend should redirect the user to the returned URL.
The portal allows users to:
- Update payment methods
- Upgrade/downgrade plans
- Cancel subscription
- View invoices
Parameters
----------
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
PortalSessionResponse
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.create_portal_session_api_v1billing_portal_post()
"""
_response = self._raw_client.create_portal_session_api_v1billing_portal_post(request_options=request_options)
return _response.data
def get_organization_billing_api_v1billing_state_get(
self, *, request_options: typing.Optional[RequestOptions] = None
) -> typing.Optional[BillingStateResponse]:
"""
Parameters
----------
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
typing.Optional[BillingStateResponse]
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.get_organization_billing_api_v1billing_state_get()
"""
_response = self._raw_client.get_organization_billing_api_v1billing_state_get(request_options=request_options)
return _response.data
def change_tier_api_v1billing_change_tier_post(
self, *, tier: PlanTier, request_options: typing.Optional[RequestOptions] = None
) -> ChangeTierResponse:
"""
Redirect to Stripe Portal for tier changes.
Portal handles proration based on configured settings:
- Upgrades: Immediate proration charge
- Downgrades: Apply at end of billing period
Parameters
----------
tier : PlanTier
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
ChangeTierResponse
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.change_tier_api_v1billing_change_tier_post(
tier="free",
)
"""
_response = self._raw_client.change_tier_api_v1billing_change_tier_post(
tier=tier, request_options=request_options
)
return _response.data
@property @property
def artifacts(self): def artifacts(self):
if self._artifacts is None: if self._artifacts is None:
@ -2597,14 +2492,6 @@ class Skyvern:
self._scripts = ScriptsClient(client_wrapper=self._client_wrapper) self._scripts = ScriptsClient(client_wrapper=self._client_wrapper)
return self._scripts return self._scripts
@property
def agent(self):
if self._agent is None:
from .agent.client import AgentClient # noqa: E402
self._agent = AgentClient(client_wrapper=self._client_wrapper)
return self._agent
class AsyncSkyvern: class AsyncSkyvern:
""" """
@ -2674,7 +2561,6 @@ class AsyncSkyvern:
self._raw_client = AsyncRawSkyvern(client_wrapper=self._client_wrapper) self._raw_client = AsyncRawSkyvern(client_wrapper=self._client_wrapper)
self._artifacts: typing.Optional[AsyncArtifactsClient] = None self._artifacts: typing.Optional[AsyncArtifactsClient] = None
self._scripts: typing.Optional[AsyncScriptsClient] = None self._scripts: typing.Optional[AsyncScriptsClient] = None
self._agent: typing.Optional[AsyncAgentClient] = None
@property @property
def with_raw_response(self) -> AsyncRawSkyvern: def with_raw_response(self) -> AsyncRawSkyvern:
@ -2816,7 +2702,7 @@ class AsyncSkyvern:
The CDP address for the task. The CDP address for the task.
run_with : typing.Optional[str] run_with : typing.Optional[str]
Whether to run the task with agent or code. Whether to run the task with agent or code. Null means use the default.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -2977,7 +2863,7 @@ class AsyncSkyvern:
Whether to fallback to AI if the workflow run fails. Whether to fallback to AI if the workflow run fails.
run_with : typing.Optional[str] run_with : typing.Optional[str]
Whether to run the workflow with agent, code, or code_v2 (adaptive caching). Whether to run the workflow with agent or code. Null inherits from the workflow setting.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -3666,46 +3552,6 @@ class AsyncSkyvern:
) )
return _response.data return _response.data
async def get_artifact(
self, artifact_id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> Artifact:
"""
Get an artifact
Parameters
----------
artifact_id : str
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
Artifact
Successfully retrieved artifact
Examples
--------
import asyncio
from skyvern import AsyncSkyvern
client = AsyncSkyvern(
api_key="YOUR_API_KEY",
)
async def main() -> None:
await client.get_artifact(
artifact_id="artifact_id",
)
asyncio.run(main())
"""
_response = await self._raw_client.get_artifact(artifact_id, request_options=request_options)
return _response.data
async def get_run_artifacts( async def get_run_artifacts(
self, self,
run_id: str, run_id: str,
@ -3844,6 +3690,61 @@ class AsyncSkyvern:
_response = await self._raw_client.get_run_timeline(run_id, request_options=request_options) _response = await self._raw_client.get_run_timeline(run_id, request_options=request_options)
return _response.data return _response.data
async def get_runs_v2(
self,
*,
page: typing.Optional[int] = None,
page_size: typing.Optional[int] = None,
status: typing.Optional[typing.Union[RunStatus, typing.Sequence[RunStatus]]] = None,
search_key: typing.Optional[str] = None,
request_options: typing.Optional[RequestOptions] = None,
) -> typing.List[TaskRunListItem]:
"""
Parameters
----------
page : typing.Optional[int]
page_size : typing.Optional[int]
status : typing.Optional[typing.Union[RunStatus, typing.Sequence[RunStatus]]]
search_key : typing.Optional[str]
Case-insensitive substring search (min 3 chars for trigram index).
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
typing.List[TaskRunListItem]
Successful Response
Examples
--------
import asyncio
from skyvern import AsyncSkyvern
client = AsyncSkyvern(
api_key="YOUR_API_KEY",
)
async def main() -> None:
await client.get_runs_v2(
page=1,
page_size=1,
search_key="search_key",
)
asyncio.run(main())
"""
_response = await self._raw_client.get_runs_v2(
page=page, page_size=page_size, status=status, search_key=search_key, request_options=request_options
)
return _response.data
async def get_workflow_runs( async def get_workflow_runs(
self, self,
*, *,
@ -4565,6 +4466,7 @@ class AsyncSkyvern:
*, *,
page: typing.Optional[int] = None, page: typing.Optional[int] = None,
page_size: typing.Optional[int] = None, page_size: typing.Optional[int] = None,
vault_type: typing.Optional[CredentialVaultType] = None,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> typing.List[CredentialResponse]: ) -> typing.List[CredentialResponse]:
""" """
@ -4578,6 +4480,9 @@ class AsyncSkyvern:
page_size : typing.Optional[int] page_size : typing.Optional[int]
Number of items per page Number of items per page
vault_type : typing.Optional[CredentialVaultType]
Filter credentials by vault type (e.g. 'custom', 'bitwarden', 'azure_vault')
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -4601,13 +4506,14 @@ class AsyncSkyvern:
await client.get_credentials( await client.get_credentials(
page=1, page=1,
page_size=10, page_size=10,
vault_type="bitwarden",
) )
asyncio.run(main()) asyncio.run(main())
""" """
_response = await self._raw_client.get_credentials( _response = await self._raw_client.get_credentials(
page=page, page_size=page_size, request_options=request_options page=page, page_size=page_size, vault_type=vault_type, request_options=request_options
) )
return _response.data return _response.data
@ -4617,6 +4523,7 @@ class AsyncSkyvern:
name: str, name: str,
credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType, credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType,
credential: CreateCredentialRequestCredential, credential: CreateCredentialRequestCredential,
vault_type: typing.Optional[CredentialVaultType] = OMIT,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> CredentialResponse: ) -> CredentialResponse:
""" """
@ -4633,6 +4540,9 @@ class AsyncSkyvern:
credential : CreateCredentialRequestCredential credential : CreateCredentialRequestCredential
The credential data to store The credential data to store
vault_type : typing.Optional[CredentialVaultType]
Which vault to store this credential in. If omitted, uses the instance default. Use this to mix Skyvern-hosted and custom credentials within the same organization.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -4666,7 +4576,11 @@ class AsyncSkyvern:
asyncio.run(main()) asyncio.run(main())
""" """
_response = await self._raw_client.create_credential( _response = await self._raw_client.create_credential(
name=name, credential_type=credential_type, credential=credential, request_options=request_options name=name,
credential_type=credential_type,
credential=credential,
vault_type=vault_type,
request_options=request_options,
) )
return _response.data return _response.data
@ -4677,6 +4591,7 @@ class AsyncSkyvern:
name: str, name: str,
credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType, credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType,
credential: CreateCredentialRequestCredential, credential: CreateCredentialRequestCredential,
vault_type: typing.Optional[CredentialVaultType] = OMIT,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> CredentialResponse: ) -> CredentialResponse:
""" """
@ -4696,6 +4611,9 @@ class AsyncSkyvern:
credential : CreateCredentialRequestCredential credential : CreateCredentialRequestCredential
The credential data to store The credential data to store
vault_type : typing.Optional[CredentialVaultType]
Which vault to store this credential in. If omitted, uses the instance default. Use this to mix Skyvern-hosted and custom credentials within the same organization.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -4734,6 +4652,7 @@ class AsyncSkyvern:
name=name, name=name,
credential_type=credential_type, credential_type=credential_type,
credential=credential, credential=credential,
vault_type=vault_type,
request_options=request_options, request_options=request_options,
) )
return _response.data return _response.data
@ -5340,180 +5259,6 @@ class AsyncSkyvern:
) )
return _response.data return _response.data
async def create_checkout_session_api_v1billing_checkout_post(
    self, *, tier: PlanTier, request_options: typing.Optional[RequestOptions] = None
) -> CheckoutSessionResponse:
    """
    Create a Stripe Checkout Session for subscribing to a tier.

    The caller should redirect the user to the URL in the returned session.
    After a successful checkout, Stripe sends a webhook that the server
    handles to store the subscription and initialize billing state. The
    server returns 400 when the organization already has an active
    subscription (use the customer portal instead).

    Parameters
    ----------
    tier : PlanTier

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    CheckoutSessionResponse
        Successful Response

    Examples
    --------
    import asyncio

    from skyvern import AsyncSkyvern

    client = AsyncSkyvern(
        api_key="YOUR_API_KEY",
    )


    async def main() -> None:
        await client.create_checkout_session_api_v1billing_checkout_post(
            tier="free",
        )


    asyncio.run(main())
    """
    # The raw client performs the POST and translates HTTP errors into SDK exceptions.
    raw_response = await self._raw_client.create_checkout_session_api_v1billing_checkout_post(
        tier=tier, request_options=request_options
    )
    return raw_response.data
async def create_portal_session_api_v1billing_portal_post(
    self, *, request_options: typing.Optional[RequestOptions] = None
) -> PortalSessionResponse:
    """
    Create a Stripe Customer Portal session for managing a subscription.

    The caller should redirect the user to the URL in the returned session.
    The portal lets users update payment methods, upgrade or downgrade
    plans, cancel their subscription, and view invoices.

    Parameters
    ----------
    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    PortalSessionResponse
        Successful Response

    Examples
    --------
    import asyncio

    from skyvern import AsyncSkyvern

    client = AsyncSkyvern(
        api_key="YOUR_API_KEY",
    )


    async def main() -> None:
        await client.create_portal_session_api_v1billing_portal_post()


    asyncio.run(main())
    """
    # Delegate the HTTP round trip and error handling to the raw client.
    raw_response = await self._raw_client.create_portal_session_api_v1billing_portal_post(
        request_options=request_options
    )
    return raw_response.data
async def get_organization_billing_api_v1billing_state_get(
    self, *, request_options: typing.Optional[RequestOptions] = None
) -> typing.Optional[BillingStateResponse]:
    """
    Fetch the organization's billing state, or ``None`` when none exists.

    Parameters
    ----------
    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    typing.Optional[BillingStateResponse]
        Successful Response

    Examples
    --------
    import asyncio

    from skyvern import AsyncSkyvern

    client = AsyncSkyvern(
        api_key="YOUR_API_KEY",
    )


    async def main() -> None:
        await client.get_organization_billing_api_v1billing_state_get()


    asyncio.run(main())
    """
    # The raw client returns an HttpResponse wrapper; unwrap its payload here.
    raw_response = await self._raw_client.get_organization_billing_api_v1billing_state_get(
        request_options=request_options
    )
    return raw_response.data
async def change_tier_api_v1billing_change_tier_post(
    self, *, tier: PlanTier, request_options: typing.Optional[RequestOptions] = None
) -> ChangeTierResponse:
    """
    Redirect to the Stripe Portal for tier changes.

    The portal applies proration according to its configured settings:
    upgrades incur an immediate proration charge, while downgrades take
    effect at the end of the billing period.

    Parameters
    ----------
    tier : PlanTier

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    ChangeTierResponse
        Successful Response

    Examples
    --------
    import asyncio

    from skyvern import AsyncSkyvern

    client = AsyncSkyvern(
        api_key="YOUR_API_KEY",
    )


    async def main() -> None:
        await client.change_tier_api_v1billing_change_tier_post(
            tier="free",
        )


    asyncio.run(main())
    """
    # Delegate serialization, transport, and error mapping to the raw client.
    raw_response = await self._raw_client.change_tier_api_v1billing_change_tier_post(
        tier=tier, request_options=request_options
    )
    return raw_response.data
@property @property
def artifacts(self): def artifacts(self):
if self._artifacts is None: if self._artifacts is None:
@ -5530,14 +5275,6 @@ class AsyncSkyvern:
self._scripts = AsyncScriptsClient(client_wrapper=self._client_wrapper) self._scripts = AsyncScriptsClient(client_wrapper=self._client_wrapper)
return self._scripts return self._scripts
@property
def agent(self):
if self._agent is None:
from .agent.client import AsyncAgentClient # noqa: E402
self._agent = AsyncAgentClient(client_wrapper=self._client_wrapper)
return self._agent
def _get_base_url(*, base_url: typing.Optional[str] = None, environment: SkyvernEnvironment) -> str: def _get_base_url(*, base_url: typing.Optional[str] = None, environment: SkyvernEnvironment) -> str:
if base_url is not None: if base_url is not None:

View file

@ -22,10 +22,10 @@ class BaseClientWrapper:
def get_headers(self) -> typing.Dict[str, str]: def get_headers(self) -> typing.Dict[str, str]:
headers: typing.Dict[str, str] = { headers: typing.Dict[str, str] = {
"User-Agent": "skyvern/1.0.28", "User-Agent": "skyvern/1.0.29",
"X-Fern-Language": "Python", "X-Fern-Language": "Python",
"X-Fern-SDK-Name": "skyvern", "X-Fern-SDK-Name": "skyvern",
"X-Fern-SDK-Version": "1.0.28", "X-Fern-SDK-Version": "1.0.29",
**(self.get_custom_headers() or {}), **(self.get_custom_headers() or {}),
} }
if self._api_key is not None: if self._api_key is not None:

View file

@ -19,30 +19,28 @@ from .errors.not_found_error import NotFoundError
from .errors.unprocessable_entity_error import UnprocessableEntityError from .errors.unprocessable_entity_error import UnprocessableEntityError
from .types.artifact import Artifact from .types.artifact import Artifact
from .types.artifact_type import ArtifactType from .types.artifact_type import ArtifactType
from .types.billing_state_response import BillingStateResponse
from .types.browser_profile import BrowserProfile from .types.browser_profile import BrowserProfile
from .types.browser_session_response import BrowserSessionResponse from .types.browser_session_response import BrowserSessionResponse
from .types.change_tier_response import ChangeTierResponse
from .types.checkout_session_response import CheckoutSessionResponse
from .types.create_credential_request_credential import CreateCredentialRequestCredential from .types.create_credential_request_credential import CreateCredentialRequestCredential
from .types.create_script_response import CreateScriptResponse from .types.create_script_response import CreateScriptResponse
from .types.credential_response import CredentialResponse from .types.credential_response import CredentialResponse
from .types.credential_vault_type import CredentialVaultType
from .types.extensions import Extensions from .types.extensions import Extensions
from .types.folder import Folder from .types.folder import Folder
from .types.get_run_response import GetRunResponse from .types.get_run_response import GetRunResponse
from .types.otp_type import OtpType from .types.otp_type import OtpType
from .types.persistent_browser_type import PersistentBrowserType from .types.persistent_browser_type import PersistentBrowserType
from .types.plan_tier import PlanTier
from .types.portal_session_response import PortalSessionResponse
from .types.proxy_location import ProxyLocation from .types.proxy_location import ProxyLocation
from .types.retry_run_webhook_request import RetryRunWebhookRequest from .types.retry_run_webhook_request import RetryRunWebhookRequest
from .types.run_engine import RunEngine from .types.run_engine import RunEngine
from .types.run_sdk_action_request_action import RunSdkActionRequestAction from .types.run_sdk_action_request_action import RunSdkActionRequestAction
from .types.run_sdk_action_response import RunSdkActionResponse from .types.run_sdk_action_response import RunSdkActionResponse
from .types.run_status import RunStatus
from .types.script import Script from .types.script import Script
from .types.script_file_create import ScriptFileCreate from .types.script_file_create import ScriptFileCreate
from .types.skyvern_forge_sdk_schemas_credentials_credential_type import SkyvernForgeSdkSchemasCredentialsCredentialType from .types.skyvern_forge_sdk_schemas_credentials_credential_type import SkyvernForgeSdkSchemasCredentialsCredentialType
from .types.skyvern_schemas_run_blocks_credential_type import SkyvernSchemasRunBlocksCredentialType from .types.skyvern_schemas_run_blocks_credential_type import SkyvernSchemasRunBlocksCredentialType
from .types.task_run_list_item import TaskRunListItem
from .types.task_run_request_data_extraction_schema import TaskRunRequestDataExtractionSchema from .types.task_run_request_data_extraction_schema import TaskRunRequestDataExtractionSchema
from .types.task_run_request_proxy_location import TaskRunRequestProxyLocation from .types.task_run_request_proxy_location import TaskRunRequestProxyLocation
from .types.task_run_response import TaskRunResponse from .types.task_run_response import TaskRunResponse
@ -194,7 +192,7 @@ class RawSkyvern:
The CDP address for the task. The CDP address for the task.
run_with : typing.Optional[str] run_with : typing.Optional[str]
Whether to run the task with agent or code. Whether to run the task with agent or code. Null means use the default.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -382,7 +380,7 @@ class RawSkyvern:
Whether to fallback to AI if the workflow run fails. Whether to fallback to AI if the workflow run fails.
run_with : typing.Optional[str] run_with : typing.Optional[str]
Whether to run the workflow with agent, code, or code_v2 (adaptive caching). Whether to run the workflow with agent or code. Null inherits from the workflow setting.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -1312,66 +1310,6 @@ class RawSkyvern:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
def get_artifact(
    self, artifact_id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> HttpResponse[Artifact]:
    """
    Get an artifact by its identifier.

    Parameters
    ----------
    artifact_id : str

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    HttpResponse[Artifact]
        Successfully retrieved artifact
    """
    raw = self._client_wrapper.httpx_client.request(
        f"v1/artifacts/{jsonable_encoder(artifact_id)}",
        method="GET",
        request_options=request_options,
    )
    try:
        if 200 <= raw.status_code < 300:
            artifact = typing.cast(
                Artifact,
                parse_obj_as(
                    type_=Artifact,  # type: ignore
                    object_=raw.json(),
                ),
            )
            return HttpResponse(response=raw, data=artifact)
        # Map the documented error statuses onto their typed SDK exceptions.
        for expected_status, error_cls in ((404, NotFoundError), (422, UnprocessableEntityError)):
            if raw.status_code == expected_status:
                raise error_cls(
                    headers=dict(raw.headers),
                    body=typing.cast(
                        typing.Optional[typing.Any],
                        parse_obj_as(
                            type_=typing.Optional[typing.Any],  # type: ignore
                            object_=raw.json(),
                        ),
                    ),
                )
        fallback_body = raw.json()
    except JSONDecodeError:
        # The body was not valid JSON; report the raw text instead.
        raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=raw.text)
    raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=fallback_body)
def get_run_artifacts( def get_run_artifacts(
self, self,
run_id: str, run_id: str,
@ -1568,6 +1506,72 @@ class RawSkyvern:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
def get_runs_v2(
    self,
    *,
    page: typing.Optional[int] = None,
    page_size: typing.Optional[int] = None,
    status: typing.Optional[typing.Union[RunStatus, typing.Sequence[RunStatus]]] = None,
    search_key: typing.Optional[str] = None,
    request_options: typing.Optional[RequestOptions] = None,
) -> HttpResponse[typing.List[TaskRunListItem]]:
    """
    List task runs, optionally filtered by page, status, and a search string.

    Parameters
    ----------
    page : typing.Optional[int]

    page_size : typing.Optional[int]

    status : typing.Optional[typing.Union[RunStatus, typing.Sequence[RunStatus]]]

    search_key : typing.Optional[str]
        Case-insensitive substring search (min 3 chars for trigram index).

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    HttpResponse[typing.List[TaskRunListItem]]
        Successful Response
    """
    query_params = {
        "page": page,
        "page_size": page_size,
        "status": status,
        "search_key": search_key,
    }
    raw = self._client_wrapper.httpx_client.request(
        "v1/runs",
        method="GET",
        params=query_params,
        request_options=request_options,
    )
    try:
        if 200 <= raw.status_code < 300:
            items = typing.cast(
                typing.List[TaskRunListItem],
                parse_obj_as(
                    type_=typing.List[TaskRunListItem],  # type: ignore
                    object_=raw.json(),
                ),
            )
            return HttpResponse(response=raw, data=items)
        if raw.status_code == 422:
            # Validation failure: surface the server's error payload verbatim.
            raise UnprocessableEntityError(
                headers=dict(raw.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=raw.json(),
                    ),
                ),
            )
        fallback_body = raw.json()
    except JSONDecodeError:
        raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=raw.text)
    raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=fallback_body)
def get_workflow_runs( def get_workflow_runs(
self, self,
*, *,
@ -2532,6 +2536,7 @@ class RawSkyvern:
*, *,
page: typing.Optional[int] = None, page: typing.Optional[int] = None,
page_size: typing.Optional[int] = None, page_size: typing.Optional[int] = None,
vault_type: typing.Optional[CredentialVaultType] = None,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> HttpResponse[typing.List[CredentialResponse]]: ) -> HttpResponse[typing.List[CredentialResponse]]:
""" """
@ -2545,6 +2550,9 @@ class RawSkyvern:
page_size : typing.Optional[int] page_size : typing.Optional[int]
Number of items per page Number of items per page
vault_type : typing.Optional[CredentialVaultType]
Filter credentials by vault type (e.g. 'custom', 'bitwarden', 'azure_vault')
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -2559,6 +2567,7 @@ class RawSkyvern:
params={ params={
"page": page, "page": page,
"page_size": page_size, "page_size": page_size,
"vault_type": vault_type,
}, },
request_options=request_options, request_options=request_options,
) )
@ -2594,6 +2603,7 @@ class RawSkyvern:
name: str, name: str,
credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType, credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType,
credential: CreateCredentialRequestCredential, credential: CreateCredentialRequestCredential,
vault_type: typing.Optional[CredentialVaultType] = OMIT,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> HttpResponse[CredentialResponse]: ) -> HttpResponse[CredentialResponse]:
""" """
@ -2610,6 +2620,9 @@ class RawSkyvern:
credential : CreateCredentialRequestCredential credential : CreateCredentialRequestCredential
The credential data to store The credential data to store
vault_type : typing.Optional[CredentialVaultType]
Which vault to store this credential in. If omitted, uses the instance default. Use this to mix Skyvern-hosted and custom credentials within the same organization.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -2627,6 +2640,7 @@ class RawSkyvern:
"credential": convert_and_respect_annotation_metadata( "credential": convert_and_respect_annotation_metadata(
object_=credential, annotation=CreateCredentialRequestCredential, direction="write" object_=credential, annotation=CreateCredentialRequestCredential, direction="write"
), ),
"vault_type": vault_type,
}, },
headers={ headers={
"content-type": "application/json", "content-type": "application/json",
@ -2667,6 +2681,7 @@ class RawSkyvern:
name: str, name: str,
credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType, credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType,
credential: CreateCredentialRequestCredential, credential: CreateCredentialRequestCredential,
vault_type: typing.Optional[CredentialVaultType] = OMIT,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> HttpResponse[CredentialResponse]: ) -> HttpResponse[CredentialResponse]:
""" """
@ -2686,6 +2701,9 @@ class RawSkyvern:
credential : CreateCredentialRequestCredential credential : CreateCredentialRequestCredential
The credential data to store The credential data to store
vault_type : typing.Optional[CredentialVaultType]
Which vault to store this credential in. If omitted, uses the instance default. Use this to mix Skyvern-hosted and custom credentials within the same organization.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -2703,6 +2721,7 @@ class RawSkyvern:
"credential": convert_and_respect_annotation_metadata( "credential": convert_and_respect_annotation_metadata(
object_=credential, annotation=CreateCredentialRequestCredential, direction="write" object_=credential, annotation=CreateCredentialRequestCredential, direction="write"
), ),
"vault_type": vault_type,
}, },
headers={ headers={
"content-type": "application/json", "content-type": "application/json",
@ -3444,228 +3463,6 @@ class RawSkyvern:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
def create_checkout_session_api_v1billing_checkout_post(
    self, *, tier: PlanTier, request_options: typing.Optional[RequestOptions] = None
) -> HttpResponse[CheckoutSessionResponse]:
    """
    Create a Stripe Checkout Session for subscribing to a tier.

    The caller should redirect the user to the URL in the returned session.
    After a successful checkout, Stripe sends a webhook that the server
    handles to store the subscription and initialize billing state. The
    server returns 400 when the organization already has an active
    subscription (use the customer portal instead).

    Parameters
    ----------
    tier : PlanTier

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    HttpResponse[CheckoutSessionResponse]
        Successful Response
    """
    raw = self._client_wrapper.httpx_client.request(
        "api/v1/billing/checkout",
        method="POST",
        json={"tier": tier},
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= raw.status_code < 300:
            session = typing.cast(
                CheckoutSessionResponse,
                parse_obj_as(
                    type_=CheckoutSessionResponse,  # type: ignore
                    object_=raw.json(),
                ),
            )
            return HttpResponse(response=raw, data=session)
        if raw.status_code == 422:
            # Validation failure: surface the server's error payload verbatim.
            raise UnprocessableEntityError(
                headers=dict(raw.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=raw.json(),
                    ),
                ),
            )
        fallback_body = raw.json()
    except JSONDecodeError:
        raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=raw.text)
    raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=fallback_body)
def create_portal_session_api_v1billing_portal_post(
    self, *, request_options: typing.Optional[RequestOptions] = None
) -> HttpResponse[PortalSessionResponse]:
    """
    Create a Stripe Customer Portal session for managing a subscription.

    The caller should redirect the user to the URL in the returned session.
    The portal lets users update payment methods, upgrade or downgrade
    plans, cancel their subscription, and view invoices.

    Parameters
    ----------
    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    HttpResponse[PortalSessionResponse]
        Successful Response
    """
    raw = self._client_wrapper.httpx_client.request(
        "api/v1/billing/portal",
        method="POST",
        request_options=request_options,
    )
    try:
        if 200 <= raw.status_code < 300:
            session = typing.cast(
                PortalSessionResponse,
                parse_obj_as(
                    type_=PortalSessionResponse,  # type: ignore
                    object_=raw.json(),
                ),
            )
            return HttpResponse(response=raw, data=session)
        if raw.status_code == 422:
            # Validation failure: surface the server's error payload verbatim.
            raise UnprocessableEntityError(
                headers=dict(raw.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=raw.json(),
                    ),
                ),
            )
        fallback_body = raw.json()
    except JSONDecodeError:
        raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=raw.text)
    raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=fallback_body)
def get_organization_billing_api_v1billing_state_get(
    self, *, request_options: typing.Optional[RequestOptions] = None
) -> HttpResponse[typing.Optional[BillingStateResponse]]:
    """
    Fetch the organization's billing state, or ``None`` when none exists.

    Parameters
    ----------
    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    HttpResponse[typing.Optional[BillingStateResponse]]
        Successful Response
    """
    raw = self._client_wrapper.httpx_client.request(
        "api/v1/billing/state",
        method="GET",
        request_options=request_options,
    )
    try:
        # The endpoint may return an empty body; treat that as "no billing state".
        if raw is None or not raw.text.strip():
            return HttpResponse(response=raw, data=None)
        if 200 <= raw.status_code < 300:
            state = typing.cast(
                typing.Optional[BillingStateResponse],
                parse_obj_as(
                    type_=typing.Optional[BillingStateResponse],  # type: ignore
                    object_=raw.json(),
                ),
            )
            return HttpResponse(response=raw, data=state)
        if raw.status_code == 422:
            # Validation failure: surface the server's error payload verbatim.
            raise UnprocessableEntityError(
                headers=dict(raw.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=raw.json(),
                    ),
                ),
            )
        fallback_body = raw.json()
    except JSONDecodeError:
        raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=raw.text)
    raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=fallback_body)
def change_tier_api_v1billing_change_tier_post(
    self, *, tier: PlanTier, request_options: typing.Optional[RequestOptions] = None
) -> HttpResponse[ChangeTierResponse]:
    """
    Redirect to the Stripe Portal for tier changes.

    The portal applies proration according to its configured settings:
    upgrades incur an immediate proration charge, while downgrades take
    effect at the end of the billing period.

    Parameters
    ----------
    tier : PlanTier

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    HttpResponse[ChangeTierResponse]
        Successful Response
    """
    raw = self._client_wrapper.httpx_client.request(
        "api/v1/billing/change-tier",
        method="POST",
        json={"tier": tier},
        headers={"content-type": "application/json"},
        request_options=request_options,
        omit=OMIT,
    )
    try:
        if 200 <= raw.status_code < 300:
            result = typing.cast(
                ChangeTierResponse,
                parse_obj_as(
                    type_=ChangeTierResponse,  # type: ignore
                    object_=raw.json(),
                ),
            )
            return HttpResponse(response=raw, data=result)
        if raw.status_code == 422:
            # Validation failure: surface the server's error payload verbatim.
            raise UnprocessableEntityError(
                headers=dict(raw.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=raw.json(),
                    ),
                ),
            )
        fallback_body = raw.json()
    except JSONDecodeError:
        raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=raw.text)
    raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=fallback_body)
class AsyncRawSkyvern: class AsyncRawSkyvern:
def __init__(self, *, client_wrapper: AsyncClientWrapper): def __init__(self, *, client_wrapper: AsyncClientWrapper):
@ -3800,7 +3597,7 @@ class AsyncRawSkyvern:
The CDP address for the task. The CDP address for the task.
run_with : typing.Optional[str] run_with : typing.Optional[str]
Whether to run the task with agent or code. Whether to run the task with agent or code. Null means use the default.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -3988,7 +3785,7 @@ class AsyncRawSkyvern:
Whether to fallback to AI if the workflow run fails. Whether to fallback to AI if the workflow run fails.
run_with : typing.Optional[str] run_with : typing.Optional[str]
Whether to run the workflow with agent, code, or code_v2 (adaptive caching). Whether to run the workflow with agent or code. Null inherits from the workflow setting.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -4918,66 +4715,6 @@ class AsyncRawSkyvern:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
async def get_artifact(
    self, artifact_id: str, *, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[Artifact]:
    """
    Get an artifact by its identifier.

    Parameters
    ----------
    artifact_id : str

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[Artifact]
        Successfully retrieved artifact
    """
    raw = await self._client_wrapper.httpx_client.request(
        f"v1/artifacts/{jsonable_encoder(artifact_id)}",
        method="GET",
        request_options=request_options,
    )
    try:
        if 200 <= raw.status_code < 300:
            artifact = typing.cast(
                Artifact,
                parse_obj_as(
                    type_=Artifact,  # type: ignore
                    object_=raw.json(),
                ),
            )
            return AsyncHttpResponse(response=raw, data=artifact)
        # Map the documented error statuses onto their typed SDK exceptions.
        for expected_status, error_cls in ((404, NotFoundError), (422, UnprocessableEntityError)):
            if raw.status_code == expected_status:
                raise error_cls(
                    headers=dict(raw.headers),
                    body=typing.cast(
                        typing.Optional[typing.Any],
                        parse_obj_as(
                            type_=typing.Optional[typing.Any],  # type: ignore
                            object_=raw.json(),
                        ),
                    ),
                )
        fallback_body = raw.json()
    except JSONDecodeError:
        # The body was not valid JSON; report the raw text instead.
        raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=raw.text)
    raise ApiError(status_code=raw.status_code, headers=dict(raw.headers), body=fallback_body)
async def get_run_artifacts( async def get_run_artifacts(
self, self,
run_id: str, run_id: str,
@ -5174,6 +4911,72 @@ class AsyncRawSkyvern:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
async def get_runs_v2(
    self,
    *,
    page: typing.Optional[int] = None,
    page_size: typing.Optional[int] = None,
    status: typing.Optional[typing.Union[RunStatus, typing.Sequence[RunStatus]]] = None,
    search_key: typing.Optional[str] = None,
    request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[typing.List[TaskRunListItem]]:
    """
    List runs via ``GET v1/runs`` with optional pagination, status filtering,
    and substring search.

    Parameters
    ----------
    page : typing.Optional[int]
        Page number to fetch; server default applies when omitted.

    page_size : typing.Optional[int]
        Number of items per page; server default applies when omitted.

    status : typing.Optional[typing.Union[RunStatus, typing.Sequence[RunStatus]]]
        One or more run statuses to filter by.

    search_key : typing.Optional[str]
        Case-insensitive substring search (min 3 chars for trigram index).

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[typing.List[TaskRunListItem]]
        Successful Response

    Raises
    ------
    UnprocessableEntityError
        If the server responds with HTTP 422.
    ApiError
        For any other non-2xx response, or when the response body is not valid JSON.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "v1/runs",
        method="GET",
        # None-valued params are omitted from the query string by the HTTP wrapper.
        params={
            "page": page,
            "page_size": page_size,
            "status": status,
            "search_key": search_key,
        },
        request_options=request_options,
    )
    try:
        if 200 <= _response.status_code < 300:
            # Parse the JSON payload into a typed list of run items.
            _data = typing.cast(
                typing.List[TaskRunListItem],
                parse_obj_as(
                    type_=typing.List[TaskRunListItem],  # type: ignore
                    object_=_response.json(),
                ),
            )
            return AsyncHttpResponse(response=_response, data=_data)
        if _response.status_code == 422:
            raise UnprocessableEntityError(
                headers=dict(_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=_response.json(),
                    ),
                ),
            )
        # Unexpected status: capture the decoded body for the generic ApiError below.
        _response_json = _response.json()
    except JSONDecodeError:
        # Body was not JSON — surface the raw text instead.
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
    raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
async def get_workflow_runs( async def get_workflow_runs(
self, self,
*, *,
@ -6138,6 +5941,7 @@ class AsyncRawSkyvern:
*, *,
page: typing.Optional[int] = None, page: typing.Optional[int] = None,
page_size: typing.Optional[int] = None, page_size: typing.Optional[int] = None,
vault_type: typing.Optional[CredentialVaultType] = None,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[typing.List[CredentialResponse]]: ) -> AsyncHttpResponse[typing.List[CredentialResponse]]:
""" """
@ -6151,6 +5955,9 @@ class AsyncRawSkyvern:
page_size : typing.Optional[int] page_size : typing.Optional[int]
Number of items per page Number of items per page
vault_type : typing.Optional[CredentialVaultType]
Filter credentials by vault type (e.g. 'custom', 'bitwarden', 'azure_vault')
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -6165,6 +5972,7 @@ class AsyncRawSkyvern:
params={ params={
"page": page, "page": page,
"page_size": page_size, "page_size": page_size,
"vault_type": vault_type,
}, },
request_options=request_options, request_options=request_options,
) )
@ -6200,6 +6008,7 @@ class AsyncRawSkyvern:
name: str, name: str,
credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType, credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType,
credential: CreateCredentialRequestCredential, credential: CreateCredentialRequestCredential,
vault_type: typing.Optional[CredentialVaultType] = OMIT,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[CredentialResponse]: ) -> AsyncHttpResponse[CredentialResponse]:
""" """
@ -6216,6 +6025,9 @@ class AsyncRawSkyvern:
credential : CreateCredentialRequestCredential credential : CreateCredentialRequestCredential
The credential data to store The credential data to store
vault_type : typing.Optional[CredentialVaultType]
Which vault to store this credential in. If omitted, uses the instance default. Use this to mix Skyvern-hosted and custom credentials within the same organization.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -6233,6 +6045,7 @@ class AsyncRawSkyvern:
"credential": convert_and_respect_annotation_metadata( "credential": convert_and_respect_annotation_metadata(
object_=credential, annotation=CreateCredentialRequestCredential, direction="write" object_=credential, annotation=CreateCredentialRequestCredential, direction="write"
), ),
"vault_type": vault_type,
}, },
headers={ headers={
"content-type": "application/json", "content-type": "application/json",
@ -6273,6 +6086,7 @@ class AsyncRawSkyvern:
name: str, name: str,
credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType, credential_type: SkyvernForgeSdkSchemasCredentialsCredentialType,
credential: CreateCredentialRequestCredential, credential: CreateCredentialRequestCredential,
vault_type: typing.Optional[CredentialVaultType] = OMIT,
request_options: typing.Optional[RequestOptions] = None, request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[CredentialResponse]: ) -> AsyncHttpResponse[CredentialResponse]:
""" """
@ -6292,6 +6106,9 @@ class AsyncRawSkyvern:
credential : CreateCredentialRequestCredential credential : CreateCredentialRequestCredential
The credential data to store The credential data to store
vault_type : typing.Optional[CredentialVaultType]
Which vault to store this credential in. If omitted, uses the instance default. Use this to mix Skyvern-hosted and custom credentials within the same organization.
request_options : typing.Optional[RequestOptions] request_options : typing.Optional[RequestOptions]
Request-specific configuration. Request-specific configuration.
@ -6309,6 +6126,7 @@ class AsyncRawSkyvern:
"credential": convert_and_respect_annotation_metadata( "credential": convert_and_respect_annotation_metadata(
object_=credential, annotation=CreateCredentialRequestCredential, direction="write" object_=credential, annotation=CreateCredentialRequestCredential, direction="write"
), ),
"vault_type": vault_type,
}, },
headers={ headers={
"content-type": "application/json", "content-type": "application/json",
@ -7049,225 +6867,3 @@ class AsyncRawSkyvern:
except JSONDecodeError: except JSONDecodeError:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
async def create_checkout_session_api_v1billing_checkout_post(
    self, *, tier: PlanTier, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[CheckoutSessionResponse]:
    """
    Create a Stripe Checkout Session for subscribing to a tier.

    Frontend should redirect the user to the returned URL.
    After successful checkout, Stripe will send a webhook that we handle
    to store the subscription and initialize billing state.

    Returns 400 if org already has an active subscription (use portal instead).

    Parameters
    ----------
    tier : PlanTier
        Plan tier to subscribe to.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[CheckoutSessionResponse]
        Successful Response

    Raises
    ------
    UnprocessableEntityError
        If the server responds with HTTP 422.
    ApiError
        For any other non-2xx response, or when the response body is not valid JSON.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "api/v1/billing/checkout",
        method="POST",
        json={
            "tier": tier,
        },
        headers={
            "content-type": "application/json",
        },
        request_options=request_options,
        # OMIT sentinel values are dropped from the request body.
        omit=OMIT,
    )
    try:
        if 200 <= _response.status_code < 300:
            # Parse the JSON payload into the typed checkout-session model.
            _data = typing.cast(
                CheckoutSessionResponse,
                parse_obj_as(
                    type_=CheckoutSessionResponse,  # type: ignore
                    object_=_response.json(),
                ),
            )
            return AsyncHttpResponse(response=_response, data=_data)
        if _response.status_code == 422:
            raise UnprocessableEntityError(
                headers=dict(_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=_response.json(),
                    ),
                ),
            )
        # Unexpected status: capture the decoded body for the generic ApiError below.
        _response_json = _response.json()
    except JSONDecodeError:
        # Body was not JSON — surface the raw text instead.
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
    raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
async def create_portal_session_api_v1billing_portal_post(
    self, *, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[PortalSessionResponse]:
    """
    Create a Stripe Customer Portal session for managing subscription.

    Frontend should redirect the user to the returned URL.

    The portal allows users to:
    - Update payment methods
    - Upgrade/downgrade plans
    - Cancel subscription
    - View invoices

    Parameters
    ----------
    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[PortalSessionResponse]
        Successful Response

    Raises
    ------
    UnprocessableEntityError
        If the server responds with HTTP 422.
    ApiError
        For any other non-2xx response, or when the response body is not valid JSON.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "api/v1/billing/portal",
        method="POST",
        request_options=request_options,
    )
    try:
        if 200 <= _response.status_code < 300:
            # Parse the JSON payload into the typed portal-session model.
            _data = typing.cast(
                PortalSessionResponse,
                parse_obj_as(
                    type_=PortalSessionResponse,  # type: ignore
                    object_=_response.json(),
                ),
            )
            return AsyncHttpResponse(response=_response, data=_data)
        if _response.status_code == 422:
            raise UnprocessableEntityError(
                headers=dict(_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=_response.json(),
                    ),
                ),
            )
        # Unexpected status: capture the decoded body for the generic ApiError below.
        _response_json = _response.json()
    except JSONDecodeError:
        # Body was not JSON — surface the raw text instead.
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
    raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
async def get_organization_billing_api_v1billing_state_get(
    self, *, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[typing.Optional[BillingStateResponse]]:
    """
    Fetch the organization's billing state via ``GET api/v1/billing/state``.

    An empty response body yields ``data=None`` rather than a parse error.

    Parameters
    ----------
    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[typing.Optional[BillingStateResponse]]
        Successful Response

    Raises
    ------
    UnprocessableEntityError
        If the server responds with HTTP 422.
    ApiError
        For any other non-2xx response, or when the response body is not valid JSON.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "api/v1/billing/state",
        method="GET",
        request_options=request_options,
    )
    try:
        # Billing state is nullable: treat a missing/blank body as "no state".
        if _response is None or not _response.text.strip():
            return AsyncHttpResponse(response=_response, data=None)
        if 200 <= _response.status_code < 300:
            _data = typing.cast(
                typing.Optional[BillingStateResponse],
                parse_obj_as(
                    type_=typing.Optional[BillingStateResponse],  # type: ignore
                    object_=_response.json(),
                ),
            )
            return AsyncHttpResponse(response=_response, data=_data)
        if _response.status_code == 422:
            raise UnprocessableEntityError(
                headers=dict(_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=_response.json(),
                    ),
                ),
            )
        # Unexpected status: capture the decoded body for the generic ApiError below.
        _response_json = _response.json()
    except JSONDecodeError:
        # Body was not JSON — surface the raw text instead.
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
    raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
async def change_tier_api_v1billing_change_tier_post(
    self, *, tier: PlanTier, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[ChangeTierResponse]:
    """
    Redirect to Stripe Portal for tier changes.

    Portal handles proration based on configured settings:
    - Upgrades: Immediate proration charge
    - Downgrades: Apply at end of billing period

    Parameters
    ----------
    tier : PlanTier
        Target plan tier.

    request_options : typing.Optional[RequestOptions]
        Request-specific configuration.

    Returns
    -------
    AsyncHttpResponse[ChangeTierResponse]
        Successful Response

    Raises
    ------
    UnprocessableEntityError
        If the server responds with HTTP 422.
    ApiError
        For any other non-2xx response, or when the response body is not valid JSON.
    """
    _response = await self._client_wrapper.httpx_client.request(
        "api/v1/billing/change-tier",
        method="POST",
        json={
            "tier": tier,
        },
        headers={
            "content-type": "application/json",
        },
        request_options=request_options,
        # OMIT sentinel values are dropped from the request body.
        omit=OMIT,
    )
    try:
        if 200 <= _response.status_code < 300:
            # Parse the JSON payload into the typed change-tier model.
            _data = typing.cast(
                ChangeTierResponse,
                parse_obj_as(
                    type_=ChangeTierResponse,  # type: ignore
                    object_=_response.json(),
                ),
            )
            return AsyncHttpResponse(response=_response, data=_data)
        if _response.status_code == 422:
            raise UnprocessableEntityError(
                headers=dict(_response.headers),
                body=typing.cast(
                    typing.Optional[typing.Any],
                    parse_obj_as(
                        type_=typing.Optional[typing.Any],  # type: ignore
                        object_=_response.json(),
                    ),
                ),
            )
        # Unexpected status: capture the decoded body for the generic ApiError below.
        _response_json = _response.json()
    except JSONDecodeError:
        # Body was not JSON — surface the raw text instead.
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
    raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

View file

@ -35,7 +35,6 @@ if typing.TYPE_CHECKING:
from .azure_secret_parameter import AzureSecretParameter from .azure_secret_parameter import AzureSecretParameter
from .azure_vault_credential_parameter import AzureVaultCredentialParameter from .azure_vault_credential_parameter import AzureVaultCredentialParameter
from .azure_vault_credential_parameter_yaml import AzureVaultCredentialParameterYaml from .azure_vault_credential_parameter_yaml import AzureVaultCredentialParameterYaml
from .billing_state_response import BillingStateResponse
from .bitwarden_credit_card_data_parameter import BitwardenCreditCardDataParameter from .bitwarden_credit_card_data_parameter import BitwardenCreditCardDataParameter
from .bitwarden_credit_card_data_parameter_yaml import BitwardenCreditCardDataParameterYaml from .bitwarden_credit_card_data_parameter_yaml import BitwardenCreditCardDataParameterYaml
from .bitwarden_login_credential_parameter import BitwardenLoginCredentialParameter from .bitwarden_login_credential_parameter import BitwardenLoginCredentialParameter
@ -54,8 +53,6 @@ if typing.TYPE_CHECKING:
from .branch_criteria_yaml_criteria_type import BranchCriteriaYamlCriteriaType from .branch_criteria_yaml_criteria_type import BranchCriteriaYamlCriteriaType
from .browser_profile import BrowserProfile from .browser_profile import BrowserProfile
from .browser_session_response import BrowserSessionResponse from .browser_session_response import BrowserSessionResponse
from .change_tier_response import ChangeTierResponse
from .checkout_session_response import CheckoutSessionResponse
from .click_action import ClickAction from .click_action import ClickAction
from .click_action_data import ClickActionData from .click_action_data import ClickActionData
from .click_context import ClickContext from .click_context import ClickContext
@ -102,6 +99,7 @@ if typing.TYPE_CHECKING:
from .credential_response import CredentialResponse from .credential_response import CredentialResponse
from .credential_response_credential import CredentialResponseCredential from .credential_response_credential import CredentialResponseCredential
from .credential_type_output import CredentialTypeOutput from .credential_type_output import CredentialTypeOutput
from .credential_vault_type import CredentialVaultType
from .credit_card_credential_response import CreditCardCredentialResponse from .credit_card_credential_response import CreditCardCredentialResponse
from .download_to_s3block import DownloadToS3Block from .download_to_s3block import DownloadToS3Block
from .download_to_s3block_yaml import DownloadToS3BlockYaml from .download_to_s3block_yaml import DownloadToS3BlockYaml
@ -313,8 +311,6 @@ if typing.TYPE_CHECKING:
from .non_empty_password_credential import NonEmptyPasswordCredential from .non_empty_password_credential import NonEmptyPasswordCredential
from .one_password_credential_parameter import OnePasswordCredentialParameter from .one_password_credential_parameter import OnePasswordCredentialParameter
from .one_password_credential_parameter_yaml import OnePasswordCredentialParameterYaml from .one_password_credential_parameter_yaml import OnePasswordCredentialParameterYaml
from .organization_schedule_item import OrganizationScheduleItem
from .organization_schedule_list_response import OrganizationScheduleListResponse
from .otp_type import OtpType from .otp_type import OtpType
from .output_parameter import OutputParameter from .output_parameter import OutputParameter
from .output_parameter_yaml import OutputParameterYaml from .output_parameter_yaml import OutputParameterYaml
@ -323,8 +319,6 @@ if typing.TYPE_CHECKING:
from .pdf_parser_block import PdfParserBlock from .pdf_parser_block import PdfParserBlock
from .pdf_parser_block_yaml import PdfParserBlockYaml from .pdf_parser_block_yaml import PdfParserBlockYaml
from .persistent_browser_type import PersistentBrowserType from .persistent_browser_type import PersistentBrowserType
from .plan_tier import PlanTier
from .portal_session_response import PortalSessionResponse
from .print_page_block import PrintPageBlock from .print_page_block import PrintPageBlock
from .print_page_block_parameters_item import ( from .print_page_block_parameters_item import (
PrintPageBlockParametersItem, PrintPageBlockParametersItem,
@ -390,6 +384,7 @@ if typing.TYPE_CHECKING:
) )
from .task_block_yaml import TaskBlockYaml from .task_block_yaml import TaskBlockYaml
from .task_block_yaml_data_schema import TaskBlockYamlDataSchema from .task_block_yaml_data_schema import TaskBlockYamlDataSchema
from .task_run_list_item import TaskRunListItem
from .task_run_request import TaskRunRequest from .task_run_request import TaskRunRequest
from .task_run_request_data_extraction_schema import TaskRunRequestDataExtractionSchema from .task_run_request_data_extraction_schema import TaskRunRequestDataExtractionSchema
from .task_run_request_proxy_location import TaskRunRequestProxyLocation from .task_run_request_proxy_location import TaskRunRequestProxyLocation
@ -584,10 +579,6 @@ if typing.TYPE_CHECKING:
from .workflow_run_timeline import WorkflowRunTimeline from .workflow_run_timeline import WorkflowRunTimeline
from .workflow_run_timeline_type import WorkflowRunTimelineType from .workflow_run_timeline_type import WorkflowRunTimelineType
from .workflow_run_trigger_type import WorkflowRunTriggerType from .workflow_run_trigger_type import WorkflowRunTriggerType
from .workflow_schedule import WorkflowSchedule
from .workflow_schedule_list_response import WorkflowScheduleListResponse
from .workflow_schedule_response import WorkflowScheduleResponse
from .workflow_schedule_upsert_request import WorkflowScheduleUpsertRequest
from .workflow_status import WorkflowStatus from .workflow_status import WorkflowStatus
from .workflow_trigger_block import WorkflowTriggerBlock from .workflow_trigger_block import WorkflowTriggerBlock
from .workflow_trigger_block_parameters_item import ( from .workflow_trigger_block_parameters_item import (
@ -633,7 +624,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"AzureSecretParameter": ".azure_secret_parameter", "AzureSecretParameter": ".azure_secret_parameter",
"AzureVaultCredentialParameter": ".azure_vault_credential_parameter", "AzureVaultCredentialParameter": ".azure_vault_credential_parameter",
"AzureVaultCredentialParameterYaml": ".azure_vault_credential_parameter_yaml", "AzureVaultCredentialParameterYaml": ".azure_vault_credential_parameter_yaml",
"BillingStateResponse": ".billing_state_response",
"BitwardenCreditCardDataParameter": ".bitwarden_credit_card_data_parameter", "BitwardenCreditCardDataParameter": ".bitwarden_credit_card_data_parameter",
"BitwardenCreditCardDataParameterYaml": ".bitwarden_credit_card_data_parameter_yaml", "BitwardenCreditCardDataParameterYaml": ".bitwarden_credit_card_data_parameter_yaml",
"BitwardenLoginCredentialParameter": ".bitwarden_login_credential_parameter", "BitwardenLoginCredentialParameter": ".bitwarden_login_credential_parameter",
@ -650,8 +640,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"BranchCriteriaYamlCriteriaType": ".branch_criteria_yaml_criteria_type", "BranchCriteriaYamlCriteriaType": ".branch_criteria_yaml_criteria_type",
"BrowserProfile": ".browser_profile", "BrowserProfile": ".browser_profile",
"BrowserSessionResponse": ".browser_session_response", "BrowserSessionResponse": ".browser_session_response",
"ChangeTierResponse": ".change_tier_response",
"CheckoutSessionResponse": ".checkout_session_response",
"ClickAction": ".click_action", "ClickAction": ".click_action",
"ClickActionData": ".click_action_data", "ClickActionData": ".click_action_data",
"ClickContext": ".click_context", "ClickContext": ".click_context",
@ -694,6 +682,7 @@ _dynamic_imports: typing.Dict[str, str] = {
"CredentialResponse": ".credential_response", "CredentialResponse": ".credential_response",
"CredentialResponseCredential": ".credential_response_credential", "CredentialResponseCredential": ".credential_response_credential",
"CredentialTypeOutput": ".credential_type_output", "CredentialTypeOutput": ".credential_type_output",
"CredentialVaultType": ".credential_vault_type",
"CreditCardCredentialResponse": ".credit_card_credential_response", "CreditCardCredentialResponse": ".credit_card_credential_response",
"DownloadToS3Block": ".download_to_s3block", "DownloadToS3Block": ".download_to_s3block",
"DownloadToS3BlockYaml": ".download_to_s3block_yaml", "DownloadToS3BlockYaml": ".download_to_s3block_yaml",
@ -885,8 +874,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"NonEmptyPasswordCredential": ".non_empty_password_credential", "NonEmptyPasswordCredential": ".non_empty_password_credential",
"OnePasswordCredentialParameter": ".one_password_credential_parameter", "OnePasswordCredentialParameter": ".one_password_credential_parameter",
"OnePasswordCredentialParameterYaml": ".one_password_credential_parameter_yaml", "OnePasswordCredentialParameterYaml": ".one_password_credential_parameter_yaml",
"OrganizationScheduleItem": ".organization_schedule_item",
"OrganizationScheduleListResponse": ".organization_schedule_list_response",
"OtpType": ".otp_type", "OtpType": ".otp_type",
"OutputParameter": ".output_parameter", "OutputParameter": ".output_parameter",
"OutputParameterYaml": ".output_parameter_yaml", "OutputParameterYaml": ".output_parameter_yaml",
@ -895,8 +882,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"PdfParserBlock": ".pdf_parser_block", "PdfParserBlock": ".pdf_parser_block",
"PdfParserBlockYaml": ".pdf_parser_block_yaml", "PdfParserBlockYaml": ".pdf_parser_block_yaml",
"PersistentBrowserType": ".persistent_browser_type", "PersistentBrowserType": ".persistent_browser_type",
"PlanTier": ".plan_tier",
"PortalSessionResponse": ".portal_session_response",
"PrintPageBlock": ".print_page_block", "PrintPageBlock": ".print_page_block",
"PrintPageBlockParametersItem": ".print_page_block_parameters_item", "PrintPageBlockParametersItem": ".print_page_block_parameters_item",
"PrintPageBlockParametersItem_AwsSecret": ".print_page_block_parameters_item", "PrintPageBlockParametersItem_AwsSecret": ".print_page_block_parameters_item",
@ -956,6 +941,7 @@ _dynamic_imports: typing.Dict[str, str] = {
"TaskBlockParametersItem_Workflow": ".task_block_parameters_item", "TaskBlockParametersItem_Workflow": ".task_block_parameters_item",
"TaskBlockYaml": ".task_block_yaml", "TaskBlockYaml": ".task_block_yaml",
"TaskBlockYamlDataSchema": ".task_block_yaml_data_schema", "TaskBlockYamlDataSchema": ".task_block_yaml_data_schema",
"TaskRunListItem": ".task_run_list_item",
"TaskRunRequest": ".task_run_request", "TaskRunRequest": ".task_run_request",
"TaskRunRequestDataExtractionSchema": ".task_run_request_data_extraction_schema", "TaskRunRequestDataExtractionSchema": ".task_run_request_data_extraction_schema",
"TaskRunRequestProxyLocation": ".task_run_request_proxy_location", "TaskRunRequestProxyLocation": ".task_run_request_proxy_location",
@ -1134,10 +1120,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"WorkflowRunTimeline": ".workflow_run_timeline", "WorkflowRunTimeline": ".workflow_run_timeline",
"WorkflowRunTimelineType": ".workflow_run_timeline_type", "WorkflowRunTimelineType": ".workflow_run_timeline_type",
"WorkflowRunTriggerType": ".workflow_run_trigger_type", "WorkflowRunTriggerType": ".workflow_run_trigger_type",
"WorkflowSchedule": ".workflow_schedule",
"WorkflowScheduleListResponse": ".workflow_schedule_list_response",
"WorkflowScheduleResponse": ".workflow_schedule_response",
"WorkflowScheduleUpsertRequest": ".workflow_schedule_upsert_request",
"WorkflowStatus": ".workflow_status", "WorkflowStatus": ".workflow_status",
"WorkflowTriggerBlock": ".workflow_trigger_block", "WorkflowTriggerBlock": ".workflow_trigger_block",
"WorkflowTriggerBlockParametersItem": ".workflow_trigger_block_parameters_item", "WorkflowTriggerBlockParametersItem": ".workflow_trigger_block_parameters_item",
@ -1205,7 +1187,6 @@ __all__ = [
"AzureSecretParameter", "AzureSecretParameter",
"AzureVaultCredentialParameter", "AzureVaultCredentialParameter",
"AzureVaultCredentialParameterYaml", "AzureVaultCredentialParameterYaml",
"BillingStateResponse",
"BitwardenCreditCardDataParameter", "BitwardenCreditCardDataParameter",
"BitwardenCreditCardDataParameterYaml", "BitwardenCreditCardDataParameterYaml",
"BitwardenLoginCredentialParameter", "BitwardenLoginCredentialParameter",
@ -1222,8 +1203,6 @@ __all__ = [
"BranchCriteriaYamlCriteriaType", "BranchCriteriaYamlCriteriaType",
"BrowserProfile", "BrowserProfile",
"BrowserSessionResponse", "BrowserSessionResponse",
"ChangeTierResponse",
"CheckoutSessionResponse",
"ClickAction", "ClickAction",
"ClickActionData", "ClickActionData",
"ClickContext", "ClickContext",
@ -1266,6 +1245,7 @@ __all__ = [
"CredentialResponse", "CredentialResponse",
"CredentialResponseCredential", "CredentialResponseCredential",
"CredentialTypeOutput", "CredentialTypeOutput",
"CredentialVaultType",
"CreditCardCredentialResponse", "CreditCardCredentialResponse",
"DownloadToS3Block", "DownloadToS3Block",
"DownloadToS3BlockYaml", "DownloadToS3BlockYaml",
@ -1457,8 +1437,6 @@ __all__ = [
"NonEmptyPasswordCredential", "NonEmptyPasswordCredential",
"OnePasswordCredentialParameter", "OnePasswordCredentialParameter",
"OnePasswordCredentialParameterYaml", "OnePasswordCredentialParameterYaml",
"OrganizationScheduleItem",
"OrganizationScheduleListResponse",
"OtpType", "OtpType",
"OutputParameter", "OutputParameter",
"OutputParameterYaml", "OutputParameterYaml",
@ -1467,8 +1445,6 @@ __all__ = [
"PdfParserBlock", "PdfParserBlock",
"PdfParserBlockYaml", "PdfParserBlockYaml",
"PersistentBrowserType", "PersistentBrowserType",
"PlanTier",
"PortalSessionResponse",
"PrintPageBlock", "PrintPageBlock",
"PrintPageBlockParametersItem", "PrintPageBlockParametersItem",
"PrintPageBlockParametersItem_AwsSecret", "PrintPageBlockParametersItem_AwsSecret",
@ -1528,6 +1504,7 @@ __all__ = [
"TaskBlockParametersItem_Workflow", "TaskBlockParametersItem_Workflow",
"TaskBlockYaml", "TaskBlockYaml",
"TaskBlockYamlDataSchema", "TaskBlockYamlDataSchema",
"TaskRunListItem",
"TaskRunRequest", "TaskRunRequest",
"TaskRunRequestDataExtractionSchema", "TaskRunRequestDataExtractionSchema",
"TaskRunRequestProxyLocation", "TaskRunRequestProxyLocation",
@ -1706,10 +1683,6 @@ __all__ = [
"WorkflowRunTimeline", "WorkflowRunTimeline",
"WorkflowRunTimelineType", "WorkflowRunTimelineType",
"WorkflowRunTriggerType", "WorkflowRunTriggerType",
"WorkflowSchedule",
"WorkflowScheduleListResponse",
"WorkflowScheduleResponse",
"WorkflowScheduleUpsertRequest",
"WorkflowStatus", "WorkflowStatus",
"WorkflowTriggerBlock", "WorkflowTriggerBlock",
"WorkflowTriggerBlockParametersItem", "WorkflowTriggerBlockParametersItem",

View file

@ -1,37 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import datetime as dt
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
from .plan_tier import PlanTier
class BillingStateResponse(UniversalBaseModel):
    """Billing state for an organization: plan tier, current period, and credit counters."""

    billing_id: str
    organization_id: str
    plan_tier: PlanTier
    # Current billing period boundaries; None when not set.
    current_period_start: typing.Optional[dt.datetime] = None
    current_period_end: typing.Optional[dt.datetime] = None
    included_credits_this_period: int
    credits_consumed_this_period: int
    cached_credits_consumed_this_period: int
    overage_enabled: bool
    browser_uptime_seconds_consumed: int
    # Top-up credit counters, tracked separately from period-included credits.
    topup_credits_total: int
    topup_credits_used: int
    topup_credits_remaining: int
    credits_remaining: int
    cancel_at_period_end: typing.Optional[bool] = None
    created_at: dt.datetime
    modified_at: dt.datetime

    if IS_PYDANTIC_V2:
        # Frozen (immutable) model that tolerates unknown server-side fields.
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
    else:

        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow

View file

@ -1,21 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
class ChangeTierResponse(UniversalBaseModel):
    """Result of a plan-tier change request."""

    status: str
    tier: str
    # Optional URL the caller should redirect the user to (absent when no redirect is needed).
    redirect_url: typing.Optional[str] = None

    if IS_PYDANTIC_V2:
        # Frozen (immutable) model that tolerates unknown server-side fields.
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
    else:

        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow

View file

@ -1,20 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
class CheckoutSessionResponse(UniversalBaseModel):
    """A created checkout session: its identifier and the URL to redirect the user to."""

    id: str
    url: str

    if IS_PYDANTIC_V2:
        # Frozen (immutable) model that tolerates unknown server-side fields.
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
    else:

        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow

View file

@ -5,6 +5,7 @@ import typing
import pydantic import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
from .create_credential_request_credential import CreateCredentialRequestCredential from .create_credential_request_credential import CreateCredentialRequestCredential
from .credential_vault_type import CredentialVaultType
from .skyvern_forge_sdk_schemas_credentials_credential_type import SkyvernForgeSdkSchemasCredentialsCredentialType from .skyvern_forge_sdk_schemas_credentials_credential_type import SkyvernForgeSdkSchemasCredentialsCredentialType
@ -28,6 +29,11 @@ class CreateCredentialRequest(UniversalBaseModel):
The credential data to store The credential data to store
""" """
vault_type: typing.Optional[CredentialVaultType] = pydantic.Field(default=None)
"""
Which vault to store this credential in. If omitted, uses the instance default. Use this to mix Skyvern-hosted and custom credentials within the same organization.
"""
if IS_PYDANTIC_V2: if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
else: else:

View file

@ -6,6 +6,7 @@ import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
from .credential_response_credential import CredentialResponseCredential from .credential_response_credential import CredentialResponseCredential
from .credential_type_output import CredentialTypeOutput from .credential_type_output import CredentialTypeOutput
from .credential_vault_type import CredentialVaultType
class CredentialResponse(UniversalBaseModel): class CredentialResponse(UniversalBaseModel):
@ -33,6 +34,11 @@ class CredentialResponse(UniversalBaseModel):
Name of the credential Name of the credential
""" """
vault_type: typing.Optional[CredentialVaultType] = pydantic.Field(default=None)
"""
Which vault stores this credential (e.g., 'bitwarden', 'azure_vault', 'custom')
"""
browser_profile_id: typing.Optional[str] = pydantic.Field(default=None) browser_profile_id: typing.Optional[str] = pydantic.Field(default=None)
""" """
Browser profile ID linked to this credential Browser profile ID linked to this credential
@ -48,6 +54,11 @@ class CredentialResponse(UniversalBaseModel):
User-provided context describing the login sequence (e.g., 'click SSO button first') User-provided context describing the login sequence (e.g., 'click SSO button first')
""" """
save_browser_session_intent: typing.Optional[bool] = pydantic.Field(default=None)
"""
Whether the user intends to save a browser session, regardless of test outcome
"""
if IS_PYDANTIC_V2: if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
else: else:

View file

@ -0,0 +1,5 @@
# This file was auto-generated by Fern from our API Definition.
import typing
CredentialVaultType = typing.Union[typing.Literal["bitwarden", "azure_vault", "custom"], typing.Any]

View file

@ -1,23 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
from .organization_schedule_item import OrganizationScheduleItem
class OrganizationScheduleListResponse(UniversalBaseModel):
schedules: typing.List[OrganizationScheduleItem]
total_count: int
page: int
page_size: int
if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
else:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow

View file

@ -11,7 +11,7 @@ class PasswordCredentialResponse(UniversalBaseModel):
""" """
Response model for password credentials non-sensitive fields only. Response model for password credentials non-sensitive fields only.
SECURITY: Must NEVER include password, TOTP secret, or TOTP identifier. SECURITY: Must NEVER include password or TOTP secret.
""" """
username: str = pydantic.Field() username: str = pydantic.Field()

View file

@ -1,5 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
PlanTier = typing.Union[typing.Literal["free", "hobby", "pro", "enterprise"], typing.Any]

View file

@ -1,19 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
class PortalSessionResponse(UniversalBaseModel):
url: str
if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
else:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow

View file

@ -7,20 +7,21 @@ import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
class OrganizationScheduleItem(UniversalBaseModel): class TaskRunListItem(UniversalBaseModel):
workflow_schedule_id: str """
organization_id: str Lightweight run-history item backed by the task_runs table.
workflow_permanent_id: str """
workflow_title: str
cron_expression: str task_run_id: str
timezone: str run_id: str
enabled: bool task_run_type: str
parameters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None status: str
name: typing.Optional[str] = None title: typing.Optional[str] = None
description: typing.Optional[str] = None started_at: typing.Optional[dt.datetime] = None
next_run: typing.Optional[dt.datetime] = None finished_at: typing.Optional[dt.datetime] = None
created_at: dt.datetime created_at: dt.datetime
modified_at: dt.datetime workflow_permanent_id: typing.Optional[str] = None
script_run: typing.Optional[bool] = None
if IS_PYDANTIC_V2: if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2

View file

@ -144,7 +144,7 @@ class TaskRunRequest(UniversalBaseModel):
run_with: typing.Optional[str] = pydantic.Field(default=None) run_with: typing.Optional[str] = pydantic.Field(default=None)
""" """
Whether to run the task with agent or code. Whether to run the task with agent or code. Null means use the default.
""" """
if IS_PYDANTIC_V2: if IS_PYDANTIC_V2:

View file

@ -35,6 +35,7 @@ class Workflow(UniversalBaseModel):
ai_fallback: typing.Optional[bool] = None ai_fallback: typing.Optional[bool] = None
cache_key: typing.Optional[str] = None cache_key: typing.Optional[str] = None
adaptive_caching: typing.Optional[bool] = None adaptive_caching: typing.Optional[bool] = None
code_version: typing.Optional[int] = None
generate_script_on_terminal: typing.Optional[bool] = None generate_script_on_terminal: typing.Optional[bool] = None
run_sequentially: typing.Optional[bool] = None run_sequentially: typing.Optional[bool] = None
sequential_key: typing.Optional[str] = None sequential_key: typing.Optional[str] = None

View file

@ -29,6 +29,7 @@ class WorkflowCreateYamlRequest(UniversalBaseModel):
ai_fallback: typing.Optional[bool] = None ai_fallback: typing.Optional[bool] = None
cache_key: typing.Optional[str] = None cache_key: typing.Optional[str] = None
adaptive_caching: typing.Optional[bool] = None adaptive_caching: typing.Optional[bool] = None
code_version: typing.Optional[int] = None
generate_script_on_terminal: typing.Optional[bool] = None generate_script_on_terminal: typing.Optional[bool] = None
run_sequentially: typing.Optional[bool] = None run_sequentially: typing.Optional[bool] = None
sequential_key: typing.Optional[str] = None sequential_key: typing.Optional[str] = None

View file

@ -27,6 +27,7 @@ class WorkflowRun(UniversalBaseModel):
totp_verification_url: typing.Optional[str] = None totp_verification_url: typing.Optional[str] = None
totp_identifier: typing.Optional[str] = None totp_identifier: typing.Optional[str] = None
failure_reason: typing.Optional[str] = None failure_reason: typing.Optional[str] = None
failure_category: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = None
parent_workflow_run_id: typing.Optional[str] = None parent_workflow_run_id: typing.Optional[str] = None
workflow_title: typing.Optional[str] = None workflow_title: typing.Optional[str] = None
max_screenshot_scrolls: typing.Optional[int] = None max_screenshot_scrolls: typing.Optional[int] = None

View file

@ -108,7 +108,7 @@ class WorkflowRunRequest(UniversalBaseModel):
run_with: typing.Optional[str] = pydantic.Field(default=None) run_with: typing.Optional[str] = pydantic.Field(default=None)
""" """
Whether to run the workflow with agent, code, or code_v2 (adaptive caching). Whether to run the workflow with agent or code. Null inherits from the workflow setting.
""" """
if IS_PYDANTIC_V2: if IS_PYDANTIC_V2:

View file

@ -110,7 +110,7 @@ class WorkflowRunResponse(UniversalBaseModel):
run_with: typing.Optional[str] = pydantic.Field(default=None) run_with: typing.Optional[str] = pydantic.Field(default=None)
""" """
Whether the workflow run was executed with agent, code, or code_v2 (adaptive caching) Whether the workflow run was executed with agent or code
""" """
ai_fallback: typing.Optional[bool] = pydantic.Field(default=None) ai_fallback: typing.Optional[bool] = pydantic.Field(default=None)

View file

@ -1,32 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import datetime as dt
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
class WorkflowSchedule(UniversalBaseModel):
workflow_schedule_id: str
organization_id: str
workflow_permanent_id: str
cron_expression: str
timezone: str
enabled: bool
parameters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
temporal_schedule_id: typing.Optional[str] = None
name: typing.Optional[str] = None
description: typing.Optional[str] = None
created_at: dt.datetime
modified_at: dt.datetime
deleted_at: typing.Optional[dt.datetime] = None
if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
else:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow

View file

@ -1,20 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
from .workflow_schedule import WorkflowSchedule
class WorkflowScheduleListResponse(UniversalBaseModel):
schedules: typing.List[WorkflowSchedule]
if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
else:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow

View file

@ -1,22 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import datetime as dt
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
from .workflow_schedule import WorkflowSchedule
class WorkflowScheduleResponse(UniversalBaseModel):
schedule: WorkflowSchedule
next_runs: typing.Optional[typing.List[dt.datetime]] = None
if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
else:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow

View file

@ -1,24 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
class WorkflowScheduleUpsertRequest(UniversalBaseModel):
cron_expression: str
timezone: str
enabled: typing.Optional[bool] = None
parameters: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
name: typing.Optional[str] = None
description: typing.Optional[str] = None
if IS_PYDANTIC_V2:
model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
else:
class Config:
frozen = True
smart_union = True
extra = pydantic.Extra.allow

View file

@ -74,18 +74,46 @@
return null; return null;
} }
// Returns true for IDs that are likely stable (meaningful names like
// "email", "firstName", "address--city"). Returns false for IDs that
// look like session-specific random hashes (e.g., "gf3ag", "_7xK2",
// "4a1633a5-e29f-44d6-...") which change every page load.
// Validated against production form pages from multiple employer platforms:
// STABLE: address--city, name--legalName--firstName (semantic, contain --)
// UNSTABLE: ugy05, ugy0g (short random), 4a1633a5-... (UUID), input-4 (positional)
function isStableId(id) {
if (!id) return false;
// UUIDs
if (/^[0-9a-f]{8}-[0-9a-f]{4}-/i.test(id)) return false;
// Positional IDs: input-4, field-12, element-0, item-3, ctrl00, ctl-5
if (/^(input|field|element|item|ctrl|ctl|comp|widget)[-_]?\d+$/i.test(id))
return false;
// Purely numeric
if (/^\d+$/.test(id)) return false;
// Short random-looking strings (< 12 chars, no word-like patterns)
// Rejects: ugy05, gf3ag, _7xK2. Allows: email, firstName, source--source
if (
id.length < 12 &&
/^[a-zA-Z0-9_-]+$/.test(id) &&
!/[a-z]{3,}[A-Z]|[a-z]{3,}$|[A-Z][a-z]{3,}/.test(id)
)
return false;
return true;
}
function buildSelector(el, label) { function buildSelector(el, label) {
const tag = el.tagName.toLowerCase(); const tag = el.tagName.toLowerCase();
const elType = (el.getAttribute("type") || "").toLowerCase(); const elType = (el.getAttribute("type") || "").toLowerCase();
const vis = elType === "file" ? "" : ":visible"; const vis = elType === "file" ? "" : ":visible";
// 1. name attribute — stable across sessions
if (el.name) return tag + '[name="' + el.name + '"]' + vis; if (el.name) return tag + '[name="' + el.name + '"]' + vis;
if (el.id) return "#" + CSS.escape(el.id) + vis;
// File inputs with no name/id: no name/id, use data-automation-id or type selector // 2. data-automation-id — stable on platforms that use automation attributes
if (elType === "file") { const autoId = el.getAttribute("data-automation-id");
const autoId = el.getAttribute("data-automation-id"); if (autoId) return tag + '[data-automation-id="' + autoId + '"]' + vis;
if (autoId) return tag + '[data-automation-id="' + autoId + '"]';
return 'input[type="file"]'; // 3. Label-based semantic selectors — stable across sessions
}
if (label && label.length < 80) { if (label && label.length < 80) {
const escapedLabel = label const escapedLabel = label
.replace(/\\/g, "\\\\") .replace(/\\/g, "\\\\")
@ -102,17 +130,31 @@
return tag + '[aria-label="' + escapedLabel + '"]:visible'; return tag + '[aria-label="' + escapedLabel + '"]:visible';
} }
} }
// 4. ID — only if it looks stable (not a random session hash)
if (el.id && isStableId(el.id)) return "#" + CSS.escape(el.id) + vis;
// 5. File input fallback
if (elType === "file") {
return 'input[type="file"]';
}
return null; return null;
} }
function buildOptionSelector(el) { function buildOptionSelector(el) {
if (el.id) return "#" + CSS.escape(el.id);
const tag = el.tagName.toLowerCase(); const tag = el.tagName.toLowerCase();
const name = el.name; const name = el.name;
const value = el.value; const value = el.value;
// 1. data-automation-id — stable on platforms that use automation attributes
const autoId = el.getAttribute("data-automation-id");
if (autoId) return '[data-automation-id="' + autoId + '"]';
// 2. name + value — stable form attribute
if (name && value) if (name && value)
return tag + '[name="' + name + '"][value="' + value + '"]'; return tag + '[name="' + name + '"][value="' + value + '"]';
if (name) return tag + '[name="' + name + '"]'; if (name) return tag + '[name="' + name + '"]';
// 3. Stable ID only (skip random session hashes)
if (el.id && isStableId(el.id)) return "#" + CSS.escape(el.id);
// Anonymous <input aria-checked> — find the associated label and // Anonymous <input aria-checked> — find the associated label and
// build a selector. Structure varies: label may wrap input, be a sibling, or // build a selector. Structure varies: label may wrap input, be a sibling, or
// both may be inside a <div role="group">. // both may be inside a <div role="group">.
@ -533,9 +575,13 @@
r.getAttribute("aria-label") || r.textContent.trim() || null; r.getAttribute("aria-label") || r.textContent.trim() || null;
const optSelector = r.getAttribute("data-automation-id") const optSelector = r.getAttribute("data-automation-id")
? '[data-automation-id="' + r.getAttribute("data-automation-id") + '"]' ? '[data-automation-id="' + r.getAttribute("data-automation-id") + '"]'
: r.id : r.id && isStableId(r.id)
? "#" + CSS.escape(r.id) ? "#" + CSS.escape(r.id)
: null; : optLabel && optLabel.length < 50
? '[role="radio"][aria-label="' +
optLabel.replace(/\\/g, "\\\\").replace(/"/g, '\\"') +
'"]'
: null;
if (optSelector) { if (optSelector) {
options.push({ options.push({
label: optLabel, label: optLabel,
@ -596,9 +642,13 @@
? '[data-automation-id="' + ? '[data-automation-id="' +
r.getAttribute("data-automation-id") + r.getAttribute("data-automation-id") +
'"]' '"]'
: r.id : r.id && isStableId(r.id)
? "#" + CSS.escape(r.id) ? "#" + CSS.escape(r.id)
: null; : optLabel && optLabel.length < 50
? '[role="radio"][aria-label="' +
optLabel.replace(/\\/g, "\\\\").replace(/"/g, '\\"') +
'"]'
: null;
if (optSelector) { if (optSelector) {
options.push({ options.push({
label: optLabel, label: optLabel,
@ -628,7 +678,8 @@
); );
for (const cb of ariaCheckboxes) { for (const cb of ariaCheckboxes) {
if (!isVisible(cb)) continue; if (!isVisible(cb)) continue;
const cbId = cb.getAttribute("data-automation-id") || cb.id || null; const cbAutoId = cb.getAttribute("data-automation-id");
const cbId = cbAutoId || cb.id || null;
if (!cbId || seen.has("aria_cb_" + cbId)) continue; if (!cbId || seen.has("aria_cb_" + cbId)) continue;
seen.add("aria_cb_" + cbId); seen.add("aria_cb_" + cbId);
const label = const label =
@ -636,11 +687,15 @@
getLabel(cb) || getLabel(cb) ||
cb.textContent.trim() || cb.textContent.trim() ||
null; null;
const selector = cbId const selector = cbAutoId
? '[data-automation-id="' + cbId + '"]' ? '[data-automation-id="' + cbAutoId + '"]'
: cb.id : cb.id && isStableId(cb.id)
? "#" + CSS.escape(cb.id) ? "#" + CSS.escape(cb.id)
: null; : label && label.length < 80
? '[role="checkbox"][aria-label="' +
label.replace(/\\/g, "\\\\").replace(/"/g, '\\"') +
'"]'
: null;
if (selector) { if (selector) {
fields.push({ fields.push({
label: label, label: label,

View file

@ -1526,7 +1526,7 @@ class SkyvernPage(Page):
field_index=idx, field_index=idx,
value=str(value)[:50], value=str(value)[:50],
options_count=len(field.get("options", [])), options_count=len(field.get("options", [])),
option_labels=[o.get("label", "?")[:30] for o in field.get("options", [])], option_labels=[(o.get("label") or "?")[:30] for o in field.get("options", [])],
) )
if isinstance(value, str): if isinstance(value, str):
try: try:

View file

@ -926,6 +926,14 @@ class MissingBrowserAddressError(SkyvernException):
super().__init__(f"Browser session {browser_session_id} does not have an address.") super().__init__(f"Browser session {browser_session_id} does not have an address.")
class BrowserSessionClosed(SkyvernHTTPException):
def __init__(self, browser_session_id: str) -> None:
super().__init__(
f"Browser session {browser_session_id} is closed.",
status_code=status.HTTP_410_GONE,
)
class BrowserSessionNotFound(SkyvernHTTPException): class BrowserSessionNotFound(SkyvernHTTPException):
def __init__(self, browser_session_id: str) -> None: def __init__(self, browser_session_id: str) -> None:
super().__init__( super().__init__(

View file

@ -1,3 +1,4 @@
import os
import uuid import uuid
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from datetime import datetime from datetime import datetime
@ -211,6 +212,25 @@ def create_api_app() -> FastAPI:
forge_app_instance = start_forge_app() forge_app_instance = start_forge_app()
# Initialize Laminar tracing after ForgeApp so auto-instrumentation works.
lmnr_api_key = os.environ.get("LMNR_PROJECT_API_KEY")
if lmnr_api_key:
try:
from lmnr import Laminar # noqa: PLC0415
lmnr_base_url = os.environ.get("LMNR_BASE_URL", "http://localhost")
lmnr_grpc_port = int(os.environ.get("LMNR_GRPC_PORT", "8001"))
lmnr_http_port = int(os.environ.get("LMNR_HTTP_PORT", "8000"))
Laminar.initialize(
project_api_key=lmnr_api_key,
base_url=lmnr_base_url,
grpc_port=lmnr_grpc_port,
http_port=lmnr_http_port,
)
LOG.info("Laminar tracing initialized", base_url=lmnr_base_url, grpc_port=lmnr_grpc_port)
except Exception as e:
LOG.warning("Failed to initialize Laminar tracing", error=str(e))
fastapi_app = FastAPI(lifespan=lifespan) fastapi_app = FastAPI(lifespan=lifespan)
# Add CORS middleware # Add CORS middleware

View file

@ -41,7 +41,7 @@ from skyvern.schemas.scripts import (
WorkflowScriptSummary, WorkflowScriptSummary,
) )
from skyvern.services import script_service, workflow_script_service from skyvern.services import script_service, workflow_script_service
from skyvern.services.script_reviewer import ScriptReviewer from skyvern.services.script_reviewer import ScriptReviewer, store_review_artifacts
from skyvern.services.workflow_script_service import create_script_version_from_review from skyvern.services.workflow_script_service import create_script_version_from_review
LOG = structlog.get_logger() LOG = structlog.get_logger()
@ -1350,7 +1350,7 @@ async def review_script_with_instructions(
# Run the reviewer # Run the reviewer
reviewer = ScriptReviewer() reviewer = ScriptReviewer()
updated_blocks = await reviewer.review_with_user_instructions( review_results = await reviewer.review_with_user_instructions(
organization_id=organization_id, organization_id=organization_id,
workflow_permanent_id=workflow_permanent_id, workflow_permanent_id=workflow_permanent_id,
script_revision_id=latest_script.script_revision_id, script_revision_id=latest_script.script_revision_id,
@ -1360,7 +1360,7 @@ async def review_script_with_instructions(
run_parameter_values=run_parameter_values or None, run_parameter_values=run_parameter_values or None,
) )
if not updated_blocks: if not review_results:
return ReviewScriptResponse( return ReviewScriptResponse(
script_id=latest_script.script_id, script_id=latest_script.script_id,
version=latest_script.version, version=latest_script.version,
@ -1368,6 +1368,9 @@ async def review_script_with_instructions(
message="No changes were needed — the current code already satisfies your instructions.", message="No changes were needed — the current code already satisfies your instructions.",
) )
# Extract code-only dict for creating the script version
updated_blocks = {label: r.code for label, r in review_results.items()}
# Create a new script version with the updated blocks # Create a new script version with the updated blocks
new_script = await create_script_version_from_review( new_script = await create_script_version_from_review(
organization_id=organization_id, organization_id=organization_id,
@ -1381,19 +1384,27 @@ async def review_script_with_instructions(
if not new_script: if not new_script:
raise HTTPException(status_code=500, detail="Failed to create new script version") raise HTTPException(status_code=500, detail="Failed to create new script version")
# Store reviewer artifacts (prompt + LLM response) for each block
await store_review_artifacts(
organization_id=organization_id,
script_id=new_script.script_id,
script_version=new_script.version,
review_results=review_results,
)
LOG.info( LOG.info(
"Script reviewed with user instructions", "Script reviewed with user instructions",
organization_id=organization_id, organization_id=organization_id,
workflow_permanent_id=workflow_permanent_id, workflow_permanent_id=workflow_permanent_id,
script_id=new_script.script_id, script_id=new_script.script_id,
version=new_script.version, version=new_script.version,
updated_blocks=list(updated_blocks.keys()), updated_blocks=list(review_results.keys()),
) )
return ReviewScriptResponse( return ReviewScriptResponse(
script_id=new_script.script_id, script_id=new_script.script_id,
version=new_script.version, version=new_script.version,
updated_blocks=list(updated_blocks.keys()), updated_blocks=list(review_results.keys()),
) )

View file

@ -5438,7 +5438,7 @@ class WorkflowService:
historical_episodes: list | None = None, historical_episodes: list | None = None,
) -> None: ) -> None:
"""Run the AI Script Reviewer and create a new script version if successful.""" """Run the AI Script Reviewer and create a new script version if successful."""
from skyvern.services.script_reviewer import ScriptReviewer from skyvern.services.script_reviewer import BlockReviewResult, ScriptReviewer, store_review_artifacts
from skyvern.services.workflow_script_service import create_script_version_from_review from skyvern.services.workflow_script_service import create_script_version_from_review
LOG.info( LOG.info(
@ -5474,8 +5474,8 @@ class WorkflowService:
regular_episodes = [ep for ep in episodes if ep.fallback_type != "conditional_agent"] regular_episodes = [ep for ep in episodes if ep.fallback_type != "conditional_agent"]
conditional_episodes = [ep for ep in episodes if ep.fallback_type == "conditional_agent"] conditional_episodes = [ep for ep in episodes if ep.fallback_type == "conditional_agent"]
updated_blocks: dict[str, str] = {} review_results: dict[str, BlockReviewResult] = {}
conditional_blocks: dict[str, str] = {} conditional_code: dict[str, str] = {}
# Review regular fallback episodes (code failures, new page variants) # Review regular fallback episodes (code failures, new page variants)
if regular_episodes: if regular_episodes:
@ -5489,7 +5489,7 @@ class WorkflowService:
run_parameter_values=run_parameter_values, run_parameter_values=run_parameter_values,
) )
if regular_updates: if regular_updates:
updated_blocks.update(regular_updates) review_results.update(regular_updates)
# Review conditional blocks that ran via agent — try to convert to code # Review conditional blocks that ran via agent — try to convert to code
if conditional_episodes: if conditional_episodes:
@ -5500,8 +5500,11 @@ class WorkflowService:
run_parameter_values=run_parameter_values, run_parameter_values=run_parameter_values,
) )
if conditional_updates: if conditional_updates:
conditional_blocks.update(conditional_updates) conditional_code.update(conditional_updates)
updated_blocks.update(conditional_updates)
# Build code-only dicts for create_script_version_from_review
updated_blocks: dict[str, str] = {label: r.code for label, r in review_results.items()}
updated_blocks.update(conditional_code)
if not updated_blocks: if not updated_blocks:
LOG.info( LOG.info(
@ -5534,7 +5537,7 @@ class WorkflowService:
updated_blocks=updated_blocks, updated_blocks=updated_blocks,
workflow=workflow, workflow=workflow,
workflow_run=workflow_run, workflow_run=workflow_run,
conditional_blocks=conditional_blocks, conditional_blocks=conditional_code,
) )
if new_script: if new_script:
@ -5542,7 +5545,15 @@ class WorkflowService:
"Script reviewer created new version", "Script reviewer created new version",
workflow_permanent_id=workflow.workflow_permanent_id, workflow_permanent_id=workflow.workflow_permanent_id,
new_version=new_script.version, new_version=new_script.version,
conditional_coded=list(conditional_blocks.keys()) if conditional_blocks else [], conditional_coded=list(conditional_code.keys()) if conditional_code else [],
)
# Store reviewer prompt/response artifacts alongside the new script version
await store_review_artifacts(
organization_id=workflow.organization_id,
script_id=new_script.script_id,
script_version=new_script.version,
review_results=review_results,
) )
# Mark all episodes as reviewed # Mark all episodes as reviewed

View file

@ -1,6 +1,7 @@
from __future__ import annotations from __future__ import annotations
import asyncio import asyncio
import dataclasses
import json import json
import re import re
from typing import Literal, Sequence from typing import Literal, Sequence
@ -15,6 +16,66 @@ from skyvern.schemas.scripts import ScriptBranchHit, ScriptFallbackEpisode
LOG = structlog.get_logger() LOG = structlog.get_logger()
@dataclasses.dataclass(frozen=True)
class BlockReviewResult:
"""Result of reviewing a single block, carrying the code and LLM artifacts."""
code: str
original_prompt: str
"""The initial prompt sent to the LLM, containing full fallback episode context,
DOM snapshots, and review instructions."""
final_prompt: str
"""The prompt that produced the accepted code. Same as original_prompt on first
attempt; on retry this is the retry prompt with validation error context."""
llm_response_raw: str
async def store_review_artifacts(
organization_id: str,
script_id: str,
script_version: int,
review_results: dict[str, BlockReviewResult],
) -> None:
"""Store reviewer prompt/response artifacts for each reviewed block.
Failures are logged as warnings but never propagate artifact persistence
must not block the review pipeline.
"""
for block_label, result in review_results.items():
try:
await app.ARTIFACT_MANAGER.create_script_file_artifact(
organization_id=organization_id,
script_id=script_id,
script_version=script_version,
file_path=f"review/{block_label}_prompt.txt",
data=result.original_prompt.encode("utf-8"),
)
# Store retry prompt separately if the reviewer retried (validation failure)
if result.final_prompt != result.original_prompt:
await app.ARTIFACT_MANAGER.create_script_file_artifact(
organization_id=organization_id,
script_id=script_id,
script_version=script_version,
file_path=f"review/{block_label}_retry_prompt.txt",
data=result.final_prompt.encode("utf-8"),
)
await app.ARTIFACT_MANAGER.create_script_file_artifact(
organization_id=organization_id,
script_id=script_id,
script_version=script_version,
file_path=f"review/{block_label}_response.json",
data=result.llm_response_raw.encode("utf-8"),
)
except Exception:
LOG.warning(
"Failed to store reviewer artifacts",
block_label=block_label,
script_id=script_id,
exc_info=True,
)
# Exhaustive allowlist of valid page.* API references. # Exhaustive allowlist of valid page.* API references.
# Anything not in this set that appears as `page.<name>` in generated code is an error. # Anything not in this set that appears as `page.<name>` in generated code is an error.
_ALLOWED_PAGE_API: frozenset[str] = frozenset( _ALLOWED_PAGE_API: frozenset[str] = frozenset(
@ -86,10 +147,10 @@ class ScriptReviewer:
historical_episodes: list[ScriptFallbackEpisode] | None = None, historical_episodes: list[ScriptFallbackEpisode] | None = None,
run_parameter_values: dict[str, str] | None = None, run_parameter_values: dict[str, str] | None = None,
user_instructions: str | None = None, user_instructions: str | None = None,
) -> dict[str, str] | None: ) -> dict[str, BlockReviewResult] | None:
"""Review fallback episodes and generate updated code for affected blocks. """Review fallback episodes and generate updated code for affected blocks.
Returns {block_label: updated_code} or None if review fails. Returns {block_label: BlockReviewResult} or None if review fails.
""" """
if not episodes: if not episodes:
return None return None
@ -173,11 +234,11 @@ class ScriptReviewer:
episodes_by_block[episode.block_label] = [] episodes_by_block[episode.block_label] = []
episodes_by_block[episode.block_label].append(episode) episodes_by_block[episode.block_label].append(episode)
updated_blocks: dict[str, str] = {} updated_blocks: dict[str, BlockReviewResult] = {}
for block_label, block_episodes in episodes_by_block.items(): for block_label, block_episodes in episodes_by_block.items():
try: try:
updated_code = await self._review_block( result = await self._review_block(
organization_id=organization_id, organization_id=organization_id,
workflow_permanent_id=workflow_permanent_id, workflow_permanent_id=workflow_permanent_id,
script_revision_id=script_revision_id, script_revision_id=script_revision_id,
@ -191,8 +252,8 @@ class ScriptReviewer:
user_instructions=user_instructions, user_instructions=user_instructions,
historical_run_params=historical_run_params, historical_run_params=historical_run_params,
) )
if updated_code: if result:
updated_blocks[block_label] = updated_code updated_blocks[block_label] = result
except Exception: except Exception:
LOG.exception( LOG.exception(
"ScriptReviewer: failed to review block", "ScriptReviewer: failed to review block",
@ -213,14 +274,14 @@ class ScriptReviewer:
user_instructions: str, user_instructions: str,
episodes: list[ScriptFallbackEpisode] | None = None, episodes: list[ScriptFallbackEpisode] | None = None,
run_parameter_values: dict[str, str] | None = None, run_parameter_values: dict[str, str] | None = None,
) -> dict[str, str] | None: ) -> dict[str, BlockReviewResult] | None:
"""Review script blocks using user-provided instructions. """Review script blocks using user-provided instructions.
When episodes are available, they are included as context for the LLM. When episodes are available, they are included as context for the LLM.
When no episodes are available, the reviewer works from the existing code When no episodes are available, the reviewer works from the existing code
and the user's instructions alone. and the user's instructions alone.
Returns {block_label: updated_code} or None if review fails. Returns {block_label: BlockReviewResult} or None if review fails.
""" """
if episodes: if episodes:
return await self.review_fallback_episodes( return await self.review_fallback_episodes(
@ -249,10 +310,10 @@ class ScriptReviewer:
) )
return None return None
updated_blocks: dict[str, str] = {} updated_blocks: dict[str, BlockReviewResult] = {}
for block_label, existing_code in block_codes.items(): for block_label, existing_code in block_codes.items():
try: try:
updated_code = await self._review_block( result = await self._review_block(
organization_id=organization_id, organization_id=organization_id,
workflow_permanent_id=workflow_permanent_id, workflow_permanent_id=workflow_permanent_id,
script_revision_id=script_revision_id, script_revision_id=script_revision_id,
@ -264,8 +325,8 @@ class ScriptReviewer:
user_instructions=user_instructions, user_instructions=user_instructions,
preloaded_code=existing_code, preloaded_code=existing_code,
) )
if updated_code: if result:
updated_blocks[block_label] = updated_code updated_blocks[block_label] = result
except Exception: except Exception:
LOG.exception( LOG.exception(
"ScriptReviewer: failed to review block with instructions", "ScriptReviewer: failed to review block with instructions",
@ -559,8 +620,12 @@ class ScriptReviewer:
user_instructions: str | None = None, user_instructions: str | None = None,
preloaded_code: str | None = None, preloaded_code: str | None = None,
historical_run_params: dict[str, dict[str, str]] | None = None, historical_run_params: dict[str, dict[str, str]] | None = None,
) -> str | None: ) -> BlockReviewResult | None:
"""Review a single block's fallback episodes and generate updated code.""" """Review a single block's fallback episodes and generate updated code.
Returns a BlockReviewResult with the code, prompt, and raw LLM response,
or None if review fails.
"""
LOG.info( LOG.info(
"ScriptReviewer: starting block review", "ScriptReviewer: starting block review",
block_label=block_label, block_label=block_label,
@ -870,13 +935,60 @@ class ScriptReviewer:
current_prompt = self._build_retry_prompt(updated_code, hardcoded_error, function_signature) current_prompt = self._build_retry_prompt(updated_code, hardcoded_error, function_signature)
continue continue
# Validate ai='proactive' misuse (should be 'fallback' on interaction methods)
proactive_error = self._validate_proactive_misuse(updated_code)
if proactive_error is not None:
LOG.warning(
"ScriptReviewer: proactive misuse detected, retrying",
block_label=block_label,
attempt=attempt,
error=proactive_error,
)
if attempt < max_attempts:
current_prompt = self._build_retry_prompt(updated_code, proactive_error, function_signature)
continue
# Validate fragile auto-generated selectors
fragile_error = self._validate_fragile_selectors(updated_code)
if fragile_error is not None:
LOG.warning(
"ScriptReviewer: fragile selector detected, retrying",
block_label=block_label,
attempt=attempt,
error=fragile_error,
)
if attempt < max_attempts:
current_prompt = self._build_retry_prompt(updated_code, fragile_error, function_signature)
continue
# Validate hardcoded run-specific data in selectors/prompts
run_data_error = self._validate_hardcoded_run_data(updated_code)
if run_data_error is not None:
LOG.warning(
"ScriptReviewer: hardcoded run data detected, retrying",
block_label=block_label,
attempt=attempt,
error=run_data_error,
)
if attempt < max_attempts:
current_prompt = self._build_retry_prompt(updated_code, run_data_error, function_signature)
continue
LOG.info( LOG.info(
"ScriptReviewer: generated updated code for block", "ScriptReviewer: generated updated code for block",
block_label=block_label, block_label=block_label,
attempt=attempt, attempt=attempt,
code_length=len(updated_code), code_length=len(updated_code),
) )
return updated_code return BlockReviewResult(
code=updated_code,
original_prompt=reviewer_prompt,
final_prompt=current_prompt,
llm_response_raw=json.dumps(
llm_response if not isinstance(llm_response, str) else {"raw": llm_response},
default=str,
),
)
except Exception: except Exception:
LOG.exception( LOG.exception(
@ -1794,8 +1906,10 @@ class ScriptReviewer:
and func.value.value == "page" and func.value.value == "page"
) )
# Regex to extract string literals (single or double quoted, excluding escaped quotes) # Regex to extract string literals (single or double quoted, excluding escaped quotes).
_STRING_LITERAL_RE: re.Pattern[str] = re.compile(r"""(?<![\\])(['"])((?:(?!\1)[^\\]|\\.)*)(\1)""") # Uses separate alternations for single- and double-quoted strings to avoid
# backtracking ambiguity (CodeQL py/redos).
_STRING_LITERAL_RE: re.Pattern[str] = re.compile(r""""([^"\\]*(?:\\.[^"\\]*)*)"|'([^'\\]*(?:\\.[^'\\]*)*)'""")
def _validate_no_hardcoded_values( def _validate_no_hardcoded_values(
self, self,
@ -1821,7 +1935,8 @@ class ScriptReviewer:
if stripped.startswith("#"): if stripped.startswith("#"):
continue continue
for match in self._STRING_LITERAL_RE.finditer(line): for match in self._STRING_LITERAL_RE.finditer(line):
literal_value = match.group(2) # group(1) = double-quoted content, group(2) = single-quoted content
literal_value = match.group(1) or match.group(2)
if literal_value: if literal_value:
code_literals.add(literal_value) code_literals.add(literal_value)
@ -1845,6 +1960,217 @@ class ScriptReviewer:
f"Replace ALL hardcoded parameter values with context.parameters['key'] references." f"Replace ALL hardcoded parameter values with context.parameters['key'] references."
) )
# Methods whose primary purpose is interaction — ai='proactive' on these
# defeats caching by always invoking the LLM even when the selector works.
_INTERACTION_METHODS: frozenset[str] = frozenset({"click", "fill", "fill_autocomplete", "type", "select_option"})

# Regex to find page.<method>( calls; group(1) captures the bare method name
# so callers can test membership in _INTERACTION_METHODS.
_PAGE_CALL_RE: re.Pattern[str] = re.compile(r"""\bpage\.(\w+)\s*\(""")
def _validate_proactive_misuse(self, code: str) -> str | None:
"""Flag ai='proactive' on interaction methods (click, fill, type, select_option).
Using ai='proactive' means the LLM is always invoked even when the selector
works, defeating the zero-LLM-cost goal of caching. These should almost always
use ai='fallback' instead.
Returns an error message or None if no issues found.
"""
issues: list[str] = []
lines = code.split("\n")
i = 0
while i < len(lines):
stripped = lines[i].lstrip()
if stripped.startswith("#"):
i += 1
continue
match = self._PAGE_CALL_RE.search(lines[i])
if match and match.group(1) in self._INTERACTION_METHODS:
# Gather the full call (may span multiple lines)
call_text = lines[i]
end_line = self._find_call_end(lines, i)
if end_line > i:
call_text = "\n".join(lines[i : end_line + 1])
if re.search(r"""\bai\s*=\s*['"]proactive['"]""", call_text):
issues.append(f"page.{match.group(1)}() on line {i + 1}")
i = end_line + 1
else:
i += 1
if not issues:
return None
return (
f"ai='proactive' used on interaction methods: {', '.join(issues[:5])}. "
f"Using ai='proactive' on interaction methods ({'/'.join(sorted(self._INTERACTION_METHODS))}) means the LLM is "
f"ALWAYS invoked even when the selector works, defeating the zero-LLM-cost "
f"goal of caching. Change to ai='fallback' — this tries the selector first "
f"and only invokes the LLM if the selector fails."
)
# Known auto-generated ID patterns from popular web frameworks.
# These IDs change across deployments/sessions and break cached selectors.
_FRAGILE_ID_PATTERNS: list[re.Pattern[str]] = [
    re.compile(r"#dnn_\w+"),  # DotNetNuke
    re.compile(r"#ember[\-_]?\d+"),  # Ember.js
    re.compile(r"#react-select-\d+"),  # React Select
    re.compile(r"\[data-reactid=['\"][\d.]+['\"]\]"),  # React (legacy)
    re.compile(r"#ext-gen-?\d+"),  # ExtJS
    re.compile(r"\.css-[a-z0-9]{4,}"),  # CSS-in-JS (Emotion, styled-components)
    re.compile(r"\.MuiButton-root|\.Mui\w+-\w+", re.IGNORECASE),  # Material UI
    re.compile(r"#__next\w+"),  # Next.js internal
    re.compile(r"\[data-v-[a-f0-9]+\]"),  # Vue scoped styles
]

# Regex to find selector= string values in page.* calls.
# Handles nested quotes: selector='a:has-text("X")' or selector="a:has-text('X')"
# Uses [^'"] / [^"'] instead of [^'] / [^"] to prevent backtracking ambiguity (CodeQL py/redos).
# NOTE: the optional f? prefix means f-string selectors are matched too, so the
# captured value may still contain {placeholder} expressions.
_SELECTOR_SINGLE_RE: re.Pattern[str] = re.compile(r"""\bselector\s*=\s*f?'([^'"]*(?:"[^"]*"[^'"]*)*)'""")
_SELECTOR_DOUBLE_RE: re.Pattern[str] = re.compile(r'''\bselector\s*=\s*f?"([^"']*(?:'[^']*'[^"']*)*)"''')
def _find_selector_values(self, text: str) -> list[str]:
"""Extract selector string values from text (single or multi-line), handling nested quotes."""
results = []
for m in self._SELECTOR_SINGLE_RE.finditer(text):
results.append(m.group(1))
for m in self._SELECTOR_DOUBLE_RE.finditer(text):
results.append(m.group(1))
return results
def _validate_fragile_selectors(self, code: str) -> str | None:
"""Flag selectors using auto-generated IDs from web frameworks.
Auto-generated IDs (e.g., #dnn_ctl00_xxx, #ember-123, .css-1a2b3c) change
across deployments and are a leading cause of selector breakage and AI fallbacks.
Uses multi-line call gathering so selectors split across lines are still caught.
Returns an error message or None if no issues found.
"""
issues: list[str] = []
lines = code.split("\n")
i = 0
while i < len(lines):
stripped = lines[i].lstrip()
if stripped.startswith("#"):
i += 1
continue
# Gather full call text for page.* calls that may span multiple lines
match = self._PAGE_CALL_RE.search(lines[i])
if match:
end_line = self._find_call_end(lines, i)
call_text = "\n".join(lines[i : end_line + 1]) if end_line > i else lines[i]
line_num = i + 1 # report the starting line
else:
call_text = lines[i]
line_num = i + 1
for selector_val in self._find_selector_values(call_text):
for pattern in self._FRAGILE_ID_PATTERNS:
if pattern.search(selector_val):
issues.append(
f"line {line_num}: selector='{selector_val[:60]}' matches fragile pattern {pattern.pattern}"
)
break # one match per selector is enough
i = end_line + 1 if match and end_line > i else i + 1
if not issues:
return None
return (
f"Fragile auto-generated selectors detected: {'; '.join(issues[:3])}. "
f"These IDs are generated by web frameworks (DotNetNuke, Ember, React, MUI, etc.) "
f"and change across deployments. Replace with stable selectors: "
f"aria-label, placeholder, name, role, data-testid, or :has-text() with stable text. "
f"If no stable selector exists, use ai='fallback' with a descriptive prompt and NO selector."
)
# Regex for dates in common formats (MM/DD/YYYY, M/D/YYYY, YYYY-MM-DD)
# TODO: Could tighten month/day ranges to reduce false positives on URL-like strings,
# but in practice LLM-generated selectors rarely contain such values.
_DATE_RE: re.Pattern[str] = re.compile(r"\b(?:\d{1,2}/\d{1,2}/\d{4}|\d{4}-\d{2}-\d{2})\b")

# Regex for :has-text("X") where X is very short (1-2 chars) — likely hardcoded run data.
# group(1) is the captured text; '.' does not cross newlines here (no DOTALL).
_SHORT_HAS_TEXT_RE: re.Pattern[str] = re.compile(r""":has-text\(\s*['"](.{1,2})['"]\s*\)""")

# TODO: prompt extraction regex won't handle triple-quoted or multi-line prompt values.
# Acceptable for now since LLM-generated code rarely uses triple-quoted prompts.
# re.DOTALL lets the lazy group span newlines when the gathered call text is multi-line.
_PROMPT_RE: re.Pattern[str] = re.compile(r"""\bprompt\s*=\s*(?:f?['"])(.*?)(?:['"])""", re.DOTALL)
def _validate_hardcoded_run_data(self, code: str) -> str | None:
"""Flag selectors and prompts containing hardcoded run-specific data.
Catches:
1. Date literals (MM/DD/YYYY, YYYY-MM-DD) in selector= or prompt= values
that should use context.parameters instead
2. Very short :has-text() values (1-2 chars) that are likely meaningless
data from the original recording (e.g., a:has-text("6"))
Uses multi-line call gathering so selectors/prompts split across lines are still caught.
Returns an error message or None if no issues found.
"""
issues: list[str] = []
lines = code.split("\n")
i = 0
while i < len(lines):
stripped = lines[i].lstrip()
if stripped.startswith("#"):
i += 1
continue
# Gather full call text for page.* calls that may span multiple lines
match = self._PAGE_CALL_RE.search(lines[i])
if match:
end_line = self._find_call_end(lines, i)
call_text = "\n".join(lines[i : end_line + 1]) if end_line > i else lines[i]
else:
call_text = lines[i]
line_num = i + 1 # report the starting line
# Check selectors for hardcoded dates
for selector_val in self._find_selector_values(call_text):
# Check for dates in selectors
date_match = self._DATE_RE.search(selector_val)
if date_match:
issues.append(
f"line {line_num}: selector contains hardcoded date '{date_match.group()}'"
f"use context.parameters for date values"
)
# Check for very short :has-text() values
for ht_match in self._SHORT_HAS_TEXT_RE.finditer(selector_val):
short_text = ht_match.group(1)
# Allow common stable short texts
if short_text.lower() not in {"ok", "no", "x", "", "", ""}:
issues.append(
f'line {line_num}: selector has :has-text("{short_text}") — '
f"a 1-2 character :has-text() value is almost certainly "
f"hardcoded data from the recording run, not a stable selector"
)
# Check prompts for hardcoded dates (only in prompt= kwargs, not general strings)
prompt_match = self._PROMPT_RE.search(call_text)
if prompt_match:
prompt_val = prompt_match.group(1)
date_match = self._DATE_RE.search(prompt_val)
if date_match:
issues.append(
f"line {line_num}: prompt contains hardcoded date '{date_match.group()}'"
f"use a parameter reference like context.parameters['download_start_date']"
)
i = end_line + 1 if match and end_line > i else i + 1
if not issues:
return None
return (
f"Hardcoded run-specific data detected: {'; '.join(issues[:3])}. "
f"Dates, invoice numbers, and other per-run values must NOT be hardcoded in "
f"selectors or prompts. Use context.parameters['key'] for dynamic values, "
f"or use ai='fallback' with a generic prompt that describes the intent."
)
# Regexes for extracting branch return values from generated conditional code. # Regexes for extracting branch return values from generated conditional code.
_BRANCH_LABEL_STR_RE: re.Pattern[str] = re.compile(r"""["']next_block_label["']\s*:\s*["']([^"']+)["']""") _BRANCH_LABEL_STR_RE: re.Pattern[str] = re.compile(r"""["']next_block_label["']\s*:\s*["']([^"']+)["']""")
_BRANCH_LABEL_NONE_RE: re.Pattern[str] = re.compile(r"""["']next_block_label["']\s*:\s*None""") _BRANCH_LABEL_NONE_RE: re.Pattern[str] = re.compile(r"""["']next_block_label["']\s*:\s*None""")

View file

@ -531,11 +531,7 @@ class ActionHandler:
# renderable content). Detect this and navigate back to the original URL so # renderable content). Detect this and navigate back to the original URL so
# subsequent steps are not stuck on a blank page. # subsequent steps are not stuck on a blank page.
blank_page_urls = {"about:blank", ":"} blank_page_urls = {"about:blank", ":"}
if ( if page.url in blank_page_urls and page_url_before_download not in blank_page_urls:
not page.is_closed()
and page.url in blank_page_urls
and page_url_before_download not in blank_page_urls
):
LOG.warning( LOG.warning(
"Working page navigated to blank after download action, navigating back to original URL", "Working page navigated to blank after download action, navigating back to original URL",
original_url=page_url_before_download, original_url=page_url_before_download,

View file

@ -10,7 +10,7 @@ import structlog
from playwright._impl._errors import TargetClosedError from playwright._impl._errors import TargetClosedError
from skyvern.config import settings from skyvern.config import settings
from skyvern.exceptions import BrowserSessionNotRenewable, MissingBrowserAddressError from skyvern.exceptions import BrowserSessionClosed, BrowserSessionNotRenewable, MissingBrowserAddressError
from skyvern.forge import app from skyvern.forge import app
from skyvern.forge.sdk.db.agent_db import AgentDB from skyvern.forge.sdk.db.agent_db import AgentDB
from skyvern.forge.sdk.db.polls import wait_on_persistent_browser_address from skyvern.forge.sdk.db.polls import wait_on_persistent_browser_address
@ -222,6 +222,9 @@ class DefaultPersistentSessionsManager(PersistentSessionsManager):
if persistent_browser_session is None: if persistent_browser_session is None:
raise Exception(f"Persistent browser session not found for {browser_session_id}") raise Exception(f"Persistent browser session not found for {browser_session_id}")
if is_final_status(persistent_browser_session.status):
raise BrowserSessionClosed(browser_session_id)
await self.occupy_browser_session( await self.occupy_browser_session(
session_id=browser_session_id, session_id=browser_session_id,
runnable_type=runnable_type, runnable_type=runnable_type,

View file

@ -406,7 +406,6 @@ async def test_handle_action_navigates_back_from_blank_page_after_download() ->
# Page starts at a real URL; the mocked action will navigate it to about:blank # Page starts at a real URL; the mocked action will navigate it to about:blank
page = MagicMock() page = MagicMock()
page.url = original_url page.url = original_url
page.is_closed.return_value = False
browser_state = MagicMock() browser_state = MagicMock()
# Same page count before and after (no extra tab opened by the print action) # Same page count before and after (no extra tab opened by the print action)

View file

@ -0,0 +1,234 @@
"""Tests for MCP DOM inspection tools (get_html, get_value, get_styles)."""
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock
import pytest
from skyvern.cli.core.result import BrowserContext
from skyvern.cli.mcp_tools import inspection as mcp_inspection
# ═══════════════════════════════════════════════════
# Helpers
# ═══════════════════════════════════════════════════
def _make_mock_page(url: str = "https://example.com") -> MagicMock:
page = MagicMock()
page.url = url
locator = MagicMock()
locator.evaluate = AsyncMock(return_value="<span>hello</span>")
locator.input_value = AsyncMock(return_value="test-value")
page.locator = MagicMock(return_value=locator)
return page
def _make_skyvern_page(page: MagicMock) -> MagicMock:
"""Mimic SkyvernBrowserPage which delegates attribute access to the raw page."""
wrapper = MagicMock()
wrapper.page = page
wrapper.locator = page.locator
wrapper.url = page.url
return wrapper
def _patch_get_page(monkeypatch: pytest.MonkeyPatch, page: MagicMock, ctx: BrowserContext) -> AsyncMock:
    """Swap mcp_inspection.get_page for a stub yielding (wrapped page, ctx).

    Returns the AsyncMock so tests can inspect how get_page was awaited.
    """
    stub = AsyncMock(return_value=(_make_skyvern_page(page), ctx))
    monkeypatch.setattr(mcp_inspection, "get_page", stub)
    return stub
# ═══════════════════════════════════════════════════
# skyvern_get_html
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_get_html_inner(monkeypatch: pytest.MonkeyPatch) -> None:
    """Happy path: innerHTML is returned with outer=False and correct length."""
    page = _make_mock_page()
    page.locator.return_value.evaluate = AsyncMock(return_value="<span>hello</span>")
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_html(selector="#content")
    assert result["ok"] is True
    assert result["data"]["html"] == "<span>hello</span>"
    assert result["data"]["outer"] is False
    assert result["data"]["length"] == len("<span>hello</span>")
    page.locator.assert_called_with("#content")


@pytest.mark.asyncio
async def test_get_html_outer(monkeypatch: pytest.MonkeyPatch) -> None:
    """outer=True returns outerHTML, including the element's own tag."""
    page = _make_mock_page()
    page.locator.return_value.evaluate = AsyncMock(return_value='<div id="content"><span>hello</span></div>')
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_html(selector="#content", outer=True)
    assert result["ok"] is True
    assert result["data"]["outer"] is True
    assert "<div" in result["data"]["html"]


@pytest.mark.asyncio
async def test_get_html_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """BrowserNotAvailableError from get_page surfaces as ok=False."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_inspection, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    result = await mcp_inspection.skyvern_get_html(selector="#x")
    assert result["ok"] is False


@pytest.mark.asyncio
async def test_get_html_bad_selector(monkeypatch: pytest.MonkeyPatch) -> None:
    """Locator evaluation failure is reported in the error message."""
    page = _make_mock_page()
    page.locator.return_value.evaluate = AsyncMock(side_effect=RuntimeError("Element not found"))
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_html(selector="#nonexistent")
    assert result["ok"] is False
    assert "Element not found" in result["error"]["message"]


@pytest.mark.asyncio
async def test_get_html_empty(monkeypatch: pytest.MonkeyPatch) -> None:
    """An empty element yields ok=True with empty html and zero length."""
    page = _make_mock_page()
    page.locator.return_value.evaluate = AsyncMock(return_value="")
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_html(selector="#empty")
    assert result["ok"] is True
    assert result["data"]["html"] == ""
    assert result["data"]["length"] == 0


# ═══════════════════════════════════════════════════
# skyvern_get_value
# ═══════════════════════════════════════════════════


@pytest.mark.asyncio
async def test_get_value(monkeypatch: pytest.MonkeyPatch) -> None:
    """Happy path: input_value is returned along with the selector used."""
    page = _make_mock_page()
    page.locator.return_value.input_value = AsyncMock(return_value="user@example.com")
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_value(selector="#email")
    assert result["ok"] is True
    assert result["data"]["value"] == "user@example.com"
    assert result["data"]["selector"] == "#email"


@pytest.mark.asyncio
async def test_get_value_empty(monkeypatch: pytest.MonkeyPatch) -> None:
    """An empty input still succeeds with value == ''."""
    page = _make_mock_page()
    page.locator.return_value.input_value = AsyncMock(return_value="")
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_value(selector="#empty-input")
    assert result["ok"] is True
    assert result["data"]["value"] == ""


@pytest.mark.asyncio
async def test_get_value_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """BrowserNotAvailableError from get_page surfaces as ok=False."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_inspection, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    result = await mcp_inspection.skyvern_get_value(selector="#x")
    assert result["ok"] is False


@pytest.mark.asyncio
async def test_get_value_not_input(monkeypatch: pytest.MonkeyPatch) -> None:
    """A non-input element's failure is reported in the error message."""
    page = _make_mock_page()
    page.locator.return_value.input_value = AsyncMock(side_effect=RuntimeError("Not an input element"))
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_value(selector="#div-element")
    assert result["ok"] is False
    assert "Not an input element" in result["error"]["message"]


# ═══════════════════════════════════════════════════
# skyvern_get_styles
# ═══════════════════════════════════════════════════


@pytest.mark.asyncio
async def test_get_styles_specific_props(monkeypatch: pytest.MonkeyPatch) -> None:
    """Requesting specific properties returns exactly those, with a count."""
    page = _make_mock_page()
    page.locator.return_value.evaluate = AsyncMock(return_value={"color": "rgb(0, 0, 0)", "font-size": "16px"})
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_styles(selector="#heading", properties=["color", "font-size"])
    assert result["ok"] is True
    assert result["data"]["styles"]["color"] == "rgb(0, 0, 0)"
    assert result["data"]["styles"]["font-size"] == "16px"
    assert result["data"]["count"] == 2


@pytest.mark.asyncio
async def test_get_styles_all(monkeypatch: pytest.MonkeyPatch) -> None:
    """Omitting properties returns the full computed-style map."""
    styles = {f"prop-{i}": f"value-{i}" for i in range(50)}
    page = _make_mock_page()
    page.locator.return_value.evaluate = AsyncMock(return_value=styles)
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_styles(selector="body")
    assert result["ok"] is True
    assert result["data"]["count"] == 50


@pytest.mark.asyncio
async def test_get_styles_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """BrowserNotAvailableError from get_page surfaces as ok=False."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_inspection, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    result = await mcp_inspection.skyvern_get_styles(selector="#x")
    assert result["ok"] is False


@pytest.mark.asyncio
async def test_get_styles_bad_selector(monkeypatch: pytest.MonkeyPatch) -> None:
    """Locator evaluation failure is reported in the error message."""
    page = _make_mock_page()
    page.locator.return_value.evaluate = AsyncMock(side_effect=RuntimeError("Selector not found"))
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_styles(selector="#nope", properties=["color"])
    assert result["ok"] is False
    assert "Selector not found" in result["error"]["message"]


@pytest.mark.asyncio
async def test_get_styles_empty_properties(monkeypatch: pytest.MonkeyPatch) -> None:
    """An empty properties list yields ok=True with count 0."""
    page = _make_mock_page()
    page.locator.return_value.evaluate = AsyncMock(return_value={})
    ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, page, ctx)
    result = await mcp_inspection.skyvern_get_styles(selector="#hidden", properties=[])
    assert result["ok"] is True
    assert result["data"]["count"] == 0

View file

@ -0,0 +1,362 @@
"""Tests for MCP HAR recording tools (skyvern_har_start, skyvern_har_stop)."""
from __future__ import annotations
from collections import deque
from types import SimpleNamespace
from unittest.mock import AsyncMock, MagicMock
import pytest
from skyvern.cli.core.result import BrowserContext
from skyvern.cli.mcp_tools import inspection as mcp_inspection
# ═══════════════════════════════════════════════════
# Helpers
# ═══════════════════════════════════════════════════
def _make_mock_page(url: str = "https://example.com") -> MagicMock:
page = MagicMock()
page.url = url
return page
def _make_skyvern_page(page: MagicMock) -> MagicMock:
wrapper = MagicMock()
wrapper.page = page
wrapper.url = page.url
return wrapper
def _make_session_state(**overrides):
defaults = {
"har_enabled": False,
"_har_entries": deque(maxlen=5000),
"console_messages": deque(maxlen=1000),
"network_requests": deque(maxlen=1000),
"dialog_events": deque(maxlen=1000),
"_hooked_page_ids": set(),
"_hooked_handlers_map": {},
}
defaults.update(overrides)
return SimpleNamespace(**defaults)
def _patch_get_page(monkeypatch: pytest.MonkeyPatch, page: MagicMock, ctx: BrowserContext) -> AsyncMock:
    """Swap mcp_inspection.get_page for a stub yielding (wrapped page, ctx)."""
    stub = AsyncMock(return_value=(_make_skyvern_page(page), ctx))
    monkeypatch.setattr(mcp_inspection, "get_page", stub)
    return stub
def _patch_stateless(monkeypatch: pytest.MonkeyPatch, stateless: bool = False) -> None:
monkeypatch.setattr("skyvern.cli.core.session_manager.is_stateless_http_mode", lambda: stateless)
# ═══════════════════════════════════════════════════
# HAR entry capture in _on_response
# ═══════════════════════════════════════════════════
def test_on_response_captures_har_when_enabled() -> None:
    """With har_enabled, a response is folded into a HAR 1.2-shaped entry."""
    state = _make_session_state(har_enabled=True)
    raw_page = MagicMock()
    raw_page.url = "https://example.com"
    handlers = mcp_inspection._make_page_handlers(state, raw_page)
    on_response = handlers["response"]
    response = MagicMock()
    response.url = "https://api.example.com/data"
    response.status = 200
    response.status_text = "OK"
    response.headers = {"content-type": "application/json", "content-length": "42"}
    response.request.method = "GET"
    response.request.headers = {"accept": "application/json"}
    response.request.timing = {"responseEnd": 150.5}
    on_response(response)
    assert len(state._har_entries) == 1
    entry = state._har_entries[0]
    # Request half of the HAR entry: fixed HTTP version and -1 "unknown" sizes.
    assert entry["request"]["method"] == "GET"
    assert entry["request"]["httpVersion"] == "HTTP/1.1"
    assert entry["request"]["queryString"] == []
    assert entry["request"]["cookies"] == []
    assert entry["request"]["headersSize"] == -1
    assert entry["request"]["bodySize"] == -1
    # Response half: content size taken from the content-length header.
    assert entry["response"]["status"] == 200
    assert entry["response"]["httpVersion"] == "HTTP/1.1"
    assert entry["response"]["redirectURL"] == ""
    assert entry["response"]["headersSize"] == -1
    assert entry["response"]["bodySize"] == -1
    assert entry["response"]["cookies"] == []
    assert entry["response"]["content"]["mimeType"] == "application/json"
    assert entry["response"]["content"]["size"] == 42


def test_on_response_skips_har_when_disabled() -> None:
    """Without har_enabled, no HAR entry is stored but normal capture still runs."""
    state = _make_session_state(har_enabled=False)
    raw_page = MagicMock()
    raw_page.url = "https://example.com"
    handlers = mcp_inspection._make_page_handlers(state, raw_page)
    on_response = handlers["response"]
    response = MagicMock()
    response.url = "https://api.example.com/data"
    response.status = 200
    response.headers = {"content-type": "text/html"}
    response.request.method = "GET"
    response.request.headers = {}
    response.request.timing = {}
    on_response(response)
    assert len(state._har_entries) == 0
    assert len(state.network_requests) == 1  # Normal capture still works


def test_on_response_redacts_auth_headers_in_har() -> None:
    """Sensitive headers (authorization, cookie, set-cookie) are dropped from HAR."""
    state = _make_session_state(har_enabled=True)
    raw_page = MagicMock()
    raw_page.url = "https://example.com"
    handlers = mcp_inspection._make_page_handlers(state, raw_page)
    on_response = handlers["response"]
    response = MagicMock()
    response.url = "https://api.example.com/data"
    response.status = 200
    response.status_text = "OK"
    response.headers = {"content-type": "text/html", "set-cookie": "session=abc123"}
    response.request.method = "GET"
    response.request.headers = {"authorization": "Bearer token123", "accept": "text/html", "cookie": "session=old"}
    response.request.timing = {}
    on_response(response)
    entry = state._har_entries[0]
    req_header_names = [h["name"] for h in entry["request"]["headers"]]
    assert "authorization" not in req_header_names
    assert "cookie" not in req_header_names
    assert "accept" in req_header_names
    resp_header_names = [h["name"] for h in entry["response"]["headers"]]
    assert "set-cookie" not in resp_header_names
    assert "content-type" in resp_header_names


def test_on_response_redacts_secret_query_params_in_har() -> None:
    """Secret-looking query params (token, api_key) are replaced with REDACTED."""
    state = _make_session_state(har_enabled=True)
    raw_page = MagicMock()
    raw_page.url = "https://example.com"
    handlers = mcp_inspection._make_page_handlers(state, raw_page)
    on_response = handlers["response"]
    response = MagicMock()
    response.url = "https://api.example.com/data?token=secret123&foo=bar&api_key=hidden"
    response.status = 200
    response.status_text = "OK"
    response.headers = {"content-type": "text/html"}
    response.request.method = "GET"
    response.request.headers = {"accept": "text/html"}
    response.request.timing = {}
    on_response(response)
    entry = state._har_entries[0]
    qs = {p["name"]: p["value"] for p in entry["request"]["queryString"]}
    assert qs["foo"] == "bar"
    assert qs["token"] == "REDACTED"
    assert qs["api_key"] == "REDACTED"
# ═══════════════════════════════════════════════════
# skyvern_har_start
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_har_start_happy_path(monkeypatch: pytest.MonkeyPatch) -> None:
page = _make_mock_page()
ctx = BrowserContext(mode="local")
_patch_get_page(monkeypatch, page, ctx)
_patch_stateless(monkeypatch, False)
state = _make_session_state()
monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: state)
result = await mcp_inspection.skyvern_har_start()
assert result["ok"] is True
assert state.har_enabled is True
assert result["data"]["recording"] is True
@pytest.mark.asyncio
async def test_har_start_already_active(monkeypatch: pytest.MonkeyPatch) -> None:
page = _make_mock_page()
ctx = BrowserContext(mode="local")
_patch_get_page(monkeypatch, page, ctx)
_patch_stateless(monkeypatch, False)
state = _make_session_state(har_enabled=True)
monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: state)
result = await mcp_inspection.skyvern_har_start()
assert result["ok"] is False
assert "already active" in result["error"]["message"]
@pytest.mark.asyncio
async def test_har_start_clears_buffer(monkeypatch: pytest.MonkeyPatch) -> None:
page = _make_mock_page()
ctx = BrowserContext(mode="local")
_patch_get_page(monkeypatch, page, ctx)
_patch_stateless(monkeypatch, False)
entries = deque(maxlen=5000)
entries.append({"old": "entry"})
state = _make_session_state(_har_entries=entries)
monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: state)
result = await mcp_inspection.skyvern_har_start()
assert result["ok"] is True
assert len(state._har_entries) == 0
@pytest.mark.asyncio
async def test_har_start_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """Without a reachable browser, har_start reports failure instead of raising."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_inspection, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    _patch_stateless(monkeypatch, False)

    assert (await mcp_inspection.skyvern_har_start())["ok"] is False
@pytest.mark.asyncio
async def test_har_start_stateless_mode(monkeypatch: pytest.MonkeyPatch) -> None:
    """HAR capture is unavailable in stateless HTTP mode."""
    _patch_stateless(monkeypatch, True)

    outcome = await mcp_inspection.skyvern_har_start()

    assert outcome["ok"] is False
    assert "stateless" in outcome["error"]["message"].lower()
# ═══════════════════════════════════════════════════
# skyvern_har_stop
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_har_stop_happy_path(monkeypatch: pytest.MonkeyPatch) -> None:
    """Stopping an active recording returns a HAR 1.2 document and resets state."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    _patch_stateless(monkeypatch, False)
    captured = deque(maxlen=5000)
    captured.append(
        {
            "startedDateTime": "2026-01-01T00:00:00Z",
            "time": 100,
            "request": {"method": "GET", "url": "https://example.com", "headers": []},
            "response": {
                "status": 200,
                "statusText": "OK",
                "headers": [],
                "content": {"size": 1024, "mimeType": "text/html"},
            },
            "timings": {"send": 0, "wait": 100, "receive": 0},
        }
    )
    session = _make_session_state(har_enabled=True, _har_entries=captured)
    monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: session)

    outcome = await mcp_inspection.skyvern_har_stop()

    assert outcome["ok"] is True
    assert session.har_enabled is False
    assert len(session._har_entries) == 0
    assert outcome["data"]["entry_count"] == 1
    har_doc = outcome["data"]["har"]
    assert har_doc["log"]["version"] == "1.2"
    assert har_doc["log"]["creator"]["name"] == "Skyvern"
    assert len(har_doc["log"]["entries"]) == 1
@pytest.mark.asyncio
async def test_har_stop_not_recording(monkeypatch: pytest.MonkeyPatch) -> None:
    """Stopping when no recording is active yields a descriptive failure."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    _patch_stateless(monkeypatch, False)
    session = _make_session_state(har_enabled=False)
    monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: session)

    outcome = await mcp_inspection.skyvern_har_stop()

    assert outcome["ok"] is False
    assert "No active HAR recording" in outcome["error"]["message"]
@pytest.mark.asyncio
async def test_har_stop_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """Browser unavailability surfaces as a failed result, not an exception."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_inspection, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    _patch_stateless(monkeypatch, False)

    assert (await mcp_inspection.skyvern_har_stop())["ok"] is False
@pytest.mark.asyncio
async def test_har_stop_stateless_mode(monkeypatch: pytest.MonkeyPatch) -> None:
    """har_stop is rejected outright in stateless HTTP mode."""
    _patch_stateless(monkeypatch, True)
    assert (await mcp_inspection.skyvern_har_stop())["ok"] is False
@pytest.mark.asyncio
async def test_har_roundtrip(monkeypatch: pytest.MonkeyPatch) -> None:
    """Full cycle: start, capture an entry, stop, and verify the HAR output."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    _patch_stateless(monkeypatch, False)
    session = _make_session_state()
    monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: session)

    # Begin recording.
    started = await mcp_inspection.skyvern_har_start()
    assert started["ok"] is True
    assert session.har_enabled is True

    # Simulate a captured response, as the _on_response hook would append it.
    session._har_entries.append(
        {
            "startedDateTime": "2026-01-01T00:00:00Z",
            "time": 50,
            "request": {"method": "POST", "url": "https://api.example.com/submit", "headers": []},
            "response": {"status": 201, "statusText": "Created", "headers": [], "content": {"size": 0, "mimeType": ""}},
            "timings": {"send": 0, "wait": 50, "receive": 0},
        }
    )

    # End recording: the buffer drains into the result and the flag resets.
    stopped = await mcp_inspection.skyvern_har_stop()
    assert stopped["ok"] is True
    assert stopped["data"]["entry_count"] == 1
    assert session.har_enabled is False
    assert len(session._har_entries) == 0

View file

@ -58,13 +58,13 @@ def _network_entry(url: str = "https://a.com", method: str = "GET", status: int
class TestEnsureHooks: class TestEnsureHooks:
def test_registers_three_listeners(self) -> None: def test_registers_four_listeners(self) -> None:
state = _make_state() state = _make_state()
raw = MagicMock() raw = MagicMock()
raw.on = MagicMock() raw.on = MagicMock()
_register_hooks_on_page(state, raw) _register_hooks_on_page(state, raw)
assert raw.on.call_count == 3 assert raw.on.call_count == 4
assert {c.args[0] for c in raw.on.call_args_list} == {"console", "response", "dialog"} assert {c.args[0] for c in raw.on.call_args_list} == {"console", "response", "dialog", "pageerror"}
def test_idempotent(self) -> None: def test_idempotent(self) -> None:
state = _make_state() state = _make_state()
@ -72,7 +72,7 @@ class TestEnsureHooks:
raw.on = MagicMock() raw.on = MagicMock()
_register_hooks_on_page(state, raw) _register_hooks_on_page(state, raw)
_register_hooks_on_page(state, raw) _register_hooks_on_page(state, raw)
assert raw.on.call_count == 3 assert raw.on.call_count == 4
def test_keeps_hooks_on_both_pages(self) -> None: def test_keeps_hooks_on_both_pages(self) -> None:
"""Multi-page: hooks are registered on ALL pages, not removed on switch.""" """Multi-page: hooks are registered on ALL pages, not removed on switch."""
@ -86,8 +86,8 @@ class TestEnsureHooks:
_register_hooks_on_page(state, raw2) _register_hooks_on_page(state, raw2)
# Both pages should have hooks registered — no removal # Both pages should have hooks registered — no removal
assert raw1.remove_listener.call_count == 0 assert raw1.remove_listener.call_count == 0
assert raw1.on.call_count == 3 assert raw1.on.call_count == 4
assert raw2.on.call_count == 3 assert raw2.on.call_count == 4
# --- Console messages --- # --- Console messages ---

View file

@ -0,0 +1,274 @@
"""Tests for MCP page JS error tool (skyvern_get_errors) and pageerror hook."""
from __future__ import annotations
import time
from collections import deque
from types import SimpleNamespace
from unittest.mock import AsyncMock, MagicMock
import pytest
from skyvern.cli.core.result import BrowserContext
from skyvern.cli.mcp_tools import inspection as mcp_inspection
# ═══════════════════════════════════════════════════
# Helpers
# ═══════════════════════════════════════════════════
def _make_mock_page(url: str = "https://example.com") -> MagicMock:
page = MagicMock()
page.url = url
return page
def _make_skyvern_page(page: MagicMock) -> MagicMock:
wrapper = MagicMock()
wrapper.page = page
wrapper.url = page.url
return wrapper
def _make_session_state(**overrides):
defaults = {
"page_errors": deque(maxlen=1000),
"console_messages": deque(maxlen=1000),
"network_requests": deque(maxlen=1000),
"dialog_events": deque(maxlen=1000),
"_hooked_page_ids": set(),
"_hooked_handlers_map": {},
}
defaults.update(overrides)
return SimpleNamespace(**defaults)
def _patch_get_page(monkeypatch: pytest.MonkeyPatch, page: MagicMock, ctx: BrowserContext) -> AsyncMock:
    """Stub mcp_inspection.get_page so tools receive the given page/context pair."""
    stub = AsyncMock(return_value=(_make_skyvern_page(page), ctx))
    monkeypatch.setattr(mcp_inspection, "get_page", stub)
    return stub
# ═══════════════════════════════════════════════════
# pageerror hook registration
# ═══════════════════════════════════════════════════
def test_make_page_handlers_includes_pageerror() -> None:
    """The handler map built for a page must contain a 'pageerror' hook."""
    session = _make_session_state()
    raw = MagicMock()
    raw.url = "https://example.com"
    assert "pageerror" in mcp_inspection._make_page_handlers(session, raw)
def test_pageerror_handler_appends_to_buffer() -> None:
    """A fired pageerror lands in the buffer with url/timestamp/tab metadata."""
    session = _make_session_state()
    raw = MagicMock()
    raw.url = "https://example.com"
    pageerror = mcp_inspection._make_page_handlers(session, raw)["pageerror"]

    pageerror(Exception("ReferenceError: foo is not defined"))

    assert len(session.page_errors) == 1
    recorded = session.page_errors[0]
    assert "ReferenceError" in recorded["message"]
    assert recorded["page_url"] == "https://example.com"
    assert "timestamp" in recorded
    assert "tab_id" in recorded
def test_pageerror_handler_survives_exception() -> None:
    """The handler must tolerate error objects whose str() itself raises."""
    session = _make_session_state()
    raw = MagicMock()
    raw.url = "https://example.com"
    pageerror = mcp_inspection._make_page_handlers(session, raw)["pageerror"]

    class Unstringable:
        def __str__(self):
            raise RuntimeError("cannot stringify")

    # Must not propagate the RuntimeError out of the handler.
    pageerror(Unstringable())
    assert len(session.page_errors) == 1
def test_register_hooks_registers_pageerror() -> None:
    """_register_hooks_on_page wires exactly one 'pageerror' listener."""
    session = _make_session_state()
    raw = MagicMock()
    raw.url = "https://example.com"

    mcp_inspection._register_hooks_on_page(session, raw)

    pageerror_calls = [c for c in raw.on.call_args_list if c[0][0] == "pageerror"]
    assert len(pageerror_calls) == 1
# ═══════════════════════════════════════════════════
# skyvern_get_errors — happy path
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_get_errors_empty(monkeypatch: pytest.MonkeyPatch) -> None:
    """With nothing buffered, the tool reports zero errors and an empty list."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    session = _make_session_state()
    monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: session)
    monkeypatch.setattr("skyvern.cli.core.session_manager.is_stateless_http_mode", lambda: False)

    outcome = await mcp_inspection.skyvern_get_errors()

    assert outcome["ok"] is True
    assert outcome["data"]["count"] == 0
    assert outcome["data"]["errors"] == []
@pytest.mark.asyncio
async def test_get_errors_returns_buffered_errors(monkeypatch: pytest.MonkeyPatch) -> None:
    """All buffered JS errors are returned when no filter is given."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    buffered = deque(maxlen=1000)
    for message in ("TypeError: null is not an object", "ReferenceError: x is not defined"):
        buffered.append(
            {
                "message": message,
                "timestamp": time.time(),
                "page_url": "https://example.com",
                "tab_id": "1",
            }
        )
    session = _make_session_state(page_errors=buffered)
    monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: session)
    monkeypatch.setattr("skyvern.cli.core.session_manager.is_stateless_http_mode", lambda: False)

    outcome = await mcp_inspection.skyvern_get_errors()

    assert outcome["ok"] is True
    assert outcome["data"]["count"] == 2
@pytest.mark.asyncio
async def test_get_errors_text_filter(monkeypatch: pytest.MonkeyPatch) -> None:
    """The text filter returns only errors whose message contains the needle."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    buffered = deque(maxlen=1000)
    for message in ("TypeError: null is not an object", "ReferenceError: x is not defined"):
        buffered.append(
            {
                "message": message,
                "timestamp": time.time(),
                "page_url": "https://example.com",
                "tab_id": "1",
            }
        )
    session = _make_session_state(page_errors=buffered)
    monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: session)
    monkeypatch.setattr("skyvern.cli.core.session_manager.is_stateless_http_mode", lambda: False)

    outcome = await mcp_inspection.skyvern_get_errors(text="TypeError")

    assert outcome["ok"] is True
    assert outcome["data"]["count"] == 1
    assert "TypeError" in outcome["data"]["errors"][0]["message"]
@pytest.mark.asyncio
async def test_get_errors_clear(monkeypatch: pytest.MonkeyPatch) -> None:
    """clear=True returns the buffered errors and empties the buffer."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    buffered = deque(maxlen=1000)
    buffered.append({"message": "Error 1", "timestamp": time.time(), "page_url": "https://example.com", "tab_id": "1"})
    buffered.append({"message": "Error 2", "timestamp": time.time(), "page_url": "https://example.com", "tab_id": "1"})
    session = _make_session_state(page_errors=buffered)
    monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: session)
    monkeypatch.setattr("skyvern.cli.core.session_manager.is_stateless_http_mode", lambda: False)

    outcome = await mcp_inspection.skyvern_get_errors(clear=True)

    assert outcome["ok"] is True
    assert outcome["data"]["count"] == 2
    assert len(session.page_errors) == 0
@pytest.mark.asyncio
async def test_get_errors_clear_with_filter(monkeypatch: pytest.MonkeyPatch) -> None:
    """clear + text filter removes only the matching errors from the buffer."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    buffered = deque(maxlen=1000)
    buffered.append(
        {"message": "TypeError: null", "timestamp": time.time(), "page_url": "https://example.com", "tab_id": "1"}
    )
    buffered.append(
        {"message": "ReferenceError: x", "timestamp": time.time(), "page_url": "https://example.com", "tab_id": "1"}
    )
    session = _make_session_state(page_errors=buffered)
    monkeypatch.setattr(mcp_inspection, "get_current_session", lambda: session)
    monkeypatch.setattr("skyvern.cli.core.session_manager.is_stateless_http_mode", lambda: False)

    outcome = await mcp_inspection.skyvern_get_errors(text="TypeError", clear=True)

    assert outcome["ok"] is True
    assert outcome["data"]["count"] == 1
    # The unmatched ReferenceError must survive the selective clear.
    assert len(session.page_errors) == 1
    assert "ReferenceError" in session.page_errors[0]["message"]
# ═══════════════════════════════════════════════════
# skyvern_get_errors — error cases
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_get_errors_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """A missing browser is reported as a failed result."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_inspection, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    monkeypatch.setattr("skyvern.cli.core.session_manager.is_stateless_http_mode", lambda: False)

    assert (await mcp_inspection.skyvern_get_errors())["ok"] is False
@pytest.mark.asyncio
async def test_get_errors_stateless_mode(monkeypatch: pytest.MonkeyPatch) -> None:
    """The tool refuses to run in stateless HTTP mode."""
    monkeypatch.setattr("skyvern.cli.core.session_manager.is_stateless_http_mode", lambda: True)

    outcome = await mcp_inspection.skyvern_get_errors()

    assert outcome["ok"] is False
    assert "stateless" in outcome["error"]["message"].lower()

View file

@ -0,0 +1,240 @@
"""Tests for MCP semantic locator tool (skyvern_find) and do_find."""
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock
import pytest
from skyvern.cli.core.browser_ops import LOCATOR_TYPES, FindResult, do_find
from skyvern.cli.core.guards import GuardError
from skyvern.cli.core.result import BrowserContext
from skyvern.cli.mcp_tools import browser as mcp_browser
# ═══════════════════════════════════════════════════
# Helpers
# ═══════════════════════════════════════════════════
def _make_locator(count: int = 1, text: str = "Submit", visible: bool = True) -> MagicMock:
locator = MagicMock()
locator.count = AsyncMock(return_value=count)
first = MagicMock()
first.text_content = AsyncMock(return_value=text)
first.is_visible = AsyncMock(return_value=visible)
locator.first = first
return locator
def _make_mock_page(locator: MagicMock | None = None) -> MagicMock:
page = MagicMock()
page.url = "https://example.com"
loc = locator or _make_locator()
page.get_by_role = MagicMock(return_value=loc)
page.get_by_text = MagicMock(return_value=loc)
page.get_by_label = MagicMock(return_value=loc)
page.get_by_placeholder = MagicMock(return_value=loc)
page.get_by_alt_text = MagicMock(return_value=loc)
page.get_by_test_id = MagicMock(return_value=loc)
return page
def _make_skyvern_page(page: MagicMock) -> MagicMock:
wrapper = MagicMock()
wrapper.page = page
wrapper.url = page.url
# Delegate semantic locator methods
wrapper.get_by_role = page.get_by_role
wrapper.get_by_text = page.get_by_text
wrapper.get_by_label = page.get_by_label
wrapper.get_by_placeholder = page.get_by_placeholder
wrapper.get_by_alt_text = page.get_by_alt_text
wrapper.get_by_test_id = page.get_by_test_id
return wrapper
def _patch_get_page(monkeypatch: pytest.MonkeyPatch, page: MagicMock, ctx: BrowserContext) -> AsyncMock:
    """Stub mcp_browser.get_page so tools receive the given page/context pair."""
    stub = AsyncMock(return_value=(_make_skyvern_page(page), ctx))
    monkeypatch.setattr(mcp_browser, "get_page", stub)
    return stub
# ═══════════════════════════════════════════════════
# do_find (browser_ops)
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_do_find_by_role() -> None:
    """Role lookup delegates to get_by_role and summarizes the first match."""
    mock_page = _make_mock_page()

    found = await do_find(mock_page, by="role", value="button")

    assert isinstance(found, FindResult)
    assert found.count == 1
    assert found.first_text == "Submit"
    assert found.first_visible is True
    assert "role" in found.selector
    mock_page.get_by_role.assert_called_once_with("button")
@pytest.mark.asyncio
async def test_do_find_by_text() -> None:
    """Text lookup delegates to get_by_text."""
    mock_page = _make_mock_page()
    found = await do_find(mock_page, by="text", value="Click me")
    mock_page.get_by_text.assert_called_once_with("Click me")
    assert found.count == 1
@pytest.mark.asyncio
async def test_do_find_by_label() -> None:
    """Label lookup delegates to get_by_label."""
    mock_page = _make_mock_page()
    await do_find(mock_page, by="label", value="Email")
    mock_page.get_by_label.assert_called_once_with("Email")
@pytest.mark.asyncio
async def test_do_find_by_placeholder() -> None:
    """Placeholder lookup delegates to get_by_placeholder."""
    mock_page = _make_mock_page()
    await do_find(mock_page, by="placeholder", value="Enter email")
    mock_page.get_by_placeholder.assert_called_once_with("Enter email")
@pytest.mark.asyncio
async def test_do_find_by_alt() -> None:
    """Alt-text lookup delegates to get_by_alt_text and reports its selector."""
    mock_page = _make_mock_page()
    found = await do_find(mock_page, by="alt", value="Logo")
    mock_page.get_by_alt_text.assert_called_once_with("Logo")
    assert found.selector == "get_by_alt_text('Logo')"
@pytest.mark.asyncio
async def test_do_find_by_testid() -> None:
    """Test-id lookup delegates to get_by_test_id and reports its selector."""
    mock_page = _make_mock_page()
    found = await do_find(mock_page, by="testid", value="submit-btn")
    mock_page.get_by_test_id.assert_called_once_with("submit-btn")
    assert found.selector == "get_by_test_id('submit-btn')"
@pytest.mark.asyncio
async def test_do_find_invalid_type() -> None:
    """An unknown locator kind raises GuardError."""
    mock_page = _make_mock_page()
    with pytest.raises(GuardError, match="Invalid locator type"):
        await do_find(mock_page, by="invalid", value="anything")
@pytest.mark.asyncio
async def test_do_find_no_matches() -> None:
    """Zero matches yield count 0 and empty first-element info."""
    mock_page = _make_mock_page(_make_locator(count=0))

    found = await do_find(mock_page, by="role", value="dialog")

    assert found.count == 0
    assert found.first_text is None
    assert found.first_visible is False
@pytest.mark.asyncio
async def test_do_find_multiple_matches() -> None:
    """With several matches, only the first element is summarized."""
    mock_page = _make_mock_page(_make_locator(count=5, text="Item 1"))

    found = await do_find(mock_page, by="text", value="Item")

    assert found.count == 5
    assert found.first_text == "Item 1"
@pytest.mark.asyncio
async def test_do_find_hidden_element() -> None:
    """A hidden first match is reported as not visible."""
    mock_page = _make_mock_page(_make_locator(count=1, text="Hidden", visible=False))
    found = await do_find(mock_page, by="text", value="Hidden")
    assert found.first_visible is False
def test_locator_types_constant() -> None:
    """LOCATOR_TYPES covers exactly the six supported semantic locator kinds."""
    expected = {"role", "text", "label", "placeholder", "alt", "testid"}
    assert expected <= set(LOCATOR_TYPES)
    assert len(LOCATOR_TYPES) == 6
# ═══════════════════════════════════════════════════
# skyvern_find (MCP tool)
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_find_tool_happy_path(monkeypatch: pytest.MonkeyPatch) -> None:
    """skyvern_find returns count, first-element info, and the selector string."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))

    outcome = await mcp_browser.skyvern_find(by="role", value="button")

    assert outcome["ok"] is True
    data = outcome["data"]
    assert data["count"] == 1
    assert data["first_text"] == "Submit"
    assert data["first_visible"] is True
    assert data["selector"] == "get_by_role('button')"
@pytest.mark.asyncio
async def test_find_tool_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """A missing browser surfaces as a failed result."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_browser, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    assert (await mcp_browser.skyvern_find(by="role", value="button"))["ok"] is False
@pytest.mark.asyncio
async def test_find_tool_invalid_locator_type(monkeypatch: pytest.MonkeyPatch) -> None:
    """An unsupported locator kind is rejected with an explanatory message."""
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))

    outcome = await mcp_browser.skyvern_find(by="xpath", value="//div")

    assert outcome["ok"] is False
    assert "Invalid locator type" in outcome["error"]["message"]
@pytest.mark.asyncio
async def test_find_tool_no_matches(monkeypatch: pytest.MonkeyPatch) -> None:
    """Zero matches is still a successful result with count 0."""
    _patch_get_page(monkeypatch, _make_mock_page(_make_locator(count=0)), BrowserContext(mode="local"))

    outcome = await mcp_browser.skyvern_find(by="role", value="dialog")

    assert outcome["ok"] is True
    assert outcome["data"]["count"] == 0
    assert outcome["data"]["first_text"] is None
@pytest.mark.asyncio
async def test_find_tool_exception(monkeypatch: pytest.MonkeyPatch) -> None:
    """Locator failures inside the page surface as error results."""
    mock_page = _make_mock_page()
    mock_page.get_by_role = MagicMock(side_effect=RuntimeError("Locator error"))
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))

    outcome = await mcp_browser.skyvern_find(by="role", value="button")

    assert outcome["ok"] is False
    assert "Locator error" in outcome["error"]["message"]

View file

@ -0,0 +1,413 @@
"""Tests for MCP auth state persistence tools (state_save / state_load)."""
from __future__ import annotations
import json
from pathlib import Path
from types import SimpleNamespace
from unittest.mock import AsyncMock, MagicMock
import pytest
from skyvern.cli.core.browser_ops import _cookie_domain_matches
from skyvern.cli.core.result import BrowserContext
from skyvern.cli.core.session_manager import SessionState
from skyvern.cli.mcp_tools import state as mcp_state
from skyvern.cli.mcp_tools.state import _validate_state_path
# ═══════════════════════════════════════════════════
# Helpers
# ═══════════════════════════════════════════════════
def _make_mock_page(url: str = "https://example.com", title: str = "Example") -> MagicMock:
page = MagicMock()
page.url = url
page.title = AsyncMock(return_value=title)
page.evaluate = AsyncMock(return_value={})
page.is_closed.return_value = False
return page
def _make_mock_browser(cookies: list | None = None) -> MagicMock:
browser = MagicMock()
browser._browser_context = MagicMock()
browser._browser_context.cookies = AsyncMock(return_value=cookies or [])
browser._browser_context.add_cookies = AsyncMock()
return browser
def _make_session_state(browser: MagicMock | None = None) -> SessionState:
    """Produce a fresh SessionState carrying the given (possibly mock) browser."""
    session = SessionState()
    session.browser = browser
    return session
def _patch_get_page(monkeypatch: pytest.MonkeyPatch, page: MagicMock, ctx: BrowserContext) -> AsyncMock:
    """Stub mcp_state.get_page to hand back the page wrapped in a SimpleNamespace."""
    stub = AsyncMock(return_value=(SimpleNamespace(page=page), ctx))
    monkeypatch.setattr(mcp_state, "get_page", stub)
    return stub
def _patch_session(monkeypatch: pytest.MonkeyPatch, state: SessionState) -> MagicMock:
    """Stub mcp_state.get_current_session to return the given state."""
    stub = MagicMock(return_value=state)
    monkeypatch.setattr(mcp_state, "get_current_session", stub)
    return stub
# ═══════════════════════════════════════════════════
# _cookie_domain_matches
# ═══════════════════════════════════════════════════
class TestCookieDomainMatches:
    """Behavioral contract of _cookie_domain_matches: cookie-domain scoping rules."""

    def test_exact_match(self) -> None:
        # Identical cookie and page domains always match.
        assert _cookie_domain_matches("example.com", "example.com") is True

    def test_subdomain_match_with_dot(self) -> None:
        # A leading dot covers the domain and all of its subdomains.
        assert _cookie_domain_matches(".example.com", "sub.example.com") is True

    def test_subdomain_match_without_dot(self) -> None:
        # A dotless cookie domain also covers subdomains here.
        assert _cookie_domain_matches("example.com", "sub.example.com") is True

    def test_suffix_attack_rejected(self) -> None:
        # "evil-example.com" merely ends with "example.com" — must not match.
        assert _cookie_domain_matches("example.com", "evil-example.com") is False

    def test_empty_cookie_domain(self) -> None:
        assert _cookie_domain_matches("", "example.com") is False

    def test_empty_page_domain(self) -> None:
        assert _cookie_domain_matches("example.com", "") is False

    def test_both_empty(self) -> None:
        assert _cookie_domain_matches("", "") is False

    def test_dot_only_cookie_domain(self) -> None:
        # A bare "." is degenerate and never matches.
        assert _cookie_domain_matches(".", "example.com") is False

    def test_deep_subdomain_match(self) -> None:
        # Matching applies at any subdomain depth.
        assert _cookie_domain_matches(".example.com", "a.b.c.example.com") is True

    def test_different_domain_rejected(self) -> None:
        assert _cookie_domain_matches("other.com", "example.com") is False
# ═══════════════════════════════════════════════════
# _validate_state_path
# ═══════════════════════════════════════════════════
class TestValidateStatePath:
    """Validation of user-supplied state-file paths: root confinement, path
    traversal, symlink and extension rejection, and optional existence checks."""

    def test_valid_path_in_cwd(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
        """A relative filename resolves under the current working directory."""
        monkeypatch.chdir(tmp_path)
        result = _validate_state_path("state.json")
        assert result == (tmp_path / "state.json").resolve()

    def test_valid_path_no_extension(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
        """A name with no extension at all is accepted."""
        monkeypatch.chdir(tmp_path)
        result = _validate_state_path("mystate")
        assert result == (tmp_path / "mystate").resolve()

    def test_rejects_outside_allowed_roots(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
        """Absolute paths outside the allowed roots are refused."""
        monkeypatch.chdir(tmp_path)
        with pytest.raises(ValueError, match="must be under working directory"):
            _validate_state_path("/etc/passwd")

    def test_rejects_path_traversal(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
        """`..` traversal cannot escape the working directory."""
        monkeypatch.chdir(tmp_path)
        with pytest.raises(ValueError, match="must be under working directory"):
            _validate_state_path("../../../etc/passwd")

    def test_rejects_symlinks(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
        """Symlinked paths are rejected even when they point inside the root."""
        monkeypatch.chdir(tmp_path)
        target = tmp_path / "real.json"
        target.write_text("{}")
        link = tmp_path / "link.json"
        link.symlink_to(target)
        with pytest.raises(ValueError, match="Symlinks not allowed"):
            _validate_state_path("link.json")

    def test_rejects_bad_extension(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
        """Non-.json extensions are refused."""
        monkeypatch.chdir(tmp_path)
        with pytest.raises(ValueError, match="must have .json extension"):
            _validate_state_path("state.exe")

    def test_must_exist_raises_when_missing(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
        """must_exist=True turns a missing file into FileNotFoundError."""
        monkeypatch.chdir(tmp_path)
        with pytest.raises(FileNotFoundError, match="State file not found"):
            _validate_state_path("missing.json", must_exist=True)

    def test_must_exist_passes_when_present(self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
        """must_exist=True accepts a file that is actually there."""
        monkeypatch.chdir(tmp_path)
        f = tmp_path / "exists.json"
        f.write_text("{}")
        result = _validate_state_path("exists.json", must_exist=True)
        assert result == f.resolve()

    def test_home_skyvern_path_allowed(self, monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
        """Paths under ~/.skyvern are accepted as an allowed root."""
        skyvern_dir = tmp_path / ".skyvern"
        skyvern_dir.mkdir()
        monkeypatch.setattr(Path, "home", classmethod(lambda cls: tmp_path))
        # Fix: the previous conditional chdir targeted an "elsewhere" directory
        # that was never created, so the condition was always False and the call
        # always fell through to tmp_path — chdir there directly.
        monkeypatch.chdir(tmp_path)
        result = _validate_state_path(str(skyvern_dir / "state.json"))
        assert ".skyvern" in str(result)
# ═══════════════════════════════════════════════════
# skyvern_state_save
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_state_save_happy_path(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
) -> None:
    """Saving writes cookies plus both storages to a versioned JSON file."""
    monkeypatch.chdir(tmp_path)
    cookies = [{"name": "sid", "value": "abc", "domain": "example.com", "path": "/"}]
    local_storage = {"key1": "val1"}
    session_storage = {"skey": "sval"}
    mock_page = _make_mock_page("https://example.com")
    # evaluate() is called twice: first for localStorage, then for sessionStorage.
    mock_page.evaluate = AsyncMock(side_effect=[local_storage, session_storage])
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))
    _patch_session(monkeypatch, _make_session_state(_make_mock_browser(cookies)))

    outcome = await mcp_state.skyvern_state_save(file_path="auth.json")

    assert outcome["ok"] is True
    assert outcome["data"]["cookie_count"] == 1
    assert outcome["data"]["local_storage_count"] == 1
    assert outcome["data"]["session_storage_count"] == 1
    on_disk = json.loads((tmp_path / "auth.json").read_text())
    assert on_disk["version"] == 1
    assert on_disk["cookies"] == cookies
    assert on_disk["local_storage"] == local_storage
    assert on_disk["session_storage"] == session_storage
@pytest.mark.asyncio
async def test_state_save_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """An unreachable browser surfaces as a failed result."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_state, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    assert (await mcp_state.skyvern_state_save(file_path="test.json"))["ok"] is False
@pytest.mark.asyncio
async def test_state_save_invalid_path(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
) -> None:
    """Absolute paths outside the allowed roots are refused."""
    monkeypatch.chdir(tmp_path)
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    _patch_session(monkeypatch, _make_session_state(_make_mock_browser()))

    outcome = await mcp_state.skyvern_state_save(file_path="/etc/evil.json")

    assert outcome["ok"] is False
    assert "must be under" in outcome["error"]["message"]
@pytest.mark.asyncio
async def test_state_save_no_browser_in_session(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
) -> None:
    """A session without a browser cannot save state."""
    monkeypatch.chdir(tmp_path)
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    _patch_session(monkeypatch, _make_session_state(None))

    assert (await mcp_state.skyvern_state_save(file_path="auth.json"))["ok"] is False
# ═══════════════════════════════════════════════════
# skyvern_state_load
# ═══════════════════════════════════════════════════
def _write_state_file(path: Path, *, cookies: list | None = None, url: str = "https://example.com") -> None:
state = {
"version": 1,
"url": url,
"timestamp": "2026-04-01T00:00:00+00:00",
"cookies": cookies or [],
"local_storage": {"lk": "lv"},
"session_storage": {"sk": "sv"},
}
path.write_text(json.dumps(state))
@pytest.mark.asyncio
async def test_state_load_happy_path(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
) -> None:
    """Loading applies same-domain cookies and skips the foreign ones."""
    monkeypatch.chdir(tmp_path)
    cookies = [
        {"name": "sid", "value": "abc", "domain": "example.com", "path": "/"},
        {"name": "other", "value": "xyz", "domain": "evil.com", "path": "/"},
    ]
    _write_state_file(tmp_path / "auth.json", cookies=cookies)
    browser = _make_mock_browser()
    _patch_get_page(monkeypatch, _make_mock_page("https://example.com"), BrowserContext(mode="local"))
    _patch_session(monkeypatch, _make_session_state(browser))

    outcome = await mcp_state.skyvern_state_load(file_path="auth.json")

    assert outcome["ok"] is True
    assert outcome["data"]["cookie_count"] == 1
    assert outcome["data"]["skipped_cookies"] == 1
    assert outcome["data"]["local_storage_count"] == 1
    assert outcome["data"]["session_storage_count"] == 1
    browser._browser_context.add_cookies.assert_awaited_once()
    applied = browser._browser_context.add_cookies.call_args[0][0]
    assert len(applied) == 1
    assert applied[0]["domain"] == "example.com"
@pytest.mark.asyncio
async def test_state_load_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """An unreachable browser surfaces as a failed result."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_state, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    assert (await mcp_state.skyvern_state_load(file_path="test.json"))["ok"] is False
@pytest.mark.asyncio
async def test_state_load_file_not_found(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
) -> None:
    """A missing state file is reported, not raised."""
    monkeypatch.chdir(tmp_path)
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    _patch_session(monkeypatch, _make_session_state(_make_mock_browser()))

    outcome = await mcp_state.skyvern_state_load(file_path="nonexistent.json")

    assert outcome["ok"] is False
    assert "not found" in outcome["error"]["message"].lower()
@pytest.mark.asyncio
async def test_state_load_bad_version(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
) -> None:
    """An unknown schema version is rejected."""
    monkeypatch.chdir(tmp_path)
    (tmp_path / "bad.json").write_text(json.dumps({"version": 999}))
    _patch_get_page(monkeypatch, _make_mock_page(), BrowserContext(mode="local"))
    _patch_session(monkeypatch, _make_session_state(_make_mock_browser()))

    outcome = await mcp_state.skyvern_state_load(file_path="bad.json")

    assert outcome["ok"] is False
    assert "version" in outcome["error"]["message"].lower()
@pytest.mark.asyncio
async def test_state_load_malformed_json(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
) -> None:
    """Unparseable file contents must surface as an error result, not a crash."""
    monkeypatch.chdir(tmp_path)
    (tmp_path / "bad.json").write_text("not json at all")
    mock_page = _make_mock_page()
    mock_browser = _make_mock_browser()
    browser_ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, mock_page, browser_ctx)
    _patch_session(monkeypatch, _make_session_state(mock_browser))
    outcome = await mcp_state.skyvern_state_load(file_path="bad.json")
    assert outcome["ok"] is False
@pytest.mark.asyncio
async def test_state_load_filters_cross_domain_cookies(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
) -> None:
    """Cookies from a different domain must not be applied."""
    monkeypatch.chdir(tmp_path)
    foreign_cookies = [
        {"name": "c1", "value": "v1", "domain": ".other.com", "path": "/"},
        {"name": "c2", "value": "v2", "domain": "another.org", "path": "/"},
    ]
    _write_state_file(tmp_path / "cross.json", cookies=foreign_cookies, url="https://other.com")
    mock_page = _make_mock_page("https://example.com")
    mock_browser = _make_mock_browser()
    browser_ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, mock_page, browser_ctx)
    _patch_session(monkeypatch, _make_session_state(mock_browser))
    outcome = await mcp_state.skyvern_state_load(file_path="cross.json")
    assert outcome["ok"] is True
    # Every saved cookie belongs to other domains, so all are skipped.
    assert outcome["data"]["cookie_count"] == 0
    assert outcome["data"]["skipped_cookies"] == 2
    mock_browser._browser_context.add_cookies.assert_not_awaited()
@pytest.mark.asyncio
async def test_state_save_load_roundtrip(
    monkeypatch: pytest.MonkeyPatch,
    tmp_path: Path,
) -> None:
    """Save then load should produce consistent results."""
    monkeypatch.chdir(tmp_path)
    stored_cookies = [{"name": "tok", "value": "123", "domain": "example.com", "path": "/"}]
    local_items = {"theme": "dark"}
    session_items = {"cart": "item1"}
    mock_page = _make_mock_page("https://example.com")
    # Save reads localStorage first, then sessionStorage.
    mock_page.evaluate = AsyncMock(side_effect=[local_items, session_items])
    mock_browser = _make_mock_browser(stored_cookies)
    browser_ctx = BrowserContext(mode="local")
    _patch_get_page(monkeypatch, mock_page, browser_ctx)
    _patch_session(monkeypatch, _make_session_state(mock_browser))
    saved = await mcp_state.skyvern_state_save(file_path="roundtrip.json")
    assert saved["ok"] is True
    # The load phase writes storage instead of reading it; reset evaluate.
    mock_page.evaluate = AsyncMock(return_value=None)
    loaded = await mcp_state.skyvern_state_load(file_path="roundtrip.json")
    assert loaded["ok"] is True
    assert loaded["data"]["cookie_count"] == 1
    assert loaded["data"]["local_storage_count"] == 1
    assert loaded["data"]["session_storage_count"] == 1
    assert loaded["data"]["skipped_cookies"] == 0

View file

@ -0,0 +1,201 @@
"""Tests for MCP web storage tools (sessionStorage + localStorage clear)."""
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock
import pytest
from skyvern.cli.core.result import BrowserContext
from skyvern.cli.mcp_tools import storage as mcp_storage
# ═══════════════════════════════════════════════════
# Helpers
# ═══════════════════════════════════════════════════
def _make_mock_page(url: str = "https://example.com") -> MagicMock:
page = MagicMock()
page.url = url
page.evaluate = AsyncMock(return_value={})
return page
def _make_skyvern_page(page: MagicMock) -> MagicMock:
"""Mimic SkyvernBrowserPage which delegates attribute access to the raw page."""
wrapper = MagicMock()
wrapper.page = page
wrapper.evaluate = page.evaluate
wrapper.url = page.url
return wrapper
def _patch_get_page(monkeypatch: pytest.MonkeyPatch, page: MagicMock, ctx: BrowserContext) -> AsyncMock:
    """Replace mcp_storage.get_page so tools receive the given mock page/context."""
    wrapped = _make_skyvern_page(page)
    stub = AsyncMock(return_value=(wrapped, ctx))
    monkeypatch.setattr(mcp_storage, "get_page", stub)
    return stub
# ═══════════════════════════════════════════════════
# skyvern_get_session_storage
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_get_session_storage_all(monkeypatch: pytest.MonkeyPatch) -> None:
    """With no key filter, every sessionStorage entry is returned."""
    mock_page = _make_mock_page()
    mock_page.evaluate = AsyncMock(return_value={"token": "abc", "lang": "en"})
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))
    outcome = await mcp_storage.skyvern_get_session_storage()
    assert outcome["ok"] is True
    assert outcome["data"]["count"] == 2
    assert outcome["data"]["items"]["token"] == "abc"
@pytest.mark.asyncio
async def test_get_session_storage_specific_keys(monkeypatch: pytest.MonkeyPatch) -> None:
    """Requesting specific keys drops those absent from storage."""
    mock_page = _make_mock_page()
    # One evaluate call per requested key: present, then missing.
    mock_page.evaluate = AsyncMock(side_effect=["abc", None])
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))
    outcome = await mcp_storage.skyvern_get_session_storage(keys=["token", "missing"])
    assert outcome["ok"] is True
    assert outcome["data"]["count"] == 1
    assert outcome["data"]["items"] == {"token": "abc"}
@pytest.mark.asyncio
async def test_get_session_storage_empty(monkeypatch: pytest.MonkeyPatch) -> None:
    """Empty sessionStorage reports a zero count and succeeds."""
    mock_page = _make_mock_page()
    mock_page.evaluate = AsyncMock(return_value={})
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))
    outcome = await mcp_storage.skyvern_get_session_storage()
    assert outcome["ok"] is True
    assert outcome["data"]["count"] == 0
@pytest.mark.asyncio
async def test_get_session_storage_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """Reads fail cleanly when no browser session exists."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_storage, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    outcome = await mcp_storage.skyvern_get_session_storage()
    assert outcome["ok"] is False
@pytest.mark.asyncio
async def test_get_session_storage_evaluate_error(monkeypatch: pytest.MonkeyPatch) -> None:
    """A page-side failure during evaluate surfaces in the error message."""
    mock_page = _make_mock_page()
    mock_page.evaluate = AsyncMock(side_effect=RuntimeError("page crashed"))
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))
    outcome = await mcp_storage.skyvern_get_session_storage()
    assert outcome["ok"] is False
    assert "page crashed" in outcome["error"]["message"]
# ═══════════════════════════════════════════════════
# skyvern_set_session_storage
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_set_session_storage(monkeypatch: pytest.MonkeyPatch) -> None:
    """Setting a key echoes the key and reports the value's length."""
    mock_page = _make_mock_page()
    mock_page.evaluate = AsyncMock(return_value=None)
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))
    outcome = await mcp_storage.skyvern_set_session_storage(key="theme", value="dark")
    assert outcome["ok"] is True
    assert outcome["data"]["key"] == "theme"
    assert outcome["data"]["value_length"] == 4
    mock_page.evaluate.assert_awaited_once()
@pytest.mark.asyncio
async def test_set_session_storage_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """Writes fail cleanly when no browser session exists."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_storage, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    outcome = await mcp_storage.skyvern_set_session_storage(key="k", value="v")
    assert outcome["ok"] is False
# ═══════════════════════════════════════════════════
# skyvern_clear_session_storage
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_clear_session_storage(monkeypatch: pytest.MonkeyPatch) -> None:
    """Clearing sessionStorage reports how many entries were removed."""
    mock_page = _make_mock_page()
    mock_page.evaluate = AsyncMock(return_value=3)
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))
    outcome = await mcp_storage.skyvern_clear_session_storage()
    assert outcome["ok"] is True
    assert outcome["data"]["cleared_count"] == 3
@pytest.mark.asyncio
async def test_clear_session_storage_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """Clearing sessionStorage fails cleanly without a browser session."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_storage, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    outcome = await mcp_storage.skyvern_clear_session_storage()
    assert outcome["ok"] is False
# ═══════════════════════════════════════════════════
# skyvern_clear_local_storage
# ═══════════════════════════════════════════════════
@pytest.mark.asyncio
async def test_clear_local_storage(monkeypatch: pytest.MonkeyPatch) -> None:
    """Clearing localStorage reports how many entries were removed."""
    mock_page = _make_mock_page()
    mock_page.evaluate = AsyncMock(return_value=5)
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))
    outcome = await mcp_storage.skyvern_clear_local_storage()
    assert outcome["ok"] is True
    assert outcome["data"]["cleared_count"] == 5
@pytest.mark.asyncio
async def test_clear_local_storage_no_browser(monkeypatch: pytest.MonkeyPatch) -> None:
    """Clearing localStorage fails cleanly without a browser session."""
    from skyvern.cli.mcp_tools._session import BrowserNotAvailableError

    monkeypatch.setattr(mcp_storage, "get_page", AsyncMock(side_effect=BrowserNotAvailableError()))
    outcome = await mcp_storage.skyvern_clear_local_storage()
    assert outcome["ok"] is False
@pytest.mark.asyncio
async def test_clear_local_storage_error(monkeypatch: pytest.MonkeyPatch) -> None:
    """A page-side failure while clearing surfaces in the error message."""
    mock_page = _make_mock_page()
    mock_page.evaluate = AsyncMock(side_effect=RuntimeError("security error"))
    _patch_get_page(monkeypatch, mock_page, BrowserContext(mode="local"))
    outcome = await mcp_storage.skyvern_clear_local_storage()
    assert outcome["ok"] is False
    assert "security error" in outcome["error"]["message"]

View file

@ -420,9 +420,9 @@ class TestMultiPageInspectionHooks:
# Both pages should have hooks # Both pages should have hooks
assert id(page_a) in state._hooked_page_ids assert id(page_a) in state._hooked_page_ids
assert id(page_b) in state._hooked_page_ids assert id(page_b) in state._hooked_page_ids
# 3 events per page: console, response, dialog # 4 events per page: console, response, dialog, pageerror
assert page_a.on.call_count == 3 assert page_a.on.call_count == 4
assert page_b.on.call_count == 3 assert page_b.on.call_count == 4
def test_hooks_idempotent(self) -> None: def test_hooks_idempotent(self) -> None:
from skyvern.cli.mcp_tools.inspection import ensure_hooks_on_all_pages from skyvern.cli.mcp_tools.inspection import ensure_hooks_on_all_pages
@ -437,7 +437,7 @@ class TestMultiPageInspectionHooks:
ensure_hooks_on_all_pages(state, [page_a]) ensure_hooks_on_all_pages(state, [page_a])
# Should only register once # Should only register once
assert page_a.on.call_count == 3 assert page_a.on.call_count == 4
def test_stale_pages_pruned(self) -> None: def test_stale_pages_pruned(self) -> None:
from skyvern.cli.mcp_tools.inspection import ensure_hooks_on_all_pages from skyvern.cli.mcp_tools.inspection import ensure_hooks_on_all_pages

View file

@ -0,0 +1,324 @@
"""Tests for ScriptReviewer quality validators: proactive misuse, fragile selectors, hardcoded run data."""
from skyvern.services.script_reviewer import ScriptReviewer
class TestValidateProactiveMisuse:
    """Exercise ScriptReviewer._validate_proactive_misuse."""

    def setup_method(self) -> None:
        self.reviewer = ScriptReviewer()

    def test_fallback_is_fine(self) -> None:
        """ai='fallback' on interaction calls is the correct usage."""
        snippet = """
async def login(page, context):
    await page.fill(selector='input[name="email"]', value='test', ai='fallback', prompt='email')
    await page.click(selector='button', ai='fallback', prompt='submit')
"""
        assert self.reviewer._validate_proactive_misuse(snippet) is None

    def test_proactive_on_fill_flagged(self) -> None:
        """ai='proactive' on page.fill must be reported with a fix suggestion."""
        snippet = """
async def login(page, context):
    await page.fill(selector='input[placeholder="Username"]', ai='proactive', prompt='username')
"""
        issue = self.reviewer._validate_proactive_misuse(snippet)
        assert issue is not None
        assert "page.fill()" in issue
        assert "ai='fallback'" in issue

    def test_proactive_on_click_flagged(self) -> None:
        """ai='proactive' on page.click must be reported."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='button:has-text("Submit")', ai='proactive', prompt='submit')
"""
        issue = self.reviewer._validate_proactive_misuse(snippet)
        assert issue is not None
        assert "page.click()" in issue

    def test_proactive_on_select_option_flagged(self) -> None:
        """ai='proactive' on page.select_option must be reported."""
        snippet = """
async def block_fn(page, context):
    await page.select_option(selector='select[name="format"]', value='PDF', ai='proactive', prompt='format')
"""
        issue = self.reviewer._validate_proactive_misuse(snippet)
        assert issue is not None
        assert "page.select_option()" in issue

    def test_proactive_on_type_flagged(self) -> None:
        """ai='proactive' on page.type must be reported."""
        snippet = """
async def block_fn(page, context):
    await page.type(selector='input[name="search"]', ai='proactive', prompt='search')
"""
        issue = self.reviewer._validate_proactive_misuse(snippet)
        assert issue is not None
        assert "page.type()" in issue

    def test_proactive_on_extract_not_flagged(self) -> None:
        """ai='proactive' on extract is legitimate — extract doesn't have selectors."""
        snippet = """
async def block_fn(page, context):
    result = await page.extract(prompt='Get the invoice data', ai='proactive')
"""
        # extract is not in _INTERACTION_METHODS, so this should pass
        assert self.reviewer._validate_proactive_misuse(snippet) is None

    def test_comments_ignored(self) -> None:
        """Commented-out interaction calls must not trigger a report."""
        snippet = """
async def block_fn(page, context):
    # await page.fill(selector='input', ai='proactive', prompt='test')
    await page.fill(selector='input', value='x', ai='fallback', prompt='test')
"""
        assert self.reviewer._validate_proactive_misuse(snippet) is None

    def test_multiline_call_flagged(self) -> None:
        """ai='proactive' on a continuation line should still be caught."""
        snippet = """
async def login(page, context):
    await page.fill(
        selector='input[name="email"]',
        value='test',
        ai='proactive',
        prompt='email field',
    )
"""
        issue = self.reviewer._validate_proactive_misuse(snippet)
        assert issue is not None
        assert "page.fill()" in issue

    def test_multiple_issues_reported(self) -> None:
        """Every misused call site should appear in the combined report."""
        snippet = """
async def login(page, context):
    await page.fill(selector='#user', ai='proactive', prompt='user')
    await page.fill(selector='#pass', ai='proactive', prompt='pass')
    await page.click(selector='#submit', ai='proactive', prompt='submit')
"""
        issue = self.reviewer._validate_proactive_misuse(snippet)
        assert issue is not None
        # Should mention multiple occurrences
        assert "page.fill()" in issue
        assert "page.click()" in issue
class TestValidateFragileSelectors:
    """Exercise ScriptReviewer._validate_fragile_selectors."""

    def setup_method(self) -> None:
        self.reviewer = ScriptReviewer()

    def test_stable_selectors_pass(self) -> None:
        """Attribute- and text-based selectors are considered stable."""
        snippet = """
async def login(page, context):
    await page.fill(selector='input[name="email"]', value='test', ai='fallback', prompt='email')
    await page.click(selector='button:has-text("Sign In")', ai='fallback', prompt='sign in')
    await page.fill(selector='input[placeholder="Password"]', value='pass', ai='fallback', prompt='pass')
"""
        assert self.reviewer._validate_fragile_selectors(snippet) is None

    def test_dotnetnuke_id_flagged(self) -> None:
        """Auto-generated DotNetNuke control IDs are fragile."""
        snippet = """
async def login(page, context):
    await page.click(selector='#dnn_ctl00_aMyAccount', ai='fallback', prompt='account')
"""
        issue = self.reviewer._validate_fragile_selectors(snippet)
        assert issue is not None
        assert "dnn_" in issue
        assert "DotNetNuke" in issue

    def test_ember_id_flagged(self) -> None:
        """Ember's generated element IDs are fragile."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='#ember-1234', ai='fallback', prompt='click')
"""
        issue = self.reviewer._validate_fragile_selectors(snippet)
        assert issue is not None
        assert "ember" in issue.lower()

    def test_react_select_id_flagged(self) -> None:
        """react-select's generated option IDs are fragile."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='#react-select-5-option-2', ai='fallback', prompt='select option')
"""
        issue = self.reviewer._validate_fragile_selectors(snippet)
        assert issue is not None
        assert "react-select" in issue.lower()

    def test_css_in_js_class_flagged(self) -> None:
        """Hashed CSS-in-JS class names are fragile."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='.css-1a2b3c', ai='fallback', prompt='click button')
"""
        issue = self.reviewer._validate_fragile_selectors(snippet)
        assert issue is not None
        assert "css-" in issue.lower()

    def test_mui_class_flagged(self) -> None:
        """Material-UI utility classes are fragile."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='.MuiButton-root', ai='fallback', prompt='click button')
"""
        issue = self.reviewer._validate_fragile_selectors(snippet)
        assert issue is not None

    def test_extjs_id_flagged(self) -> None:
        """ExtJS generated IDs are fragile."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='#ext-gen-456', ai='fallback', prompt='click')
"""
        issue = self.reviewer._validate_fragile_selectors(snippet)
        assert issue is not None
        assert "ext-gen" in issue.lower()

    def test_comments_ignored(self) -> None:
        """Fragile selectors inside comments must not trigger a report."""
        snippet = """
async def block_fn(page, context):
    # selector='#dnn_ctl00_aMyAccount' is fragile but this is a comment
    await page.click(selector='button:has-text("Submit")', ai='fallback', prompt='submit')
"""
        assert self.reviewer._validate_fragile_selectors(snippet) is None

    def test_multiline_call_flagged(self) -> None:
        """Fragile selector on a continuation line should still be caught."""
        snippet = """
async def block_fn(page, context):
    await page.click(
        selector='#dnn_ctl00_aMyAccount',
        ai='fallback',
        prompt='account link',
    )
"""
        issue = self.reviewer._validate_fragile_selectors(snippet)
        assert issue is not None
        assert "dnn_" in issue

    def test_no_selector_passes(self) -> None:
        """Code without selector= kwargs should not trigger."""
        snippet = """
async def block_fn(page, context):
    await page.click(ai='fallback', prompt='click the submit button')
"""
        assert self.reviewer._validate_fragile_selectors(snippet) is None
class TestValidateHardcodedRunData:
    """Exercise ScriptReviewer._validate_hardcoded_run_data."""

    def setup_method(self) -> None:
        self.reviewer = ScriptReviewer()

    def test_clean_code_passes(self) -> None:
        """Generic selectors and prompts carry no run-specific data."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='button:has-text("Download")', ai='fallback', prompt='download invoice')
"""
        assert self.reviewer._validate_hardcoded_run_data(snippet) is None

    def test_date_in_selector_flagged(self) -> None:
        """A US-format date baked into a selector must be reported."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='a:has-text("03/17/2026")', ai='fallback', prompt='click invoice')
"""
        issue = self.reviewer._validate_hardcoded_run_data(snippet)
        assert issue is not None
        assert "03/17/2026" in issue
        assert "date" in issue.lower()

    def test_iso_date_in_selector_flagged(self) -> None:
        """An ISO-format date baked into a selector must be reported."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='td:has-text("2026-03-17")', ai='fallback', prompt='click invoice')
"""
        issue = self.reviewer._validate_hardcoded_run_data(snippet)
        assert issue is not None
        assert "2026-03-17" in issue

    def test_date_in_prompt_flagged(self) -> None:
        """A hardcoded date inside a prompt kwarg must be reported."""
        snippet = """
async def block_fn(page, context):
    await page.select_option(selector='select', value='PDF', ai='proactive', prompt='Select format for invoice dated 3/17/2026')
"""
        issue = self.reviewer._validate_hardcoded_run_data(snippet)
        assert issue is not None
        assert "3/17/2026" in issue
        assert "prompt" in issue.lower()

    def test_short_has_text_flagged(self) -> None:
        """a:has-text("6") is almost certainly hardcoded run data."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='a:has-text("6")', ai='fallback', prompt='click invoice')
"""
        issue = self.reviewer._validate_hardcoded_run_data(snippet)
        assert issue is not None
        assert ':has-text("6")' in issue

    def test_short_has_text_number_flagged(self) -> None:
        """Two-digit has-text values are also treated as run data."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='a:has-text("12")', ai='fallback', prompt='click row')
"""
        issue = self.reviewer._validate_hardcoded_run_data(snippet)
        assert issue is not None

    def test_stable_short_text_ok(self) -> None:
        """Common UI elements with short text like 'OK' or 'X' should pass."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='button:has-text("OK")', ai='fallback', prompt='confirm')
"""
        assert self.reviewer._validate_hardcoded_run_data(snippet) is None

    def test_long_has_text_ok(self) -> None:
        """:has-text with meaningful text (3+ chars) should pass."""
        snippet = """
async def block_fn(page, context):
    await page.click(selector='a:has-text("Download")', ai='fallback', prompt='download')
"""
        assert self.reviewer._validate_hardcoded_run_data(snippet) is None

    def test_comments_ignored(self) -> None:
        """Hardcoded values inside comments must not trigger a report."""
        snippet = """
async def block_fn(page, context):
    # Don't use selector='a:has-text("6")' — it's hardcoded
    await page.click(selector='a:has-text("Download")', ai='fallback', prompt='download')
"""
        assert self.reviewer._validate_hardcoded_run_data(snippet) is None

    def test_multiline_date_in_selector_flagged(self) -> None:
        """Hardcoded date on a continuation line should still be caught."""
        snippet = """
async def block_fn(page, context):
    await page.click(
        selector='a:has-text("03/17/2026")',
        ai='fallback',
        prompt='click invoice',
    )
"""
        issue = self.reviewer._validate_hardcoded_run_data(snippet)
        assert issue is not None
        assert "03/17/2026" in issue

    def test_multiline_date_in_prompt_flagged(self) -> None:
        """Hardcoded date in prompt on a continuation line should still be caught."""
        snippet = """
async def block_fn(page, context):
    await page.select_option(
        selector='select',
        value='PDF',
        ai='proactive',
        prompt='Select format for invoice dated 3/17/2026',
    )
"""
        issue = self.reviewer._validate_hardcoded_run_data(snippet)
        assert issue is not None
        assert "3/17/2026" in issue

    def test_parameterized_date_ok(self) -> None:
        """Dates in context.parameters references should not trigger."""
        snippet = """
async def block_fn(page, context):
    start_date = context.parameters['download_start_date']
    await page.click(selector=f'a:has-text("{start_date}")', ai='fallback', prompt='click date')
"""
        # f-string selectors won't match the date regex in the selector value
        assert self.reviewer._validate_hardcoded_run_data(snippet) is None

309
uv.lock generated
View file

@ -2,13 +2,19 @@ version = 1
revision = 3 revision = 3
requires-python = ">=3.11, <3.14" requires-python = ">=3.11, <3.14"
resolution-markers = [ resolution-markers = [
"python_full_version >= '3.13'", "python_full_version >= '3.13' and sys_platform == 'win32'",
"python_full_version == '3.12.*'", "python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version < '3.12'", "python_full_version >= '3.13' and sys_platform != 'emscripten' and sys_platform != 'win32'",
"python_full_version == '3.12.*' and sys_platform == 'win32'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'",
"python_full_version < '3.12' and sys_platform == 'win32'",
"python_full_version < '3.12' and sys_platform == 'emscripten'",
"python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
[options] [options]
exclude-newer = "2026-03-24T13:53:47.750565Z" exclude-newer = "2026-03-26T19:14:39.897426Z"
exclude-newer-span = "P7D" exclude-newer-span = "P7D"
[manifest] [manifest]
@ -658,16 +664,16 @@ wheels = [
[[package]] [[package]]
name = "build" name = "build"
version = "1.4.0" version = "1.4.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "colorama", marker = "os_name == 'nt'" }, { name = "colorama", marker = "os_name == 'nt'" },
{ name = "packaging" }, { name = "packaging" },
{ name = "pyproject-hooks" }, { name = "pyproject-hooks" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/42/18/94eaffda7b329535d91f00fe605ab1f1e5cd68b2074d03f255c7d250687d/build-1.4.0.tar.gz", hash = "sha256:f1b91b925aa322be454f8330c6fb48b465da993d1e7e7e6fa35027ec49f3c936", size = 50054, upload-time = "2026-01-08T16:41:47.696Z" } sdist = { url = "https://files.pythonhosted.org/packages/6c/1d/ab15c8ac57f4ee8778d7633bc6685f808ab414437b8644f555389cdc875e/build-1.4.2.tar.gz", hash = "sha256:35b14e1ee329c186d3f08466003521ed7685ec15ecffc07e68d706090bf161d1", size = 83433, upload-time = "2026-03-25T14:20:27.659Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/c5/0d/84a4380f930db0010168e0aa7b7a8fed9ba1835a8fbb1472bc6d0201d529/build-1.4.0-py3-none-any.whl", hash = "sha256:6a07c1b8eb6f2b311b96fcbdbce5dab5fe637ffda0fd83c9cac622e927501596", size = 24141, upload-time = "2026-01-08T16:41:46.453Z" }, { url = "https://files.pythonhosted.org/packages/4a/57/3b7d4dd193ade4641c865bc2b93aeeb71162e81fc348b8dad020215601ed/build-1.4.2-py3-none-any.whl", hash = "sha256:7a4d8651ea877cb2a89458b1b198f2e69f536c95e89129dbf5d448045d60db88", size = 24643, upload-time = "2026-03-25T14:20:26.568Z" },
] ]
[[package]] [[package]]
@ -768,7 +774,7 @@ wheels = [
[[package]] [[package]]
name = "cfn-lint" name = "cfn-lint"
version = "1.47.0" version = "1.47.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "aws-sam-translator" }, { name = "aws-sam-translator" },
@ -779,9 +785,9 @@ dependencies = [
{ name = "sympy" }, { name = "sympy" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/61/f2/67074ab011d47e973b57e1efb6adf798fbaab3873b9a3edcf6f8b9c33639/cfn_lint-1.47.0.tar.gz", hash = "sha256:29a9377febd6c8fa30903d437a010bcf042426b220d009101c6cf8523819291f", size = 3673549, upload-time = "2026-03-17T17:14:45.95Z" } sdist = { url = "https://files.pythonhosted.org/packages/10/34/e66811016e7709cab78b0cf896437b922d7537986ac727344663b6cc2044/cfn_lint-1.47.1.tar.gz", hash = "sha256:b2eedbcee3aa104602f79933e3ad74c01f0fa1e226b70327118926fd78d8d3f1", size = 3672271, upload-time = "2026-03-24T15:59:34.526Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/16/2e/c552923e89004a268f94520de836525523132dd76ec52d3476a076d9dda2/cfn_lint-1.47.0-py3-none-any.whl", hash = "sha256:1148f37f2733cdb361897746eda903b5414ad5dd7e5df9ea634ac9f6c67975c9", size = 5281656, upload-time = "2026-03-17T17:14:43.607Z" }, { url = "https://files.pythonhosted.org/packages/a5/88/19802ef0e1ef6259c4bc4b58226c0e7ff8b7ae93806ca32354c007e3480a/cfn_lint-1.47.1-py3-none-any.whl", hash = "sha256:3a4b5dba0fd03c24f2bc0e112a88ad90fa29014971e881b8f1e297d22f398a97", size = 5299292, upload-time = "2026-03-24T15:59:31.86Z" },
] ]
[[package]] [[package]]
@ -886,47 +892,47 @@ wheels = [
[[package]] [[package]]
name = "cryptography" name = "cryptography"
version = "46.0.5" version = "46.0.6"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, { name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } sdist = { url = "https://files.pythonhosted.org/packages/a4/ba/04b1bd4218cbc58dc90ce967106d51582371b898690f3ae0402876cc4f34/cryptography-46.0.6.tar.gz", hash = "sha256:27550628a518c5c6c903d84f637fbecf287f6cb9ced3804838a1295dc1fd0759", size = 750542, upload-time = "2026-03-25T23:34:53.396Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, { url = "https://files.pythonhosted.org/packages/47/23/9285e15e3bc57325b0a72e592921983a701efc1ee8f91c06c5f0235d86d9/cryptography-46.0.6-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:64235194bad039a10bb6d2d930ab3323baaec67e2ce36215fd0952fad0930ca8", size = 7176401, upload-time = "2026-03-25T23:33:22.096Z" },
{ url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, { url = "https://files.pythonhosted.org/packages/60/f8/e61f8f13950ab6195b31913b42d39f0f9afc7d93f76710f299b5ec286ae6/cryptography-46.0.6-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:26031f1e5ca62fcb9d1fcb34b2b60b390d1aacaa15dc8b895a9ed00968b97b30", size = 4275275, upload-time = "2026-03-25T23:33:23.844Z" },
{ url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, { url = "https://files.pythonhosted.org/packages/19/69/732a736d12c2631e140be2348b4ad3d226302df63ef64d30dfdb8db7ad1c/cryptography-46.0.6-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9a693028b9cbe51b5a1136232ee8f2bc242e4e19d456ded3fa7c86e43c713b4a", size = 4425320, upload-time = "2026-03-25T23:33:25.703Z" },
{ url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, { url = "https://files.pythonhosted.org/packages/d4/12/123be7292674abf76b21ac1fc0e1af50661f0e5b8f0ec8285faac18eb99e/cryptography-46.0.6-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:67177e8a9f421aa2d3a170c3e56eca4e0128883cf52a071a7cbf53297f18b175", size = 4278082, upload-time = "2026-03-25T23:33:27.423Z" },
{ url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, { url = "https://files.pythonhosted.org/packages/5b/ba/d5e27f8d68c24951b0a484924a84c7cdaed7502bac9f18601cd357f8b1d2/cryptography-46.0.6-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:d9528b535a6c4f8ff37847144b8986a9a143585f0540fbcb1a98115b543aa463", size = 4926514, upload-time = "2026-03-25T23:33:29.206Z" },
{ url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, { url = "https://files.pythonhosted.org/packages/34/71/1ea5a7352ae516d5512d17babe7e1b87d9db5150b21f794b1377eac1edc0/cryptography-46.0.6-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:22259338084d6ae497a19bae5d4c66b7ca1387d3264d1c2c0e72d9e9b6a77b97", size = 4457766, upload-time = "2026-03-25T23:33:30.834Z" },
{ url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, { url = "https://files.pythonhosted.org/packages/01/59/562be1e653accee4fdad92c7a2e88fced26b3fdfce144047519bbebc299e/cryptography-46.0.6-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:760997a4b950ff00d418398ad73fbc91aa2894b5c1db7ccb45b4f68b42a63b3c", size = 3986535, upload-time = "2026-03-25T23:33:33.02Z" },
{ url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, { url = "https://files.pythonhosted.org/packages/d6/8b/b1ebfeb788bf4624d36e45ed2662b8bd43a05ff62157093c1539c1288a18/cryptography-46.0.6-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:3dfa6567f2e9e4c5dceb8ccb5a708158a2a871052fa75c8b78cb0977063f1507", size = 4277618, upload-time = "2026-03-25T23:33:34.567Z" },
{ url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, { url = "https://files.pythonhosted.org/packages/dd/52/a005f8eabdb28df57c20f84c44d397a755782d6ff6d455f05baa2785bd91/cryptography-46.0.6-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:cdcd3edcbc5d55757e5f5f3d330dd00007ae463a7e7aa5bf132d1f22a4b62b19", size = 4890802, upload-time = "2026-03-25T23:33:37.034Z" },
{ url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, { url = "https://files.pythonhosted.org/packages/ec/4d/8e7d7245c79c617d08724e2efa397737715ca0ec830ecb3c91e547302555/cryptography-46.0.6-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:d4e4aadb7fc1f88687f47ca20bb7227981b03afaae69287029da08096853b738", size = 4457425, upload-time = "2026-03-25T23:33:38.904Z" },
{ url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, { url = "https://files.pythonhosted.org/packages/1d/5c/f6c3596a1430cec6f949085f0e1a970638d76f81c3ea56d93d564d04c340/cryptography-46.0.6-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2b417edbe8877cda9022dde3a008e2deb50be9c407eef034aeeb3a8b11d9db3c", size = 4405530, upload-time = "2026-03-25T23:33:40.842Z" },
{ url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, { url = "https://files.pythonhosted.org/packages/7e/c9/9f9cea13ee2dbde070424e0c4f621c091a91ffcc504ffea5e74f0e1daeff/cryptography-46.0.6-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:380343e0653b1c9d7e1f55b52aaa2dbb2fdf2730088d48c43ca1c7c0abb7cc2f", size = 4667896, upload-time = "2026-03-25T23:33:42.781Z" },
{ url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, { url = "https://files.pythonhosted.org/packages/ad/b5/1895bc0821226f129bc74d00eccfc6a5969e2028f8617c09790bf89c185e/cryptography-46.0.6-cp311-abi3-win32.whl", hash = "sha256:bcb87663e1f7b075e48c3be3ecb5f0b46c8fc50b50a97cf264e7f60242dca3f2", size = 3026348, upload-time = "2026-03-25T23:33:45.021Z" },
{ url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, { url = "https://files.pythonhosted.org/packages/c3/f8/c9bcbf0d3e6ad288b9d9aa0b1dee04b063d19e8c4f871855a03ab3a297ab/cryptography-46.0.6-cp311-abi3-win_amd64.whl", hash = "sha256:6739d56300662c468fddb0e5e291f9b4d084bead381667b9e654c7dd81705124", size = 3483896, upload-time = "2026-03-25T23:33:46.649Z" },
{ url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, { url = "https://files.pythonhosted.org/packages/c4/cc/f330e982852403da79008552de9906804568ae9230da8432f7496ce02b71/cryptography-46.0.6-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:12cae594e9473bca1a7aceb90536060643128bb274fcea0fc459ab90f7d1ae7a", size = 7162776, upload-time = "2026-03-25T23:34:13.308Z" },
{ url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, { url = "https://files.pythonhosted.org/packages/49/b3/dc27efd8dcc4bff583b3f01d4a3943cd8b5821777a58b3a6a5f054d61b79/cryptography-46.0.6-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:639301950939d844a9e1c4464d7e07f902fe9a7f6b215bb0d4f28584729935d8", size = 4270529, upload-time = "2026-03-25T23:34:15.019Z" },
{ url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, { url = "https://files.pythonhosted.org/packages/e6/05/e8d0e6eb4f0d83365b3cb0e00eb3c484f7348db0266652ccd84632a3d58d/cryptography-46.0.6-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ed3775295fb91f70b4027aeba878d79b3e55c0b3e97eaa4de71f8f23a9f2eb77", size = 4414827, upload-time = "2026-03-25T23:34:16.604Z" },
{ url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, { url = "https://files.pythonhosted.org/packages/2f/97/daba0f5d2dc6d855e2dcb70733c812558a7977a55dd4a6722756628c44d1/cryptography-46.0.6-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8927ccfbe967c7df312ade694f987e7e9e22b2425976ddbf28271d7e58845290", size = 4271265, upload-time = "2026-03-25T23:34:18.586Z" },
{ url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, { url = "https://files.pythonhosted.org/packages/89/06/fe1fce39a37ac452e58d04b43b0855261dac320a2ebf8f5260dd55b201a9/cryptography-46.0.6-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:b12c6b1e1651e42ab5de8b1e00dc3b6354fdfd778e7fa60541ddacc27cd21410", size = 4916800, upload-time = "2026-03-25T23:34:20.561Z" },
{ url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, { url = "https://files.pythonhosted.org/packages/ff/8a/b14f3101fe9c3592603339eb5d94046c3ce5f7fc76d6512a2d40efd9724e/cryptography-46.0.6-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:063b67749f338ca9c5a0b7fe438a52c25f9526b851e24e6c9310e7195aad3b4d", size = 4448771, upload-time = "2026-03-25T23:34:22.406Z" },
{ url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, { url = "https://files.pythonhosted.org/packages/01/b3/0796998056a66d1973fd52ee89dc1bb3b6581960a91ad4ac705f182d398f/cryptography-46.0.6-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:02fad249cb0e090b574e30b276a3da6a149e04ee2f049725b1f69e7b8351ec70", size = 3978333, upload-time = "2026-03-25T23:34:24.281Z" },
{ url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, { url = "https://files.pythonhosted.org/packages/c5/3d/db200af5a4ffd08918cd55c08399dc6c9c50b0bc72c00a3246e099d3a849/cryptography-46.0.6-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:7e6142674f2a9291463e5e150090b95a8519b2fb6e6aaec8917dd8d094ce750d", size = 4271069, upload-time = "2026-03-25T23:34:25.895Z" },
{ url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, { url = "https://files.pythonhosted.org/packages/d7/18/61acfd5b414309d74ee838be321c636fe71815436f53c9f0334bf19064fa/cryptography-46.0.6-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:456b3215172aeefb9284550b162801d62f5f264a081049a3e94307fe20792cfa", size = 4878358, upload-time = "2026-03-25T23:34:27.67Z" },
{ url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, { url = "https://files.pythonhosted.org/packages/8b/65/5bf43286d566f8171917cae23ac6add941654ccf085d739195a4eacf1674/cryptography-46.0.6-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:341359d6c9e68834e204ceaf25936dffeafea3829ab80e9503860dcc4f4dac58", size = 4448061, upload-time = "2026-03-25T23:34:29.375Z" },
{ url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, { url = "https://files.pythonhosted.org/packages/e0/25/7e49c0fa7205cf3597e525d156a6bce5b5c9de1fd7e8cb01120e459f205a/cryptography-46.0.6-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9a9c42a2723999a710445bc0d974e345c32adfd8d2fac6d8a251fa829ad31cfb", size = 4399103, upload-time = "2026-03-25T23:34:32.036Z" },
{ url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, { url = "https://files.pythonhosted.org/packages/44/46/466269e833f1c4718d6cd496ffe20c56c9c8d013486ff66b4f69c302a68d/cryptography-46.0.6-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6617f67b1606dfd9fe4dbfa354a9508d4a6d37afe30306fe6c101b7ce3274b72", size = 4659255, upload-time = "2026-03-25T23:34:33.679Z" },
{ url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, { url = "https://files.pythonhosted.org/packages/0a/09/ddc5f630cc32287d2c953fc5d32705e63ec73e37308e5120955316f53827/cryptography-46.0.6-cp38-abi3-win32.whl", hash = "sha256:7f6690b6c55e9c5332c0b59b9c8a3fb232ebf059094c17f9019a51e9827df91c", size = 3010660, upload-time = "2026-03-25T23:34:35.418Z" },
{ url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, { url = "https://files.pythonhosted.org/packages/1b/82/ca4893968aeb2709aacfb57a30dec6fa2ab25b10fa9f064b8882ce33f599/cryptography-46.0.6-cp38-abi3-win_amd64.whl", hash = "sha256:79e865c642cfc5c0b3eb12af83c35c5aeff4fa5c672dc28c43721c2c9fdd2f0f", size = 3471160, upload-time = "2026-03-25T23:34:37.191Z" },
{ url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" }, { url = "https://files.pythonhosted.org/packages/2e/84/7ccff00ced5bac74b775ce0beb7d1be4e8637536b522b5df9b73ada42da2/cryptography-46.0.6-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:2ea0f37e9a9cf0df2952893ad145fd9627d326a59daec9b0802480fa3bcd2ead", size = 3475444, upload-time = "2026-03-25T23:34:38.944Z" },
{ url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, { url = "https://files.pythonhosted.org/packages/bc/1f/4c926f50df7749f000f20eede0c896769509895e2648db5da0ed55db711d/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a3e84d5ec9ba01f8fd03802b2147ba77f0c8f2617b2aff254cedd551844209c8", size = 4218227, upload-time = "2026-03-25T23:34:40.871Z" },
{ url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, { url = "https://files.pythonhosted.org/packages/c6/65/707be3ffbd5f786028665c3223e86e11c4cda86023adbc56bd72b1b6bab5/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:12f0fa16cc247b13c43d56d7b35287ff1569b5b1f4c5e87e92cc4fcc00cd10c0", size = 4381399, upload-time = "2026-03-25T23:34:42.609Z" },
{ url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, { url = "https://files.pythonhosted.org/packages/f3/6d/73557ed0ef7d73d04d9aba745d2c8e95218213687ee5e76b7d236a5030fc/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:50575a76e2951fe7dbd1f56d181f8c5ceeeb075e9ff88e7ad997d2f42af06e7b", size = 4217595, upload-time = "2026-03-25T23:34:44.205Z" },
{ url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, { url = "https://files.pythonhosted.org/packages/9e/c5/e1594c4eec66a567c3ac4400008108a415808be2ce13dcb9a9045c92f1a0/cryptography-46.0.6-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:90e5f0a7b3be5f40c3a0a0eafb32c681d8d2c181fc2a1bdabe9b3f611d9f6b1a", size = 4380912, upload-time = "2026-03-25T23:34:46.328Z" },
{ url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, { url = "https://files.pythonhosted.org/packages/1a/89/843b53614b47f97fe1abc13f9a86efa5ec9e275292c457af1d4a60dc80e0/cryptography-46.0.6-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6728c49e3b2c180ef26f8e9f0a883a2c585638db64cf265b49c9ba10652d430e", size = 3409955, upload-time = "2026-03-25T23:34:48.465Z" },
] ]
[[package]] [[package]]
@ -1416,7 +1422,7 @@ wheels = [
[[package]] [[package]]
name = "google-cloud-aiplatform" name = "google-cloud-aiplatform"
version = "1.142.0" version = "1.143.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "docstring-parser" }, { name = "docstring-parser" },
@ -1432,9 +1438,9 @@ dependencies = [
{ name = "pydantic" }, { name = "pydantic" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/41/0d/3063a0512d60cf18854a279e00ccb796429545464345ef821cf77cb93d05/google_cloud_aiplatform-1.142.0.tar.gz", hash = "sha256:87b49e002703dc14885093e9b264587db84222bef5f70f5a442d03f41beecdd1", size = 10207993, upload-time = "2026-03-20T22:49:13.797Z" } sdist = { url = "https://files.pythonhosted.org/packages/a7/08/939fb05870fdf155410a927e22f5b053d49f18e215618e102fba1d8bb147/google_cloud_aiplatform-1.143.0.tar.gz", hash = "sha256:1f0124a89795a6b473deb28724dd37d95334205df3a9c9c48d0b8d7a3d5d5cc4", size = 10215389, upload-time = "2026-03-25T18:30:15.444Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/59/8b/f29646d3fa940f0e38cfcc12137f4851856b50d7486a3c05103ebc78d82d/google_cloud_aiplatform-1.142.0-py2.py3-none-any.whl", hash = "sha256:17c91db9b613cbbafb2c36335b123686aeb2b4b8448be5134b565ae07165a39a", size = 8388991, upload-time = "2026-03-20T22:49:10.334Z" }, { url = "https://files.pythonhosted.org/packages/90/14/16323e604e79dc63b528268f97a841c2c29dd8eb16395de6bf530c1a5ebe/google_cloud_aiplatform-1.143.0-py2.py3-none-any.whl", hash = "sha256:78df97d044859f743a9cc48b89a260d33579b0d548b1589bb3ae9f4c2afc0c5a", size = 8392705, upload-time = "2026-03-25T18:30:11.496Z" },
] ]
[[package]] [[package]]
@ -1591,7 +1597,9 @@ name = "greenlet"
version = "3.0.3" version = "3.0.3"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version < '3.12'", "python_full_version < '3.12' and sys_platform == 'win32'",
"python_full_version < '3.12' and sys_platform == 'emscripten'",
"python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
sdist = { url = "https://files.pythonhosted.org/packages/17/14/3bddb1298b9a6786539ac609ba4b7c9c0842e12aa73aaa4d8d73ec8f8185/greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491", size = 182013, upload-time = "2023-12-21T22:02:54.659Z" } sdist = { url = "https://files.pythonhosted.org/packages/17/14/3bddb1298b9a6786539ac609ba4b7c9c0842e12aa73aaa4d8d73ec8f8185/greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491", size = 182013, upload-time = "2023-12-21T22:02:54.659Z" }
wheels = [ wheels = [
@ -1620,8 +1628,12 @@ name = "greenlet"
version = "3.3.2" version = "3.3.2"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version >= '3.13'", "python_full_version >= '3.13' and sys_platform == 'win32'",
"python_full_version == '3.12.*'", "python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version >= '3.13' and sys_platform != 'emscripten' and sys_platform != 'win32'",
"python_full_version == '3.12.*' and sys_platform == 'win32'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
sdist = { url = "https://files.pythonhosted.org/packages/a3/51/1664f6b78fc6ebbd98019a1fd730e83fa78f2db7058f72b1463d3612b8db/greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2", size = 188267, upload-time = "2026-02-20T20:54:15.531Z" } sdist = { url = "https://files.pythonhosted.org/packages/a3/51/1664f6b78fc6ebbd98019a1fd730e83fa78f2db7058f72b1463d3612b8db/greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2", size = 188267, upload-time = "2026-02-20T20:54:15.531Z" }
wheels = [ wheels = [
@ -1904,7 +1916,7 @@ wheels = [
[[package]] [[package]]
name = "huggingface-hub" name = "huggingface-hub"
version = "1.7.2" version = "1.8.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "filelock" }, { name = "filelock" },
@ -1917,9 +1929,9 @@ dependencies = [
{ name = "typer" }, { name = "typer" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/19/15/eafc1c57bf0f8afffb243dcd4c0cceb785e956acc17bba4d9bf2ae21fc9c/huggingface_hub-1.7.2.tar.gz", hash = "sha256:7f7e294e9bbb822e025bdb2ada025fa4344d978175a7f78e824d86e35f7ab43b", size = 724684, upload-time = "2026-03-20T10:36:08.767Z" } sdist = { url = "https://files.pythonhosted.org/packages/8e/2a/a847fd02261cd051da218baf99f90ee7c7040c109a01833db4f838f25256/huggingface_hub-1.8.0.tar.gz", hash = "sha256:c5627b2fd521e00caf8eff4ac965ba988ea75167fad7ee72e17f9b7183ec63f3", size = 735839, upload-time = "2026-03-25T16:01:28.152Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/08/de/3ad061a05f74728927ded48c90b73521b9a9328c85d841bdefb30e01fb85/huggingface_hub-1.7.2-py3-none-any.whl", hash = "sha256:288f33a0a17b2a73a1359e2a5fd28d1becb2c121748c6173ab8643fb342c850e", size = 618036, upload-time = "2026-03-20T10:36:06.824Z" }, { url = "https://files.pythonhosted.org/packages/a9/ae/8a3a16ea4d202cb641b51d2681bdd3d482c1c592d7570b3fa264730829ce/huggingface_hub-1.8.0-py3-none-any.whl", hash = "sha256:d3eb5047bd4e33c987429de6020d4810d38a5bef95b3b40df9b17346b7f353f2", size = 625208, upload-time = "2026-03-25T16:01:26.603Z" },
] ]
[[package]] [[package]]
@ -1975,7 +1987,7 @@ wheels = [
[[package]] [[package]]
name = "ipykernel" name = "ipykernel"
version = "6.31.0" version = "7.2.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "appnope", marker = "sys_platform == 'darwin'" }, { name = "appnope", marker = "sys_platform == 'darwin'" },
@ -1992,9 +2004,9 @@ dependencies = [
{ name = "tornado" }, { name = "tornado" },
{ name = "traitlets" }, { name = "traitlets" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/a5/1d/d5ba6edbfe6fae4c3105bca3a9c889563cc752c7f2de45e333164c7f4846/ipykernel-6.31.0.tar.gz", hash = "sha256:2372ce8bc1ff4f34e58cafed3a0feb2194b91fc7cad0fc72e79e47b45ee9e8f6", size = 167493, upload-time = "2025-10-20T11:42:39.948Z" } sdist = { url = "https://files.pythonhosted.org/packages/ca/8d/b68b728e2d06b9e0051019640a40a9eb7a88fcd82c2e1b5ce70bef5ff044/ipykernel-7.2.0.tar.gz", hash = "sha256:18ed160b6dee2cbb16e5f3575858bc19d8f1fe6046a9a680c708494ce31d909e", size = 176046, upload-time = "2026-02-06T16:43:27.403Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/f6/d8/502954a4ec0efcf264f99b65b41c3c54e65a647d9f0d6f62cd02227d242c/ipykernel-6.31.0-py3-none-any.whl", hash = "sha256:abe5386f6ced727a70e0eb0cf1da801fa7c5fa6ff82147747d5a0406cd8c94af", size = 117003, upload-time = "2025-10-20T11:42:37.502Z" }, { url = "https://files.pythonhosted.org/packages/82/b9/e73d5d9f405cba7706c539aa8b311b49d4c2f3d698d9c12f815231169c71/ipykernel-7.2.0-py3-none-any.whl", hash = "sha256:3bbd4420d2b3cc105cbdf3756bfc04500b1e52f090a90716851f3916c62e1661", size = 118788, upload-time = "2026-02-06T16:43:25.149Z" },
] ]
[[package]] [[package]]
@ -3196,26 +3208,26 @@ wheels = [
[[package]] [[package]]
name = "nh3" name = "nh3"
version = "0.3.3" version = "0.3.4"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cc/37/ab55eb2b05e334ff9a1ad52c556ace1f9c20a3f63613a165d384d5387657/nh3-0.3.3.tar.gz", hash = "sha256:185ed41b88c910b9ca8edc89ca3b4be688a12cb9de129d84befa2f74a0039fee", size = 18968, upload-time = "2026-02-14T09:35:15.664Z" } sdist = { url = "https://files.pythonhosted.org/packages/4e/86/f8d3a7c9bd1bbaa181f6312c757e0b74d25f71ecf84ea3c0dc5e0f01840d/nh3-0.3.4.tar.gz", hash = "sha256:96709a379997c1b28c8974146ca660b0dcd3794f4f6d50c1ea549bab39ac6ade", size = 19520, upload-time = "2026-03-25T10:57:30.789Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/13/3e/aef8cf8e0419b530c95e96ae93a5078e9b36c1e6613eeb1df03a80d5194e/nh3-0.3.3-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e8ee96156f7dfc6e30ecda650e480c5ae0a7d38f0c6fafc3c1c655e2500421d9", size = 1448640, upload-time = "2026-02-14T09:34:49.316Z" }, { url = "https://files.pythonhosted.org/packages/4a/57/a97955bc95960cfb1f0517043d60a121f4ba93fde252d4d9ffd3c2a9eead/nh3-0.3.4-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d8bebcb20ab4b91858385cd98fe58046ec4a624275b45ef9b976475604f45b49", size = 1439519, upload-time = "2026-03-25T10:57:12.019Z" },
{ url = "https://files.pythonhosted.org/packages/ca/43/d2011a4f6c0272cb122eeff40062ee06bb2b6e57eabc3a5e057df0d582df/nh3-0.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45fe0d6a607264910daec30360c8a3b5b1500fd832d21b2da608256287bcb92d", size = 839405, upload-time = "2026-02-14T09:34:50.779Z" }, { url = "https://files.pythonhosted.org/packages/2b/60/c9a33361da8cde7c7760f091cd10467bc470634e4eea31c8bb70935b00a4/nh3-0.3.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d825722a1e8cbc87d7ca1e47ffb1d2a6cf343ad4c1b8465becf7cadcabcdfd0", size = 833798, upload-time = "2026-03-25T10:57:13.264Z" },
{ url = "https://files.pythonhosted.org/packages/f8/f3/965048510c1caf2a34ed04411a46a04a06eb05563cd06f1aa57b71eb2bc8/nh3-0.3.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5bc1d4b30ba1ba896669d944b6003630592665974bd11a3dc2f661bde92798a7", size = 825849, upload-time = "2026-02-14T09:34:52.622Z" }, { url = "https://files.pythonhosted.org/packages/6b/19/9487790780b8c94eacca37866c1270b747a4af8e244d43b3b550fddbbf62/nh3-0.3.4-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4aa8b43e68c26b68069a3b6cef09de166d1d7fa140cf8d77e409a46cbf742e44", size = 820414, upload-time = "2026-03-25T10:57:14.236Z" },
{ url = "https://files.pythonhosted.org/packages/78/99/b4bbc6ad16329d8db2c2c320423f00b549ca3b129c2b2f9136be2606dbb0/nh3-0.3.3-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f433a2dd66545aad4a720ad1b2150edcdca75bfff6f4e6f378ade1ec138d5e77", size = 1068303, upload-time = "2026-02-14T09:34:54.179Z" }, { url = "https://files.pythonhosted.org/packages/6b/b4/c6a340dd321d20b1e4a663307032741da045685c87403926c43656f6f5ec/nh3-0.3.4-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f5f214618ad5eff4f2a6b13a8d4da4d9e7f37c569d90a13fb9f0caaf7d04fe21", size = 1061531, upload-time = "2026-03-25T10:57:15.384Z" },
{ url = "https://files.pythonhosted.org/packages/3f/34/3420d97065aab1b35f3e93ce9c96c8ebd423ce86fe84dee3126790421a2a/nh3-0.3.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52e973cb742e95b9ae1b35822ce23992428750f4b46b619fe86eba4205255b30", size = 1029316, upload-time = "2026-02-14T09:34:56.186Z" }, { url = "https://files.pythonhosted.org/packages/c4/49/f6b4b474e0032e4bcbb7174b44e4cf6915670e09c62421deb06ccfcb88b8/nh3-0.3.4-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3390e4333883673a684ce16c1716b481e91782d6f56dec5c85fed9feedb23382", size = 1021889, upload-time = "2026-03-25T10:57:16.454Z" },
{ url = "https://files.pythonhosted.org/packages/f1/9a/99eda757b14e596fdb2ca5f599a849d9554181aa899274d0d183faef4493/nh3-0.3.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c730617bdc15d7092dcc0469dc2826b914c8f874996d105b4bc3842a41c1cd9", size = 919944, upload-time = "2026-02-14T09:34:57.886Z" }, { url = "https://files.pythonhosted.org/packages/43/da/e52a6941746d1f974752af3fc8591f1dbcdcf7fd8c726c7d99f444ba820e/nh3-0.3.4-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18a2e44ccb29cbb45071b8f3f2dab9ebfb41a6516f328f91f1f1fd18196239a4", size = 912965, upload-time = "2026-03-25T10:57:17.624Z" },
{ url = "https://files.pythonhosted.org/packages/6f/84/c0dc75c7fb596135f999e59a410d9f45bdabb989f1cb911f0016d22b747b/nh3-0.3.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e98fa3dbfd54e25487e36ba500bc29bca3a4cab4ffba18cfb1a35a2d02624297", size = 811461, upload-time = "2026-02-14T09:34:59.65Z" }, { url = "https://files.pythonhosted.org/packages/d6/b7/ec1cbc6b297a808c513f59f501656389623fc09ad6a58c640851289c7854/nh3-0.3.4-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0961a27dc2057c38d0364cb05880e1997ae1c80220cbc847db63213720b8f304", size = 804975, upload-time = "2026-03-25T10:57:18.994Z" },
{ url = "https://files.pythonhosted.org/packages/7e/ec/b1bf57cab6230eec910e4863528dc51dcf21b57aaf7c88ee9190d62c9185/nh3-0.3.3-cp38-abi3-manylinux_2_31_riscv64.whl", hash = "sha256:3a62b8ae7c235481715055222e54c682422d0495a5c73326807d4e44c5d14691", size = 840360, upload-time = "2026-02-14T09:35:01.444Z" }, { url = "https://files.pythonhosted.org/packages/a9/56/b1275aa2c6510191eed76178da4626b0900402439cb9f27d6b9bf7c6d5e9/nh3-0.3.4-cp38-abi3-manylinux_2_31_riscv64.whl", hash = "sha256:9337517edb7c10228252cce2898e20fb3d77e32ffaccbb3c66897927d74215a0", size = 833400, upload-time = "2026-03-25T10:57:20.086Z" },
{ url = "https://files.pythonhosted.org/packages/37/5e/326ae34e904dde09af1de51219a611ae914111f0970f2f111f4f0188f57e/nh3-0.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc305a2264868ec8fa16548296f803d8fd9c1fa66cd28b88b605b1bd06667c0b", size = 859872, upload-time = "2026-02-14T09:35:03.348Z" }, { url = "https://files.pythonhosted.org/packages/7c/a5/5d574ffa3c6e49a5364d1b25ebad165501c055340056671493beb467a15e/nh3-0.3.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d866701affe67a5171b916b5c076e767a74c6a9efb7fb2006eb8d3c5f9a293d5", size = 854277, upload-time = "2026-03-25T10:57:21.433Z" },
{ url = "https://files.pythonhosted.org/packages/09/38/7eba529ce17ab4d3790205da37deabb4cb6edcba15f27b8562e467f2fc97/nh3-0.3.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:90126a834c18af03bfd6ff9a027bfa6bbf0e238527bc780a24de6bd7cc1041e2", size = 1023550, upload-time = "2026-02-14T09:35:04.829Z" }, { url = "https://files.pythonhosted.org/packages/79/36/8aeb2ab21517cefa212db109e41024e02650716cb42bf293d0a88437a92d/nh3-0.3.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:47d749d99ae005ab19517224140b280dd56e77b33afb82f9b600e106d0458003", size = 1022021, upload-time = "2026-03-25T10:57:22.433Z" },
{ url = "https://files.pythonhosted.org/packages/05/a2/556fdecd37c3681b1edee2cf795a6799c6ed0a5551b2822636960d7e7651/nh3-0.3.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:24769a428e9e971e4ccfb24628f83aaa7dc3c8b41b130c8ddc1835fa1c924489", size = 1105212, upload-time = "2026-02-14T09:35:06.821Z" }, { url = "https://files.pythonhosted.org/packages/9c/95/9fd860997685e64abe2d5a995ca2eb5004c0fb6d6585429612a7871548b9/nh3-0.3.4-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:f987cb56458323405e8e5ea827e1befcf141ffa0c0ac797d6d02e6b646056d9a", size = 1103526, upload-time = "2026-03-25T10:57:23.487Z" },
{ url = "https://files.pythonhosted.org/packages/dd/e3/5db0b0ad663234967d83702277094687baf7c498831a2d3ad3451c11770f/nh3-0.3.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:b7a18ee057761e455d58b9d31445c3e4b2594cff4ddb84d2e331c011ef46f462", size = 1069970, upload-time = "2026-02-14T09:35:08.504Z" }, { url = "https://files.pythonhosted.org/packages/7d/0d/df545070614c1007f0109bb004230226c9000e7857c9785583ec25cda9d7/nh3-0.3.4-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:883d5a6d6ee8078c4afc8e96e022fe579c4c265775ff6ee21e39b8c542cabab3", size = 1068050, upload-time = "2026-03-25T10:57:24.624Z" },
{ url = "https://files.pythonhosted.org/packages/79/b2/2ea21b79c6e869581ce5f51549b6e185c4762233591455bf2a326fb07f3b/nh3-0.3.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a4b2c1f3e6f3cbe7048e17f4fefad3f8d3e14cc0fd08fb8599e0d5653f6b181", size = 1047588, upload-time = "2026-02-14T09:35:09.911Z" }, { url = "https://files.pythonhosted.org/packages/94/d5/17b016df52df052f714c53be71df26a1943551d9931e9383b92c998b88f8/nh3-0.3.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:75643c22f5092d8e209f766ee8108c400bc1e44760fc94d2d638eb138d18f853", size = 1046037, upload-time = "2026-03-25T10:57:25.799Z" },
{ url = "https://files.pythonhosted.org/packages/e2/92/2e434619e658c806d9c096eed2cdff9a883084299b7b19a3f0824eb8e63d/nh3-0.3.3-cp38-abi3-win32.whl", hash = "sha256:e974850b131fdffa75e7ad8e0d9c7a855b96227b093417fdf1bd61656e530f37", size = 616179, upload-time = "2026-02-14T09:35:11.366Z" }, { url = "https://files.pythonhosted.org/packages/51/39/49f737907e6ab2b4ca71855d3bd63dd7958862e9c8b94fb4e5b18ccf6988/nh3-0.3.4-cp38-abi3-win32.whl", hash = "sha256:72e4e9ca1c4bd41b4a28b0190edc2e21e3f71496acd36a0162858e1a28db3d7e", size = 609542, upload-time = "2026-03-25T10:57:27.112Z" },
{ url = "https://files.pythonhosted.org/packages/73/88/1ce287ef8649dc51365b5094bd3713b76454838140a32ab4f8349973883c/nh3-0.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:2efd17c0355d04d39e6d79122b42662277ac10a17ea48831d90b46e5ef7e4fc0", size = 631159, upload-time = "2026-02-14T09:35:12.77Z" }, { url = "https://files.pythonhosted.org/packages/73/4f/af8e9071d7464575a7316831938237ffc9d92d27f163dbdd964b1309cd9b/nh3-0.3.4-cp38-abi3-win_amd64.whl", hash = "sha256:c10b1f0c741e257a5cb2978d6bac86e7c784ab20572724b20c6402c2e24bce75", size = 624244, upload-time = "2026-03-25T10:57:28.302Z" },
{ url = "https://files.pythonhosted.org/packages/31/f1/b4835dbde4fb06f29db89db027576d6014081cd278d9b6751facc3e69e43/nh3-0.3.3-cp38-abi3-win_arm64.whl", hash = "sha256:b838e619f483531483d26d889438e53a880510e832d2aafe73f93b7b1ac2bce2", size = 616645, upload-time = "2026-02-14T09:35:14.062Z" }, { url = "https://files.pythonhosted.org/packages/44/0c/37695d6b0168f6714b5c492331636a9e6123d6ec22d25876c68d06eab1b8/nh3-0.3.4-cp38-abi3-win_arm64.whl", hash = "sha256:43ad4eedee7e049b9069bc015b7b095d320ed6d167ecec111f877de1540656e9", size = 616649, upload-time = "2026-03-25T10:57:29.623Z" },
] ]
[[package]] [[package]]
@ -3350,7 +3362,7 @@ wheels = [
[[package]] [[package]]
name = "openai" name = "openai"
version = "2.29.0" version = "2.30.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "anyio" }, { name = "anyio" },
@ -3362,9 +3374,9 @@ dependencies = [
{ name = "tqdm" }, { name = "tqdm" },
{ name = "typing-extensions" }, { name = "typing-extensions" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/b4/15/203d537e58986b5673e7f232453a2a2f110f22757b15921cbdeea392e520/openai-2.29.0.tar.gz", hash = "sha256:32d09eb2f661b38d3edd7d7e1a2943d1633f572596febe64c0cd370c86d52bec", size = 671128, upload-time = "2026-03-17T17:53:49.599Z" } sdist = { url = "https://files.pythonhosted.org/packages/88/15/52580c8fbc16d0675d516e8749806eda679b16de1e4434ea06fb6feaa610/openai-2.30.0.tar.gz", hash = "sha256:92f7661c990bda4b22a941806c83eabe4896c3094465030dd882a71abe80c885", size = 676084, upload-time = "2026-03-25T22:08:59.96Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/d0/b1/35b6f9c8cf9318e3dbb7146cc82dab4cf61182a8d5406fc9b50864362895/openai-2.29.0-py3-none-any.whl", hash = "sha256:b7c5de513c3286d17c5e29b92c4c98ceaf0d775244ac8159aeb1bddf840eb42a", size = 1141533, upload-time = "2026-03-17T17:53:47.348Z" }, { url = "https://files.pythonhosted.org/packages/2a/9e/5bfa2270f902d5b92ab7d41ce0475b8630572e71e349b2a4996d14bdda93/openai-2.30.0-py3-none-any.whl", hash = "sha256:9a5ae616888eb2748ec5e0c5b955a51592e0b201a11f4262db920f2a78c5231d", size = 1146656, upload-time = "2026-03-25T22:08:58.2Z" },
] ]
[[package]] [[package]]
@ -3823,43 +3835,46 @@ wheels = [
[[package]] [[package]]
name = "pandas" name = "pandas"
version = "2.3.3" version = "3.0.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "numpy" }, { name = "numpy" },
{ name = "python-dateutil" }, { name = "python-dateutil" },
{ name = "pytz" }, { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" },
{ name = "tzdata" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/33/01/d40b85317f86cf08d853a4f495195c73815fdf205eef3993821720274518/pandas-2.3.3.tar.gz", hash = "sha256:e05e1af93b977f7eafa636d043f9f94c7ee3ac81af99c13508215942e64c993b", size = 4495223, upload-time = "2025-09-29T23:34:51.853Z" } sdist = { url = "https://files.pythonhosted.org/packages/2e/0c/b28ed414f080ee0ad153f848586d61d1878f91689950f037f976ce15f6c8/pandas-3.0.1.tar.gz", hash = "sha256:4186a699674af418f655dbd420ed87f50d56b4cd6603784279d9eef6627823c8", size = 4641901, upload-time = "2026-02-17T22:20:16.434Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/fa/7ac648108144a095b4fb6aa3de1954689f7af60a14cf25583f4960ecb878/pandas-2.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:602b8615ebcc4a0c1751e71840428ddebeb142ec02c786e8ad6b1ce3c8dec523", size = 11578790, upload-time = "2025-09-29T23:18:30.065Z" }, { url = "https://files.pythonhosted.org/packages/ff/07/c7087e003ceee9b9a82539b40414ec557aa795b584a1a346e89180853d79/pandas-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:de09668c1bf3b925c07e5762291602f0d789eca1b3a781f99c1c78f6cac0e7ea", size = 10323380, upload-time = "2026-02-17T22:18:16.133Z" },
{ url = "https://files.pythonhosted.org/packages/9b/35/74442388c6cf008882d4d4bdfc4109be87e9b8b7ccd097ad1e7f006e2e95/pandas-2.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8fe25fc7b623b0ef6b5009149627e34d2a4657e880948ec3c840e9402e5c1b45", size = 10833831, upload-time = "2025-09-29T23:38:56.071Z" }, { url = "https://files.pythonhosted.org/packages/c1/27/90683c7122febeefe84a56f2cde86a9f05f68d53885cebcc473298dfc33e/pandas-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24ba315ba3d6e5806063ac6eb717504e499ce30bd8c236d8693a5fd3f084c796", size = 9923455, upload-time = "2026-02-17T22:18:19.13Z" },
{ url = "https://files.pythonhosted.org/packages/fe/e4/de154cbfeee13383ad58d23017da99390b91d73f8c11856f2095e813201b/pandas-2.3.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b468d3dad6ff947df92dcb32ede5b7bd41a9b3cceef0a30ed925f6d01fb8fa66", size = 12199267, upload-time = "2025-09-29T23:18:41.627Z" }, { url = "https://files.pythonhosted.org/packages/0e/f1/ed17d927f9950643bc7631aa4c99ff0cc83a37864470bc419345b656a41f/pandas-3.0.1-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:406ce835c55bac912f2a0dcfaf27c06d73c6b04a5dde45f1fd3169ce31337389", size = 10753464, upload-time = "2026-02-17T22:18:21.134Z" },
{ url = "https://files.pythonhosted.org/packages/bf/c9/63f8d545568d9ab91476b1818b4741f521646cbdd151c6efebf40d6de6f7/pandas-2.3.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b98560e98cb334799c0b07ca7967ac361a47326e9b4e5a7dfb5ab2b1c9d35a1b", size = 12789281, upload-time = "2025-09-29T23:18:56.834Z" }, { url = "https://files.pythonhosted.org/packages/2e/7c/870c7e7daec2a6c7ff2ac9e33b23317230d4e4e954b35112759ea4a924a7/pandas-3.0.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:830994d7e1f31dd7e790045235605ab61cff6c94defc774547e8b7fdfbff3dc7", size = 11255234, upload-time = "2026-02-17T22:18:24.175Z" },
{ url = "https://files.pythonhosted.org/packages/f2/00/a5ac8c7a0e67fd1a6059e40aa08fa1c52cc00709077d2300e210c3ce0322/pandas-2.3.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37b5848ba49824e5c30bedb9c830ab9b7751fd049bc7914533e01c65f79791", size = 13240453, upload-time = "2025-09-29T23:19:09.247Z" }, { url = "https://files.pythonhosted.org/packages/5c/39/3653fe59af68606282b989c23d1a543ceba6e8099cbcc5f1d506a7bae2aa/pandas-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a64ce8b0f2de1d2efd2ae40b0abe7f8ae6b29fbfb3812098ed5a6f8e235ad9bf", size = 11767299, upload-time = "2026-02-17T22:18:26.824Z" },
{ url = "https://files.pythonhosted.org/packages/27/4d/5c23a5bc7bd209231618dd9e606ce076272c9bc4f12023a70e03a86b4067/pandas-2.3.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:db4301b2d1f926ae677a751eb2bd0e8c5f5319c9cb3f88b0becbbb0b07b34151", size = 13890361, upload-time = "2025-09-29T23:19:25.342Z" }, { url = "https://files.pythonhosted.org/packages/9b/31/1daf3c0c94a849c7a8dab8a69697b36d313b229918002ba3e409265c7888/pandas-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9832c2c69da24b602c32e0c7b1b508a03949c18ba08d4d9f1c1033426685b447", size = 12333292, upload-time = "2026-02-17T22:18:28.996Z" },
{ url = "https://files.pythonhosted.org/packages/8e/59/712db1d7040520de7a4965df15b774348980e6df45c129b8c64d0dbe74ef/pandas-2.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:f086f6fe114e19d92014a1966f43a3e62285109afe874f067f5abbdcbb10e59c", size = 11348702, upload-time = "2025-09-29T23:19:38.296Z" }, { url = "https://files.pythonhosted.org/packages/1f/67/af63f83cd6ca603a00fe8530c10a60f0879265b8be00b5930e8e78c5b30b/pandas-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:84f0904a69e7365f79a0c77d3cdfccbfb05bf87847e3a51a41e1426b0edb9c79", size = 9892176, upload-time = "2026-02-17T22:18:31.79Z" },
{ url = "https://files.pythonhosted.org/packages/9c/fb/231d89e8637c808b997d172b18e9d4a4bc7bf31296196c260526055d1ea0/pandas-2.3.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d21f6d74eb1725c2efaa71a2bfc661a0689579b58e9c0ca58a739ff0b002b53", size = 11597846, upload-time = "2025-09-29T23:19:48.856Z" }, { url = "https://files.pythonhosted.org/packages/79/ab/9c776b14ac4b7b4140788eca18468ea39894bc7340a408f1d1e379856a6b/pandas-3.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:4a68773d5a778afb31d12e34f7dd4612ab90de8c6fb1d8ffe5d4a03b955082a1", size = 9151328, upload-time = "2026-02-17T22:18:35.721Z" },
{ url = "https://files.pythonhosted.org/packages/5c/bd/bf8064d9cfa214294356c2d6702b716d3cf3bb24be59287a6a21e24cae6b/pandas-2.3.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3fd2f887589c7aa868e02632612ba39acb0b8948faf5cc58f0850e165bd46f35", size = 10729618, upload-time = "2025-09-29T23:39:08.659Z" }, { url = "https://files.pythonhosted.org/packages/37/51/b467209c08dae2c624873d7491ea47d2b47336e5403309d433ea79c38571/pandas-3.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:476f84f8c20c9f5bc47252b66b4bb25e1a9fc2fa98cead96744d8116cb85771d", size = 10344357, upload-time = "2026-02-17T22:18:38.262Z" },
{ url = "https://files.pythonhosted.org/packages/57/56/cf2dbe1a3f5271370669475ead12ce77c61726ffd19a35546e31aa8edf4e/pandas-2.3.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecaf1e12bdc03c86ad4a7ea848d66c685cb6851d807a26aa245ca3d2017a1908", size = 11737212, upload-time = "2025-09-29T23:19:59.765Z" }, { url = "https://files.pythonhosted.org/packages/7c/f1/e2567ffc8951ab371db2e40b2fe068e36b81d8cf3260f06ae508700e5504/pandas-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0ab749dfba921edf641d4036c4c21c0b3ea70fea478165cb98a998fb2a261955", size = 9884543, upload-time = "2026-02-17T22:18:41.476Z" },
{ url = "https://files.pythonhosted.org/packages/e5/63/cd7d615331b328e287d8233ba9fdf191a9c2d11b6af0c7a59cfcec23de68/pandas-2.3.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b3d11d2fda7eb164ef27ffc14b4fcab16a80e1ce67e9f57e19ec0afaf715ba89", size = 12362693, upload-time = "2025-09-29T23:20:14.098Z" }, { url = "https://files.pythonhosted.org/packages/d7/39/327802e0b6d693182403c144edacbc27eb82907b57062f23ef5a4c4a5ea7/pandas-3.0.1-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8e36891080b87823aff3640c78649b91b8ff6eea3c0d70aeabd72ea43ab069b", size = 10396030, upload-time = "2026-02-17T22:18:43.822Z" },
{ url = "https://files.pythonhosted.org/packages/a6/de/8b1895b107277d52f2b42d3a6806e69cfef0d5cf1d0ba343470b9d8e0a04/pandas-2.3.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a68e15f780eddf2b07d242e17a04aa187a7ee12b40b930bfdd78070556550e98", size = 12771002, upload-time = "2025-09-29T23:20:26.76Z" }, { url = "https://files.pythonhosted.org/packages/3d/fe/89d77e424365280b79d99b3e1e7d606f5165af2f2ecfaf0c6d24c799d607/pandas-3.0.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:532527a701281b9dd371e2f582ed9094f4c12dd9ffb82c0c54ee28d8ac9520c4", size = 10876435, upload-time = "2026-02-17T22:18:45.954Z" },
{ url = "https://files.pythonhosted.org/packages/87/21/84072af3187a677c5893b170ba2c8fbe450a6ff911234916da889b698220/pandas-2.3.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:371a4ab48e950033bcf52b6527eccb564f52dc826c02afd9a1bc0ab731bba084", size = 13450971, upload-time = "2025-09-29T23:20:41.344Z" }, { url = "https://files.pythonhosted.org/packages/b5/a6/2a75320849dd154a793f69c951db759aedb8d1dd3939eeacda9bdcfa1629/pandas-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:356e5c055ed9b0da1580d465657bc7d00635af4fd47f30afb23025352ba764d1", size = 11405133, upload-time = "2026-02-17T22:18:48.533Z" },
{ url = "https://files.pythonhosted.org/packages/86/41/585a168330ff063014880a80d744219dbf1dd7a1c706e75ab3425a987384/pandas-2.3.3-cp312-cp312-win_amd64.whl", hash = "sha256:a16dcec078a01eeef8ee61bf64074b4e524a2a3f4b3be9326420cabe59c4778b", size = 10992722, upload-time = "2025-09-29T23:20:54.139Z" }, { url = "https://files.pythonhosted.org/packages/58/53/1d68fafb2e02d7881df66aa53be4cd748d25cbe311f3b3c85c93ea5d30ca/pandas-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9d810036895f9ad6345b8f2a338dd6998a74e8483847403582cab67745bff821", size = 11932065, upload-time = "2026-02-17T22:18:50.837Z" },
{ url = "https://files.pythonhosted.org/packages/cd/4b/18b035ee18f97c1040d94debd8f2e737000ad70ccc8f5513f4eefad75f4b/pandas-2.3.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:56851a737e3470de7fa88e6131f41281ed440d29a9268dcbf0002da5ac366713", size = 11544671, upload-time = "2025-09-29T23:21:05.024Z" }, { url = "https://files.pythonhosted.org/packages/75/08/67cc404b3a966b6df27b38370ddd96b3b023030b572283d035181854aac5/pandas-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:536232a5fe26dd989bd633e7a0c450705fdc86a207fec7254a55e9a22950fe43", size = 9741627, upload-time = "2026-02-17T22:18:53.905Z" },
{ url = "https://files.pythonhosted.org/packages/31/94/72fac03573102779920099bcac1c3b05975c2cb5f01eac609faf34bed1ca/pandas-2.3.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdcd9d1167f4885211e401b3036c0c8d9e274eee67ea8d0758a256d60704cfe8", size = 10680807, upload-time = "2025-09-29T23:21:15.979Z" }, { url = "https://files.pythonhosted.org/packages/86/4f/caf9952948fb00d23795f09b893d11f1cacb384e666854d87249530f7cbe/pandas-3.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f463ebfd8de7f326d38037c7363c6dacb857c5881ab8961fb387804d6daf2f7", size = 9052483, upload-time = "2026-02-17T22:18:57.31Z" },
{ url = "https://files.pythonhosted.org/packages/16/87/9472cf4a487d848476865321de18cc8c920b8cab98453ab79dbbc98db63a/pandas-2.3.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e32e7cc9af0f1cc15548288a51a3b681cc2a219faa838e995f7dc53dbab1062d", size = 11709872, upload-time = "2025-09-29T23:21:27.165Z" }, { url = "https://files.pythonhosted.org/packages/0b/48/aad6ec4f8d007534c091e9a7172b3ec1b1ee6d99a9cbb936b5eab6c6cf58/pandas-3.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5272627187b5d9c20e55d27caf5f2cd23e286aba25cadf73c8590e432e2b7262", size = 10317509, upload-time = "2026-02-17T22:18:59.498Z" },
{ url = "https://files.pythonhosted.org/packages/15/07/284f757f63f8a8d69ed4472bfd85122bd086e637bf4ed09de572d575a693/pandas-2.3.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:318d77e0e42a628c04dc56bcef4b40de67918f7041c2b061af1da41dcff670ac", size = 12306371, upload-time = "2025-09-29T23:21:40.532Z" }, { url = "https://files.pythonhosted.org/packages/a8/14/5990826f779f79148ae9d3a2c39593dc04d61d5d90541e71b5749f35af95/pandas-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:661e0f665932af88c7877f31da0dc743fe9c8f2524bdffe23d24fdcb67ef9d56", size = 9860561, upload-time = "2026-02-17T22:19:02.265Z" },
{ url = "https://files.pythonhosted.org/packages/33/81/a3afc88fca4aa925804a27d2676d22dcd2031c2ebe08aabd0ae55b9ff282/pandas-2.3.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e0a175408804d566144e170d0476b15d78458795bb18f1304fb94160cabf40c", size = 12765333, upload-time = "2025-09-29T23:21:55.77Z" }, { url = "https://files.pythonhosted.org/packages/fa/80/f01ff54664b6d70fed71475543d108a9b7c888e923ad210795bef04ffb7d/pandas-3.0.1-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:75e6e292ff898679e47a2199172593d9f6107fd2dd3617c22c2946e97d5df46e", size = 10365506, upload-time = "2026-02-17T22:19:05.017Z" },
{ url = "https://files.pythonhosted.org/packages/8d/0f/b4d4ae743a83742f1153464cf1a8ecfafc3ac59722a0b5c8602310cb7158/pandas-2.3.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:93c2d9ab0fc11822b5eece72ec9587e172f63cff87c00b062f6e37448ced4493", size = 13418120, upload-time = "2025-09-29T23:22:10.109Z" }, { url = "https://files.pythonhosted.org/packages/f2/85/ab6d04733a7d6ff32bfc8382bf1b07078228f5d6ebec5266b91bfc5c4ff7/pandas-3.0.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ff8cf1d2896e34343197685f432450ec99a85ba8d90cce2030c5eee2ef98791", size = 10873196, upload-time = "2026-02-17T22:19:07.204Z" },
{ url = "https://files.pythonhosted.org/packages/4f/c7/e54682c96a895d0c808453269e0b5928a07a127a15704fedb643e9b0a4c8/pandas-2.3.3-cp313-cp313-win_amd64.whl", hash = "sha256:f8bfc0e12dc78f777f323f55c58649591b2cd0c43534e8355c51d3fede5f4dee", size = 10993991, upload-time = "2025-09-29T23:25:04.889Z" }, { url = "https://files.pythonhosted.org/packages/48/a9/9301c83d0b47c23ac5deab91c6b39fd98d5b5db4d93b25df8d381451828f/pandas-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eca8b4510f6763f3d37359c2105df03a7a221a508f30e396a51d0713d462e68a", size = 11370859, upload-time = "2026-02-17T22:19:09.436Z" },
{ url = "https://files.pythonhosted.org/packages/f9/ca/3f8d4f49740799189e1395812f3bf23b5e8fc7c190827d55a610da72ce55/pandas-2.3.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:75ea25f9529fdec2d2e93a42c523962261e567d250b0013b16210e1d40d7c2e5", size = 12048227, upload-time = "2025-09-29T23:22:24.343Z" }, { url = "https://files.pythonhosted.org/packages/59/fe/0c1fc5bd2d29c7db2ab372330063ad555fb83e08422829c785f5ec2176ca/pandas-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:06aff2ad6f0b94a17822cf8b83bbb563b090ed82ff4fe7712db2ce57cd50d9b8", size = 11924584, upload-time = "2026-02-17T22:19:11.562Z" },
{ url = "https://files.pythonhosted.org/packages/0e/5a/f43efec3e8c0cc92c4663ccad372dbdff72b60bdb56b2749f04aa1d07d7e/pandas-2.3.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74ecdf1d301e812db96a465a525952f4dde225fdb6d8e5a521d47e1f42041e21", size = 11411056, upload-time = "2025-09-29T23:22:37.762Z" }, { url = "https://files.pythonhosted.org/packages/d6/7d/216a1588b65a7aa5f4535570418a599d943c85afb1d95b0876fc00aa1468/pandas-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:9fea306c783e28884c29057a1d9baa11a349bbf99538ec1da44c8476563d1b25", size = 9742769, upload-time = "2026-02-17T22:19:13.926Z" },
{ url = "https://files.pythonhosted.org/packages/46/b1/85331edfc591208c9d1a63a06baa67b21d332e63b7a591a5ba42a10bb507/pandas-2.3.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6435cb949cb34ec11cc9860246ccb2fdc9ecd742c12d3304989017d53f039a78", size = 11645189, upload-time = "2025-09-29T23:22:51.688Z" }, { url = "https://files.pythonhosted.org/packages/c4/cb/810a22a6af9a4e97c8ab1c946b47f3489c5bca5adc483ce0ffc84c9cc768/pandas-3.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:a8d37a43c52917427e897cb2e429f67a449327394396a81034a4449b99afda59", size = 9043855, upload-time = "2026-02-17T22:19:16.09Z" },
{ url = "https://files.pythonhosted.org/packages/44/23/78d645adc35d94d1ac4f2a3c4112ab6f5b8999f4898b8cdf01252f8df4a9/pandas-2.3.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:900f47d8f20860de523a1ac881c4c36d65efcb2eb850e6948140fa781736e110", size = 12121912, upload-time = "2025-09-29T23:23:05.042Z" }, { url = "https://files.pythonhosted.org/packages/92/fa/423c89086cca1f039cf1253c3ff5b90f157b5b3757314aa635f6bf3e30aa/pandas-3.0.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d54855f04f8246ed7b6fc96b05d4871591143c46c0b6f4af874764ed0d2d6f06", size = 10752673, upload-time = "2026-02-17T22:19:18.304Z" },
{ url = "https://files.pythonhosted.org/packages/53/da/d10013df5e6aaef6b425aa0c32e1fc1f3e431e4bcabd420517dceadce354/pandas-2.3.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a45c765238e2ed7d7c608fc5bc4a6f88b642f2f01e70c0c23d2224dd21829d86", size = 12712160, upload-time = "2025-09-29T23:23:28.57Z" }, { url = "https://files.pythonhosted.org/packages/22/23/b5a08ec1f40020397f0faba72f1e2c11f7596a6169c7b3e800abff0e433f/pandas-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e1b677accee34a09e0dc2ce5624e4a58a1870ffe56fc021e9caf7f23cd7668f", size = 10404967, upload-time = "2026-02-17T22:19:20.726Z" },
{ url = "https://files.pythonhosted.org/packages/bd/17/e756653095a083d8a37cbd816cb87148debcfcd920129b25f99dd8d04271/pandas-2.3.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c4fc4c21971a1a9f4bdb4c73978c7f7256caa3e62b323f70d6cb80db583350bc", size = 13199233, upload-time = "2025-09-29T23:24:24.876Z" }, { url = "https://files.pythonhosted.org/packages/5c/81/94841f1bb4afdc2b52a99daa895ac2c61600bb72e26525ecc9543d453ebc/pandas-3.0.1-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a9cabbdcd03f1b6cd254d6dda8ae09b0252524be1592594c00b7895916cb1324", size = 10320575, upload-time = "2026-02-17T22:19:24.919Z" },
{ url = "https://files.pythonhosted.org/packages/0a/8b/2ae37d66a5342a83adadfd0cb0b4bf9c3c7925424dd5f40d15d6cfaa35ee/pandas-3.0.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5ae2ab1f166668b41e770650101e7090824fd34d17915dd9cd479f5c5e0065e9", size = 10710921, upload-time = "2026-02-17T22:19:27.181Z" },
{ url = "https://files.pythonhosted.org/packages/a2/61/772b2e2757855e232b7ccf7cb8079a5711becb3a97f291c953def15a833f/pandas-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6bf0603c2e30e2cafac32807b06435f28741135cb8697eae8b28c7d492fc7d76", size = 11334191, upload-time = "2026-02-17T22:19:29.411Z" },
{ url = "https://files.pythonhosted.org/packages/1b/08/b16c6df3ef555d8495d1d265a7963b65be166785d28f06a350913a4fac78/pandas-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6c426422973973cae1f4a23e51d4ae85974f44871b24844e4f7de752dd877098", size = 11782256, upload-time = "2026-02-17T22:19:32.34Z" },
{ url = "https://files.pythonhosted.org/packages/55/80/178af0594890dee17e239fca96d3d8670ba0f5ff59b7d0439850924a9c09/pandas-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b03f91ae8c10a85c1613102c7bef5229b5379f343030a3ccefeca8a33414cf35", size = 10485047, upload-time = "2026-02-17T22:19:34.605Z" },
] ]
[[package]] [[package]]
@ -3930,7 +3945,7 @@ name = "pexpect"
version = "4.9.0" version = "4.9.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "ptyprocess" }, { name = "ptyprocess", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" } sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
wheels = [ wheels = [
@ -4013,7 +4028,9 @@ name = "playwright"
version = "1.46.0" version = "1.46.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version < '3.12'", "python_full_version < '3.12' and sys_platform == 'win32'",
"python_full_version < '3.12' and sys_platform == 'emscripten'",
"python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
dependencies = [ dependencies = [
{ name = "greenlet", version = "3.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" }, { name = "greenlet", version = "3.0.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
@ -4034,8 +4051,12 @@ name = "playwright"
version = "1.58.0" version = "1.58.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version >= '3.13'", "python_full_version >= '3.13' and sys_platform == 'win32'",
"python_full_version == '3.12.*'", "python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version >= '3.13' and sys_platform != 'emscripten' and sys_platform != 'win32'",
"python_full_version == '3.12.*' and sys_platform == 'win32'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
dependencies = [ dependencies = [
{ name = "greenlet", version = "3.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" }, { name = "greenlet", version = "3.3.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
@ -4238,8 +4259,12 @@ name = "psycopg"
version = "3.1.18" version = "3.1.18"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version == '3.12.*'", "python_full_version == '3.12.*' and sys_platform == 'win32'",
"python_full_version < '3.12'", "python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'",
"python_full_version < '3.12' and sys_platform == 'win32'",
"python_full_version < '3.12' and sys_platform == 'emscripten'",
"python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
dependencies = [ dependencies = [
{ name = "typing-extensions", marker = "python_full_version < '3.13'" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" },
@ -4263,7 +4288,9 @@ name = "psycopg"
version = "3.2.13" version = "3.2.13"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version >= '3.13'", "python_full_version >= '3.13' and sys_platform == 'win32'",
"python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version >= '3.13' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
dependencies = [ dependencies = [
{ name = "tzdata", marker = "python_full_version >= '3.13' and sys_platform == 'win32'" }, { name = "tzdata", marker = "python_full_version >= '3.13' and sys_platform == 'win32'" },
@ -4286,8 +4313,12 @@ name = "psycopg-binary"
version = "3.1.18" version = "3.1.18"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version == '3.12.*'", "python_full_version == '3.12.*' and sys_platform == 'win32'",
"python_full_version < '3.12'", "python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'",
"python_full_version < '3.12' and sys_platform == 'win32'",
"python_full_version < '3.12' and sys_platform == 'emscripten'",
"python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/f8/bff5e40fcc800925da9b2efddf76554511416ad7d95e1fba9a2440c43f0c/psycopg_binary-3.1.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e4de16a637ec190cbee82e0c2dc4860fed17a23a35f7a1e6dc479a5c6876722", size = 3327688, upload-time = "2024-02-04T21:07:23.631Z" }, { url = "https://files.pythonhosted.org/packages/e4/f8/bff5e40fcc800925da9b2efddf76554511416ad7d95e1fba9a2440c43f0c/psycopg_binary-3.1.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e4de16a637ec190cbee82e0c2dc4860fed17a23a35f7a1e6dc479a5c6876722", size = 3327688, upload-time = "2024-02-04T21:07:23.631Z" },
@ -4319,7 +4350,9 @@ name = "psycopg-binary"
version = "3.2.13" version = "3.2.13"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version >= '3.13'", "python_full_version >= '3.13' and sys_platform == 'win32'",
"python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version >= '3.13' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/06/f5/fc70804a999167daf5b876107b99e8fe91c3f785a31753c0e3e7b93446ba/psycopg_binary-3.2.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9cfe87749d010dfd34534ba8c71aa0674db9a3fce65232c98989f77c742c9ce7", size = 4013844, upload-time = "2025-11-21T22:30:25.985Z" }, { url = "https://files.pythonhosted.org/packages/06/f5/fc70804a999167daf5b876107b99e8fe91c3f785a31753c0e3e7b93446ba/psycopg_binary-3.2.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9cfe87749d010dfd34534ba8c71aa0674db9a3fce65232c98989f77c742c9ce7", size = 4013844, upload-time = "2025-11-21T22:30:25.985Z" },
@ -4559,7 +4592,9 @@ name = "pyee"
version = "11.1.0" version = "11.1.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version < '3.12'", "python_full_version < '3.12' and sys_platform == 'win32'",
"python_full_version < '3.12' and sys_platform == 'emscripten'",
"python_full_version < '3.12' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
dependencies = [ dependencies = [
{ name = "typing-extensions", marker = "python_full_version < '3.12'" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" },
@ -4574,8 +4609,12 @@ name = "pyee"
version = "13.0.1" version = "13.0.1"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
resolution-markers = [ resolution-markers = [
"python_full_version >= '3.13'", "python_full_version >= '3.13' and sys_platform == 'win32'",
"python_full_version == '3.12.*'", "python_full_version >= '3.13' and sys_platform == 'emscripten'",
"python_full_version >= '3.13' and sys_platform != 'emscripten' and sys_platform != 'win32'",
"python_full_version == '3.12.*' and sys_platform == 'win32'",
"python_full_version == '3.12.*' and sys_platform == 'emscripten'",
"python_full_version == '3.12.*' and sys_platform != 'emscripten' and sys_platform != 'win32'",
] ]
dependencies = [ dependencies = [
{ name = "typing-extensions", marker = "python_full_version >= '3.12'" }, { name = "typing-extensions", marker = "python_full_version >= '3.12'" },
@ -5179,7 +5218,7 @@ wheels = [
[[package]] [[package]]
name = "requests" name = "requests"
version = "2.32.5" version = "2.33.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "certifi" }, { name = "certifi" },
@ -5187,9 +5226,9 @@ dependencies = [
{ name = "idna" }, { name = "idna" },
{ name = "urllib3" }, { name = "urllib3" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" },
] ]
[[package]] [[package]]
@ -5438,8 +5477,8 @@ name = "secretstorage"
version = "3.5.0" version = "3.5.0"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "cryptography" }, { name = "cryptography", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
{ name = "jeepney" }, { name = "jeepney", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884, upload-time = "2025-11-23T19:02:53.191Z" } sdist = { url = "https://files.pythonhosted.org/packages/1c/03/e834bcd866f2f8a49a85eaff47340affa3bfa391ee9912a952a1faa68c7b/secretstorage-3.5.0.tar.gz", hash = "sha256:f04b8e4689cbce351744d5537bf6b1329c6fc68f91fa666f60a380edddcd11be", size = 19884, upload-time = "2025-11-23T19:02:53.191Z" }
wheels = [ wheels = [
@ -5484,7 +5523,7 @@ wheels = [
[[package]] [[package]]
name = "skyvern" name = "skyvern"
version = "1.0.28" version = "1.0.29"
source = { editable = "." } source = { editable = "." }
dependencies = [ dependencies = [
{ name = "aioboto3" }, { name = "aioboto3" },
@ -6124,16 +6163,16 @@ wheels = [
[[package]] [[package]]
name = "types-boto3" name = "types-boto3"
version = "1.42.74" version = "1.42.76"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "botocore-stubs" }, { name = "botocore-stubs" },
{ name = "types-s3transfer" }, { name = "types-s3transfer" },
{ name = "typing-extensions", marker = "python_full_version < '3.12'" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/66/d2/5f1c3dfd75993084c8cf5d31e169c342fb394d7905d6438428245661c39b/types_boto3-1.42.74.tar.gz", hash = "sha256:8013a2dfc1ba398217d2d2dc6b54b37494df65cbae363f9430af4824697cb655", size = 101691, upload-time = "2026-03-23T19:56:24.402Z" } sdist = { url = "https://files.pythonhosted.org/packages/2e/49/065066b7ab04660a02dee2c1835d40baa2e5abbb2e9e5f974ddac92a787c/types_boto3-1.42.76.tar.gz", hash = "sha256:fd947e58e015d6dac56eef826a897244185ea1776389bdf76b6ba9c1860296e6", size = 101995, upload-time = "2026-03-25T19:39:38.487Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/6c/b1/8836349f6c9a3bf2066fb8eaec74180406f0a11e8ce00fc33232f97a8c38/types_boto3-1.42.74-py3-none-any.whl", hash = "sha256:3791c49c694b5c6d980e38994032eff4ed90ab4f7b1ad4fb34f3501b7fc60d02", size = 69907, upload-time = "2026-03-23T19:56:19.283Z" }, { url = "https://files.pythonhosted.org/packages/2c/85/b11d0337dd0aac5de42fa414905f95e8aabf00c7f670f0c795c11b1e0a09/types_boto3-1.42.76-py3-none-any.whl", hash = "sha256:af93970c88fd10b1e7e30532ff56517ae187c4d3fb4fbcee14a310ce771c5873", size = 70061, upload-time = "2026-03-25T19:39:31.498Z" },
] ]
[package.optional-dependencies] [package.optional-dependencies]
@ -6143,14 +6182,14 @@ full = [
[[package]] [[package]]
name = "types-boto3-full" name = "types-boto3-full"
version = "1.42.74" version = "1.42.76"
source = { registry = "https://pypi.org/simple" } source = { registry = "https://pypi.org/simple" }
dependencies = [ dependencies = [
{ name = "typing-extensions", marker = "python_full_version < '3.12'" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" },
] ]
sdist = { url = "https://files.pythonhosted.org/packages/c9/4d/5fa22d7c99b57e9f92c522b10d96caab0dc638880863cc86901e7f9316da/types_boto3_full-1.42.74.tar.gz", hash = "sha256:827c31e18c1cc8720b826b3c3b8f7be3612c5fc3800d61d0591e9d65257ecb0a", size = 8499614, upload-time = "2026-03-24T01:27:55.394Z" } sdist = { url = "https://files.pythonhosted.org/packages/f5/1a/084d992ba49114781b60a9783b357975a4eb767fa0519ee760781500c047/types_boto3_full-1.42.76.tar.gz", hash = "sha256:3d4b0d7b84977f4bc54e2d0b9a1161f263d4693d83279a1c55e5d1b783372aee", size = 8508720, upload-time = "2026-03-26T01:51:21.452Z" }
wheels = [ wheels = [
{ url = "https://files.pythonhosted.org/packages/34/c0/9b55d89c3ec01aa91518cae52b73a03af6ad8c07e46551d865f40039fc23/types_boto3_full-1.42.74-py3-none-any.whl", hash = "sha256:654f814175af53caa1c34498883c6edcdb8555066e19bbe800f98d1d8b862d29", size = 12908188, upload-time = "2026-03-24T01:27:51.64Z" }, { url = "https://files.pythonhosted.org/packages/b9/a2/fe07a7544ea815bf748f4280b39310721b50980db26404ba0aa76bf35bba/types_boto3_full-1.42.76-py3-none-any.whl", hash = "sha256:0a0655d57938767929d4498dd28be43570fbb526aded0bf4cac9af97ba1cc2dc", size = 12929985, upload-time = "2026-03-26T01:51:17.694Z" },
] ]
[[package]] [[package]]