[1/5] [SKY-7600] Set schema for task_runs (#5300)

Aaron Perez 2026-03-31 14:34:06 -05:00 committed by GitHub
parent 1d1d9c8e65
commit 474366bfd0
37 changed files with 1932 additions and 657 deletions

View file

@@ -0,0 +1,81 @@
"""add run history columns to task_runs
Revision ID: 5516c5bf7762
Revises: d77ed2605df0
Create Date: 2026-03-31T19:15:14.171986+00:00
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "5516c5bf7762"
down_revision: Union[str, None] = "d77ed2605df0"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.add_column("task_runs", sa.Column("status", sa.String(), nullable=True))
op.add_column("task_runs", sa.Column("started_at", sa.DateTime(), nullable=True))
op.add_column("task_runs", sa.Column("finished_at", sa.DateTime(), nullable=True))
op.add_column("task_runs", sa.Column("workflow_permanent_id", sa.String(), nullable=True))
op.add_column("task_runs", sa.Column("script_run", sa.JSON(), nullable=True))
op.add_column("task_runs", sa.Column("parent_workflow_run_id", sa.String(), nullable=True))
op.add_column("task_runs", sa.Column("debug_session_id", sa.String(), nullable=True))
op.add_column("task_runs", sa.Column("searchable_text", sa.Text(), nullable=True))
# pg_trgm is available by default on RDS and Cloud SQL.
op.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm")
# All indexes are created CONCURRENTLY to avoid SHARE locks that block writes
# on the actively-written task_runs table.
with op.get_context().autocommit_block():
op.execute("SET statement_timeout = '3h';")
try:
op.execute(
"CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_task_runs_org_status_created "
"ON task_runs USING btree (organization_id, status, created_at DESC)"
)
op.execute(
"CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_task_runs_searchable_text_gin "
"ON task_runs USING gin (searchable_text gin_trgm_ops)"
)
op.execute("""
CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_task_runs_org_toplevel_created
ON task_runs (organization_id, created_at DESC)
WHERE parent_workflow_run_id IS NULL
AND debug_session_id IS NULL
AND status IS NOT NULL;
""")
# Partial index covering non-terminal task_runs rows.
# Used by the task_runs_sync_activity cron to efficiently find rows
# that still need syncing. The index shrinks as runs complete.
op.execute("""
CREATE INDEX CONCURRENTLY IF NOT EXISTS ix_task_runs_nonterminal
ON task_runs (run_id, task_run_type)
WHERE status IS NULL
OR status NOT IN ('completed', 'failed', 'terminated', 'canceled', 'timed_out')
""")
finally:
try:
op.execute("RESET statement_timeout;")
except Exception:
pass # Don't mask the original index creation error
def downgrade() -> None:
op.execute("DROP INDEX IF EXISTS ix_task_runs_nonterminal")
op.execute("DROP INDEX IF EXISTS ix_task_runs_org_toplevel_created")
op.execute("DROP INDEX IF EXISTS ix_task_runs_searchable_text_gin")
op.execute("DROP INDEX IF EXISTS ix_task_runs_org_status_created")
op.drop_column("task_runs", "searchable_text")
op.drop_column("task_runs", "debug_session_id")
op.drop_column("task_runs", "parent_workflow_run_id")
op.drop_column("task_runs", "script_run")
op.drop_column("task_runs", "workflow_permanent_id")
op.drop_column("task_runs", "finished_at")
op.drop_column("task_runs", "started_at")
op.drop_column("task_runs", "status")
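
As a sanity check on the index design: the sync cron described above only needs (run_id, task_run_type) for rows that are not yet terminal, which is exactly what the partial index covers. A hedged sketch of that query shape, using the model names from this PR (the actual sync activity lands later in the stack):

from sqlalchemy import or_, select
from skyvern.forge.sdk.db.models import TaskRunModel

TERMINAL_STATUSES = ("completed", "failed", "terminated", "canceled", "timed_out")

# The WHERE clause matches the ix_task_runs_nonterminal predicate verbatim, so
# Postgres can satisfy it from the small partial index instead of a full scan.
stmt = select(TaskRunModel.run_id, TaskRunModel.task_run_type).where(
    or_(TaskRunModel.status.is_(None), TaskRunModel.status.not_in(TERMINAL_STATUSES))
)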

View file

@@ -167,6 +167,9 @@ include = [
"skyvern-frontend/tsconfig.node.json" = "skyvern-frontend/tsconfig.node.json"
[tool.uv]
+# Supply chain quarantine: block packages published less than 7 days ago.
+# Override for urgent cases: uv add <pkg> --exclude-newer ""
+exclude-newer = "7 days"
constraint-dependencies = [
    "authlib>=1.6.9",
    "flask>=3.1.3",

View file

@@ -1,4 +1,10 @@
#!/bin/sh
+# Alembic migrations are PostgreSQL-specific. Default to PostgreSQL when
+# DATABASE_STRING is not set (e.g. OSS CI), so we don't pick up the new
+# SQLite default from Settings._default_database_string().
+: "${DATABASE_STRING:=postgresql+asyncpg://skyvern@localhost/skyvern}"
+export DATABASE_STRING

# first apply migrations
export PATH="${PATH}:.venv/bin"
alembic upgrade head

View file

@@ -441,6 +441,31 @@ export type WorkflowRunApiResponse = {
  workflow_title: string | null;
};
export const TaskRunType = {
TaskV1: "task_v1",
TaskV2: "task_v2",
WorkflowRun: "workflow_run",
OpenaiCua: "openai_cua",
AnthropicCua: "anthropic_cua",
UiTars: "ui_tars",
} as const;
export type TaskRunType = (typeof TaskRunType)[keyof typeof TaskRunType];
export type TaskRunListItem = {
task_run_id: string;
task_run_type: TaskRunType;
run_id: string;
title: string | null;
status: string;
started_at: string | null;
finished_at: string | null;
created_at: string;
workflow_permanent_id: string | null;
script_run: boolean;
searchable_text: string | null;
};
export type WorkflowRunStatusApiResponse = {
  workflow_id: string;
  workflow_run_id: string;

View file

@@ -1,9 +1,9 @@
import { getClient } from "@/api/AxiosClient";
import { useCredentialGetter } from "@/hooks/useCredentialGetter";
import { useQuery } from "@tanstack/react-query";
-import { Status, Task, TriggerType, WorkflowRunApiResponse } from "@/api/types";
+import { Status, TaskRunListItem } from "@/api/types";
-type QueryReturnType = Array<Task | WorkflowRunApiResponse>;
+type QueryReturnType = Array<TaskRunListItem>;
type UseQueryOptions = Omit<
  Parameters<typeof useQuery<QueryReturnType>>[0],
  "queryKey" | "queryFn"
@@ -13,7 +13,6 @@ type Props = {
  page?: number;
  pageSize?: number;
  statusFilters?: Array<Status>;
-  triggerTypeFilters?: Array<TriggerType>;
  search?: string;
} & UseQueryOptions;
@@ -21,20 +20,14 @@ function useRunsQuery({
  page = 1,
  pageSize = 10,
  statusFilters,
-  triggerTypeFilters,
  search,
+  ...queryOptions
}: Props) {
  const credentialGetter = useCredentialGetter();
-  return useQuery<Array<Task | WorkflowRunApiResponse>>({
-    queryKey: [
-      "runs",
-      { statusFilters, triggerTypeFilters },
-      page,
-      pageSize,
-      search,
-    ],
+  return useQuery<Array<TaskRunListItem>>({
+    queryKey: ["runs", { statusFilters }, page, pageSize, search],
    queryFn: async () => {
-      const client = await getClient(credentialGetter);
+      const client = await getClient(credentialGetter, "sans-api-v1");
      const params = new URLSearchParams();
      params.append("page", String(page));
      params.append("page_size", String(pageSize));
@@ -43,16 +36,12 @@
      if (statusFilters) {
        statusFilters.forEach((status) => {
          params.append("status", status);
        });
      }
-      if (triggerTypeFilters) {
-        triggerTypeFilters.forEach((triggerType) => {
-          params.append("trigger_type", triggerType);
-        });
-      }
      if (search) {
        params.append("search_key", search);
      }
      return client.get("/runs", { params }).then((res) => res.data);
    },
+    ...queryOptions,
  });
}

View file

@@ -3,15 +3,12 @@ import { LightningBoltIcon, MixerHorizontalIcon } from "@radix-ui/react-icons";
import { Tip } from "@/components/Tip";
import {
  Status,
-  Task,
-  TriggerType,
-  WorkflowRunApiResponse,
+  TaskRunListItem,
+  TaskRunType,
  WorkflowRunStatusApiResponse,
} from "@/api/types";
import { StatusBadge } from "@/components/StatusBadge";
import { StatusFilterDropdown } from "@/components/StatusFilterDropdown";
-import { TriggerTypeBadge } from "@/components/TriggerTypeBadge";
-import { TriggerTypeFilterDropdown } from "@/components/TriggerTypeFilterDropdown";
import {
  Pagination,
  PaginationContent,
@@ -41,7 +38,6 @@ import React, { useEffect, useState } from "react";
import { useNavigate, useSearchParams } from "react-router-dom";
import { getClient } from "@/api/AxiosClient";
import { useCredentialGetter } from "@/hooks/useCredentialGetter";
-import * as env from "@/util/env";
import { useDebounce } from "use-debounce";
import { Button } from "@/components/ui/button";
import {
@@ -57,21 +53,33 @@ import { useParameterExpansion } from "@/routes/workflows/hooks/useParameterExpa
import { ParameterDisplayInline } from "@/routes/workflows/components/ParameterDisplayInline";
import { HighlightText } from "@/routes/workflows/components/HighlightText";
-function isTask(run: Task | WorkflowRunApiResponse): run is Task {
-  return "task_id" in run;
-}
+const statusValues = new Set<string>(Object.values(Status));
+function isKnownStatus(value: string): value is Status {
+  return statusValues.has(value);
+}
+function getRunNavigationPath(run: TaskRunListItem): string {
+  switch (run.task_run_type) {
+    case TaskRunType.WorkflowRun:
+    case TaskRunType.TaskV2:
+      return `/runs/${run.run_id}`;
+    case TaskRunType.TaskV1:
+    case TaskRunType.OpenaiCua:
+    case TaskRunType.AnthropicCua:
+    case TaskRunType.UiTars:
+      return `/tasks/${run.run_id}/actions`;
+    default:
+      return `/runs/${run.run_id}`;
+  }
+}
function RunHistory() {
-  const credentialGetter = useCredentialGetter();
  const [searchParams, setSearchParams] = useSearchParams();
  const page = searchParams.get("page") ? Number(searchParams.get("page")) : 1;
  const itemsPerPage = searchParams.get("page_size")
    ? Number(searchParams.get("page_size"))
    : 10;
  const [statusFilters, setStatusFilters] = useState<Array<Status>>([]);
-  const [triggerTypeFilters, setTriggerTypeFilters] = useState<
-    Array<TriggerType>
-  >([]);
  const [search, setSearch] = useState("");
  const [debouncedSearch] = useDebounce(search, 500);
@@ -79,43 +87,17 @@ function RunHistory() {
    page,
    pageSize: itemsPerPage,
    statusFilters,
-    triggerTypeFilters,
    search: debouncedSearch,
  });
  const navigate = useNavigate();
-  const { data: nextPageRuns } = useQuery<Array<Task | WorkflowRunApiResponse>>(
-    {
-      queryKey: [
-        "runs",
-        { statusFilters, triggerTypeFilters },
-        page + 1,
-        itemsPerPage,
-        debouncedSearch,
-      ],
-      queryFn: async () => {
-        const client = await getClient(credentialGetter);
-        const params = new URLSearchParams();
-        params.append("page", String(page + 1));
-        params.append("page_size", String(itemsPerPage));
-        if (statusFilters) {
-          statusFilters.forEach((status) => {
-            params.append("status", status);
-          });
-        }
-        if (triggerTypeFilters) {
-          triggerTypeFilters.forEach((triggerType) => {
-            params.append("trigger_type", triggerType);
-          });
-        }
-        if (debouncedSearch) {
-          params.append("search_key", debouncedSearch);
-        }
-        return client.get("/runs", { params }).then((res) => res.data);
-      },
-      enabled: runs && runs.length === itemsPerPage,
-    },
-  );
+  const { data: nextPageRuns } = useRunsQuery({
+    page: page + 1,
+    pageSize: itemsPerPage,
+    statusFilters,
+    search: debouncedSearch,
+    enabled: runs?.length === itemsPerPage,
+  });
  const isNextDisabled =
    isFetching || !nextPageRuns || nextPageRuns.length === 0;
@@ -136,8 +118,8 @@
    const workflowRunIds =
      runs
-        ?.filter((run): run is WorkflowRunApiResponse => !isTask(run))
-        .map((run) => run.workflow_run_id)
+        ?.filter((run) => run.task_run_type === TaskRunType.WorkflowRun)
+        .map((run) => run.run_id)
        .filter((id): id is string => Boolean(id)) ?? [];
    setAutoExpandedRows(workflowRunIds);
@@ -170,12 +152,137 @@
    if (isNextDisabled) return;
    setParamPatch({ page: String(page + 1) });
  }
const displayTableBody = () => {
// Show loading skeleton
if (isFetching) {
return Array.from({ length: 10 }).map((_, index) => (
<TableRow key={`row-${index}`}>
<TableCell colSpan={6}>
<Skeleton className="h-4 w-full" />
</TableCell>
</TableRow>
));
}
// No runs found
if (runs?.length === 0) {
return (
<TableRow>
<TableCell colSpan={6}>
<div className="text-center">No runs found</div>
</TableCell>
</TableRow>
);
}
return runs?.map((run) => {
const executionTime = formatExecutionTime(
run.started_at ?? run.created_at,
run.finished_at,
);
const isWorkflowRun = run.task_run_type === TaskRunType.WorkflowRun;
const isExpanded = isWorkflowRun && expandedRows.has(run.run_id);
const navPath = getRunNavigationPath(run);
const titleContent = run.script_run ? (
<div className="flex items-center gap-2">
<Tip content="Ran with code">
<LightningBoltIcon className="text-[gold]" />
</Tip>
<span>{run.title ?? ""}</span>
</div>
) : (
run.title ?? ""
);
return (
<React.Fragment key={run.task_run_id}>
<TableRow
className="cursor-pointer"
onClick={(event) => {
handleNavigate(event, navPath);
}}
>
<TableCell className="max-w-0 truncate" title={run.run_id}>
<HighlightText text={run.run_id} query={debouncedSearch} />
</TableCell>
<TableCell
className="max-w-0 truncate"
title={run.title ?? undefined}
>
{titleContent}
</TableCell>
<TableCell>
{isKnownStatus(run.status) ? (
<StatusBadge status={run.status} />
) : (
<span className="text-sm text-slate-400">{run.status}</span>
)}
</TableCell>
<TableCell
className="max-w-0 truncate"
title={basicTimeFormat(run.created_at)}
>
{basicLocalTimeFormat(run.created_at)}
</TableCell>
<TableCell className="text-slate-400">
{executionTime ?? "-"}
</TableCell>
<TableCell>
{isWorkflowRun ? (
<div className="flex justify-end">
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
size="icon"
variant="outline"
onClick={(event) => {
event.stopPropagation();
toggleParametersExpanded(run.run_id);
}}
className={cn(isExpanded && "text-blue-400")}
>
<MixerHorizontalIcon className="h-4 w-4" />
</Button>
</TooltipTrigger>
<TooltipContent>
{isExpanded ? "Hide Parameters" : "Show Parameters"}
</TooltipContent>
</Tooltip>
</TooltipProvider>
</div>
) : null}
</TableCell>
</TableRow>
{isExpanded && run.workflow_permanent_id && (
<TableRow key={`${run.run_id}-params`}>
<TableCell
colSpan={6}
className="bg-slate-50 dark:bg-slate-900/50"
>
<WorkflowRunParametersInline
workflowPermanentId={run.workflow_permanent_id}
workflowRunId={run.run_id}
searchQuery={debouncedSearch}
keywordMatchesParameter={matchesParameter}
/>
</TableCell>
</TableRow>
)}
</React.Fragment>
);
});
};
  return (
    <div className="space-y-4">
      <header>
        <h1 className="text-2xl">Run History</h1>
      </header>
-      <div className="flex items-center gap-4">
+      <div className="flex items-center justify-between gap-4">
        <TableSearchInput
          value={search}
          onChange={(value) => {
@@ -187,19 +294,10 @@
          placeholder="Search by run ID or parameter..."
          className="w-48 lg:w-72"
        />
-        <TriggerTypeFilterDropdown
-          values={triggerTypeFilters}
-          onChange={(values) => {
-            setTriggerTypeFilters(values);
-            const params = new URLSearchParams(searchParams);
-            params.set("page", "1");
-            setSearchParams(params, { replace: true });
-          }}
-        />
        <StatusFilterDropdown
          values={statusFilters}
-          onChange={(values) => {
-            setStatusFilters(values);
+          onChange={(filters) => {
+            setStatusFilters(filters);
            const params = new URLSearchParams(searchParams);
            params.set("page", "1");
            setSearchParams(params, { replace: true });
@@ -222,184 +320,7 @@
          <TableHead className="w-[8%] rounded-tr-lg text-slate-400"></TableHead>
        </TableRow>
      </TableHeader>
-      <TableBody>
+      <TableBody>{displayTableBody()}</TableBody>
{isFetching ? (
Array.from({ length: 10 }).map((_, index) => (
<TableRow key={index}>
<TableCell colSpan={6}>
<Skeleton className="h-4 w-full" />
</TableCell>
</TableRow>
))
) : runs?.length === 0 ? (
<TableRow>
<TableCell colSpan={6}>
<div className="text-center">No runs found</div>
</TableCell>
</TableRow>
) : (
runs?.map((run) => {
if (isTask(run)) {
const taskExecutionTime = formatExecutionTime(
run.started_at ?? run.created_at,
run.finished_at,
);
return (
<TableRow
key={run.task_id}
className="cursor-pointer"
onClick={(event) => {
handleNavigate(event, `/tasks/${run.task_id}/actions`);
}}
>
<TableCell className="max-w-0 truncate">
{run.task_id}
</TableCell>
<TableCell className="max-w-0 truncate">
{run.url}
</TableCell>
<TableCell>
<StatusBadge status={run.status} />
</TableCell>
<TableCell
title={basicTimeFormat(run.created_at)}
className="max-w-0 truncate"
>
{basicLocalTimeFormat(run.created_at)}
</TableCell>
<TableCell className="truncate text-slate-400">
{taskExecutionTime ?? "-"}
</TableCell>
{/* Align with workflow row's expand button column. */}
<TableCell />
</TableRow>
);
}
const workflowTitle = (
<div className="flex items-center gap-2">
<span className="truncate">{run.workflow_title ?? ""}</span>
{run.script_run === true && (
<Tip content="Ran with code">
<LightningBoltIcon className="text-[gold]" />
</Tip>
)}
<TriggerTypeBadge triggerType={run.trigger_type} />
</div>
);
const isExpanded = expandedRows.has(run.workflow_run_id);
const workflowExecutionTime = formatExecutionTime(
run.started_at ?? run.created_at,
run.finished_at,
);
return (
<React.Fragment key={run.workflow_run_id}>
<TableRow
className="cursor-pointer"
onClick={(event) => {
handleNavigate(
event,
env.useNewRunsUrl
? `/runs/${run.workflow_run_id}`
: `/workflows/${run.workflow_permanent_id}/${run.workflow_run_id}/overview`,
);
}}
>
<TableCell
className="max-w-0 truncate"
title={run.workflow_run_id}
>
<HighlightText
text={run.workflow_run_id}
query={debouncedSearch}
/>
</TableCell>
<TableCell
className="max-w-0 truncate"
title={run.workflow_title ?? undefined}
>
{workflowTitle}
</TableCell>
<TableCell>
<StatusBadge status={run.status} />
</TableCell>
<TableCell
className="max-w-0 truncate"
title={
run.trigger_type === TriggerType.Scheduled &&
run.scheduled_for
? `Scheduled: ${basicTimeFormat(run.scheduled_for)}\nStarted: ${basicTimeFormat(run.created_at)}`
: basicTimeFormat(run.created_at)
}
>
<div className="flex flex-col">
<span>{basicLocalTimeFormat(run.created_at)}</span>
{run.trigger_type === TriggerType.Scheduled &&
run.schedule_name && (
<span className="text-xs text-slate-400">
{run.schedule_name}
{run.schedule_cron
? ` (${run.schedule_cron})`
: ""}
</span>
)}
</div>
</TableCell>
<TableCell className="truncate text-slate-400">
{workflowExecutionTime ?? "-"}
</TableCell>
<TableCell>
<div className="flex justify-end">
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<Button
size="icon"
variant="outline"
onClick={(event) => {
event.stopPropagation();
toggleParametersExpanded(
run.workflow_run_id,
);
}}
className={cn(isExpanded && "text-blue-400")}
>
<MixerHorizontalIcon className="h-4 w-4" />
</Button>
</TooltipTrigger>
<TooltipContent>
{isExpanded
? "Hide Parameters"
: "Show Parameters"}
</TooltipContent>
</Tooltip>
</TooltipProvider>
</div>
</TableCell>
</TableRow>
{isExpanded && (
<TableRow key={`${run.workflow_run_id}-params`}>
<TableCell
colSpan={6}
className="bg-slate-50 dark:bg-slate-900/50"
>
<WorkflowRunParametersInline
workflowPermanentId={run.workflow_permanent_id}
workflowRunId={run.workflow_run_id}
searchQuery={debouncedSearch}
keywordMatchesParameter={matchesParameter}
/>
</TableCell>
</TableRow>
)}
</React.Fragment>
);
})
)}
</TableBody>
      </Table>
      <div className="relative px-3 py-3">
        <div className="absolute left-3 top-1/2 flex -translate-y-1/2 items-center gap-2 text-sm">
@@ -466,7 +387,7 @@ function WorkflowRunParametersInline({
  workflowRunId,
  searchQuery,
  keywordMatchesParameter,
-}: WorkflowRunParametersInlineProps) {
+}: Readonly<WorkflowRunParametersInlineProps>) {
  const { data: globalWorkflows } = useGlobalWorkflowsQuery();
  const credentialGetter = useCredentialGetter();

View file

@@ -1,13 +1,43 @@
import logging
import platform
+from pathlib import Path
from typing import Any
+from pydantic import Field
from pydantic_settings import BaseSettings, SettingsConfigDict
from skyvern import constants
from skyvern.constants import REPO_ROOT_DIR, SKYVERN_DIR
from skyvern.utils.env_paths import resolve_backend_env_path
+def _default_database_string() -> str:
+    """Return the default DATABASE_STRING.
+
+    Uses a SQLite file at ~/.skyvern/data.db so that ``skyvern run server``
+    works out of the box without Docker or Postgres. Users who set
+    DATABASE_STRING in .env or the environment get Postgres automatically
+    (pydantic-settings reads env before the default_factory runs).
+
+    This is a pure string computation; there are no filesystem side effects.
+    The parent directory is created by _ensure_sqlite_dir() at engine
+    build time (agent_db.py) or server bootstrap time (api_app.py).
+    """
+    db_path = Path.home() / ".skyvern" / "data.db"
+    return f"sqlite+aiosqlite:///{db_path}"
+def _ensure_sqlite_dir(database_string: str) -> None:
+    """Create the parent directory for a file-backed SQLite database URL.
+
+    No-op for in-memory SQLite (`:memory:`) or non-SQLite URLs.
+    """
+    if not database_string.startswith("sqlite") or ":memory:" in database_string:
+        return
+    db_file = database_string.split("///", 1)[-1]
+    Path(db_file).expanduser().resolve().parent.mkdir(parents=True, exist_ok=True)
# NOTE: _DEFAULT_ENV_FILES resolves .env paths at import time and assumes
# the process has changed dir to the desired project root by this time.
# Even if we were to resolve paths at instantiation time, the global `settings`
@@ -64,11 +94,7 @@ class Settings(BaseSettings):
    LONG_RUNNING_TASK_WARNING_RATIO: float = 0.95
    MAX_RETRIES_PER_STEP: int = 5
    DEBUG_MODE: bool = False
-    DATABASE_STRING: str = (
-        "postgresql+asyncpg://skyvern@localhost/skyvern"
-        if platform.system() == "Windows"
-        else "postgresql+psycopg://skyvern@localhost/skyvern"
-    )
+    DATABASE_STRING: str = Field(default_factory=_default_database_string)
    DATABASE_REPLICA_STRING: str | None = None
    DATABASE_STATEMENT_TIMEOUT_MS: int = 60000
    DISABLE_CONNECTION_POOL: bool = False
@@ -630,6 +656,9 @@ class Settings(BaseSettings):
        )
        object.__setattr__(self, "DATABASE_STRING", updated_string)
+    def is_sqlite(self) -> bool:
+        return self.DATABASE_STRING.startswith("sqlite")
    def is_cloud_environment(self) -> bool:
        """
        :return: True if env is not local, else False
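
A minimal sketch of the resulting precedence, assuming a clean environment with no .env override in scope (pydantic-settings applies env vars before the default_factory):

import os
from skyvern.config import Settings, _ensure_sqlite_dir

os.environ.pop("DATABASE_STRING", None)
s = Settings()
assert s.is_sqlite()  # default_factory picked the ~/.skyvern/data.db URL
_ensure_sqlite_dir(s.DATABASE_STRING)  # creates ~/.skyvern/ on first use

os.environ["DATABASE_STRING"] = "postgresql+asyncpg://skyvern@localhost/skyvern"
assert not Settings().is_sqlite()  # an explicit env value wins over the default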

View file

@@ -895,8 +895,14 @@ class FailedToGetTOTPVerificationCode(SkyvernException):
class SkyvernContextWindowExceededError(SkyvernException):
-    def __init__(self) -> None:
-        message = "Context window exceeded. Please contact support@skyvern.com for help."
+    def __init__(self, model: str | None = None, prompt_name: str | None = None) -> None:
+        details = []
+        if model:
+            details.append(f"model: {model}")
+        if prompt_name:
+            details.append(f"prompt: {prompt_name}")
+        detail_str = f" ({', '.join(details)})" if details else ""
+        message = f"LLM context window exceeded{detail_str}. The page may have too much content for the AI model to process. Please try again or contact support@skyvern.com for help."
        super().__init__(message)
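
For illustration, the enriched message when both optional fields are supplied (the model and prompt names below are hypothetical):

err = SkyvernContextWindowExceededError(model="gpt-4o", prompt_name="extract-actions")
print(err)
# LLM context window exceeded (model: gpt-4o, prompt: extract-actions). The page may
# have too much content for the AI model to process. Please try again or contact
# support@skyvern.com for help.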

View file

@@ -13,7 +13,7 @@ from starlette.requests import HTTPConnection, Request
from starlette_context.middleware import RawContextMiddleware
from starlette_context.plugins.base import Plugin
-from skyvern.config import settings
+from skyvern.config import _ensure_sqlite_dir, settings
from skyvern.exceptions import SkyvernHTTPException
from skyvern.forge import app as forge_app
from skyvern.forge.forge_app_initializer import start_forge_app
@@ -21,8 +21,15 @@ from skyvern.forge.request_logging import log_raw_request_middleware
from skyvern.forge.sdk.core import skyvern_context
from skyvern.forge.sdk.core.skyvern_context import SkyvernContext
from skyvern.forge.sdk.db.exceptions import NotFoundError
+from skyvern.forge.sdk.db.models import Base
from skyvern.forge.sdk.routes import internal_auth
from skyvern.forge.sdk.routes.routers import base_router, legacy_base_router, legacy_v2_router
+from skyvern.forge.sdk.services.local_org_auth_token_service import (
+    ensure_local_api_key,
+    ensure_local_org,
+    fingerprint_token,
+    regenerate_local_api_key,
+)
from skyvern.services.cleanup_service import start_cleanup_scheduler, stop_cleanup_scheduler

LOG = structlog.get_logger()
@@ -74,11 +81,65 @@ def custom_openapi(app: FastAPI) -> dict:
    return app.openapi_schema
async def _bootstrap_sqlite() -> None:
"""Auto-bootstrap SQLite on first server start.
Creates tables, a local org, and an API key so that
``skyvern run server`` works out of the box with zero configuration.
Idempotent: skips if the org already exists.
"""
_ensure_sqlite_dir(settings.DATABASE_STRING)
db = forge_app.DATABASE
async with db.engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
# Preserve an existing API key if it's a real value (not the skeleton default).
# settings.SKYVERN_API_KEY already incorporates env vars and .env via pydantic-settings.
existing_key = settings.SKYVERN_API_KEY if settings.SKYVERN_API_KEY != "PLACEHOLDER" else None
if existing_key:
preserved = await ensure_local_api_key(existing_key)
if preserved is not None:
api_key, org_id = preserved
LOG.info(
"Existing SKYVERN_API_KEY detected — preserving env value and syncing it into the local SQLite DB.",
organization_id=org_id,
api_key_fingerprint=fingerprint_token(api_key),
)
return
LOG.warning(
"Existing SKYVERN_API_KEY could not be preserved for local SQLite bootstrap; generating a new local key.",
)
organization = await ensure_local_org()
existing_token = await db.get_valid_org_auth_token(organization.organization_id, "api")
if existing_token is not None:
LOG.info("SQLite database already bootstrapped", organization_id=organization.organization_id)
return
api_key, org_id, backend_env, frontend_env = await regenerate_local_api_key()
LOG.info(
"SQLite bootstrap complete — local org and API key created",
organization_id=org_id,
api_key_fingerprint=fingerprint_token(api_key),
env_file_written=backend_env,
)
@asynccontextmanager
async def lifespan(fastapi_app: FastAPI) -> AsyncGenerator[None, Any]:
    """Lifespan context manager for FastAPI app startup and shutdown."""
    LOG.info("Server started")
+    # Auto-bootstrap SQLite database on first server start.
+    # Re-raise on failure — a server with no tables/org/API key is
+    # useless and would produce confusing 401s on every request.
+    if settings.is_sqlite():
+        await _bootstrap_sqlite()
    if forge_app.api_app_startup_event:
        LOG.info("Calling api app startup event")
        try:

View file

@@ -766,7 +766,7 @@
                    prompt_name=prompt_name,
                    duration_seconds=duration_seconds,
                )
-                raise SkyvernContextWindowExceededError() from e
+                raise SkyvernContextWindowExceededError(model=main_model_group, prompt_name=prompt_name) from e
            except ValueError as e:
                duration_seconds = time.time() - start_time
                LOG.exception(
@@ -1217,7 +1217,7 @@
                    prompt_name=prompt_name,
                    duration_seconds=duration_seconds,
                )
-                raise SkyvernContextWindowExceededError() from e
+                raise SkyvernContextWindowExceededError(model=model_name, prompt_name=prompt_name) from e
            except CancelledError:
                # Speculative steps are intentionally cancelled when goal verification completes first,
                # so we log at debug level. Non-speculative cancellations are unexpected errors.
@@ -1664,7 +1664,7 @@
                llm_key=self.llm_key,
                model=self.llm_config.model_name,
            )
-            raise SkyvernContextWindowExceededError() from e
+            raise SkyvernContextWindowExceededError(model=self.llm_config.model_name) from e
        except CancelledError:
            # Speculative steps are intentionally cancelled when goal verification returns completed,
            # so we log at debug level. Non-speculative cancellations are unexpected errors.

View file

@@ -51,7 +51,8 @@ class LLMProviderError(BaseLLMError):
class LLMProviderErrorRetryableTask(LLMProviderError):
    def __init__(self, llm_key: str) -> None:
-        super().__init__(f"Retryable error while using LLMProvider {llm_key}")
+        # Call BaseLLMError directly to avoid double-formatting the message through LLMProviderError.__init__
+        BaseLLMError.__init__(self, f"Retryable error while using LLMProvider {llm_key}")
class NoProviderEnabledError(BaseLLMError):
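
Why the direct base-class call matters: assuming LLMProviderError.__init__ accepts an llm_key and wraps it in its own template (its body is not shown in this diff), routing the subclass message through super().__init__ would nest one template inside the other. A sketch of the failure mode under that assumption:

class BaseLLMError(Exception): ...

class LLMProviderError(BaseLLMError):
    def __init__(self, llm_key: str) -> None:
        # Assumed shape: treats its argument as a provider key, not a message.
        super().__init__(f"Error while using LLMProvider {llm_key}")

# super().__init__(f"Retryable error while using LLMProvider {llm_key}") would yield
#   "Error while using LLMProvider Retryable error while using LLMProvider <llm_key>"
# Calling BaseLLMError.__init__ directly skips the intermediate template.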

View file

@@ -54,6 +54,9 @@ def _build_engine(database_string: str) -> AsyncEngine:
    Always enables foreign key enforcement via PRAGMA.
    """
    if database_string.startswith("sqlite"):
+        from skyvern.config import _ensure_sqlite_dir
+        _ensure_sqlite_dir(database_string)
        is_memory = ":memory:" in database_string
        engine_kwargs: dict[str, Any] = {
            "json_serializer": _custom_json_serializer,
@@ -327,6 +330,9 @@ class AgentDB(BaseAlchemyDB):
    async def get_all_runs(self, *args: Any, **kwargs: Any) -> Any:
        return await self.workflow_runs.get_all_runs(*args, **kwargs)
+    async def get_all_runs_v2(self, *args: Any, **kwargs: Any) -> Any:
+        return await self.workflow_runs.get_all_runs_v2(*args, **kwargs)
    async def get_workflow_run(self, *args: Any, **kwargs: Any) -> Any:
        return await self.workflow_runs.get_workflow_run(*args, **kwargs)
@@ -444,22 +450,25 @@ class AgentDB(BaseAlchemyDB):
        return await self.workflow_params.retrieve_action_plan(*args, **kwargs)
    async def create_task_run(self, *args: Any, **kwargs: Any) -> Any:
-        return await self.workflow_params.create_task_run(*args, **kwargs)
+        return await self.tasks.create_task_run(*args, **kwargs)
    async def update_task_run(self, *args: Any, **kwargs: Any) -> Any:
-        return await self.workflow_params.update_task_run(*args, **kwargs)
+        return await self.tasks.update_task_run(*args, **kwargs)
+    async def sync_task_run_status(self, *args: Any, **kwargs: Any) -> Any:
+        return await self.tasks.sync_task_run_status(*args, **kwargs)
    async def update_job_run_compute_cost(self, *args: Any, **kwargs: Any) -> Any:
-        return await self.workflow_params.update_job_run_compute_cost(*args, **kwargs)
+        return await self.tasks.update_job_run_compute_cost(*args, **kwargs)
    async def cache_task_run(self, *args: Any, **kwargs: Any) -> Any:
-        return await self.workflow_params.cache_task_run(*args, **kwargs)
+        return await self.tasks.cache_task_run(*args, **kwargs)
    async def get_cached_task_run(self, *args: Any, **kwargs: Any) -> Any:
-        return await self.workflow_params.get_cached_task_run(*args, **kwargs)
+        return await self.tasks.get_cached_task_run(*args, **kwargs)
    async def get_run(self, *args: Any, **kwargs: Any) -> Any:
-        return await self.workflow_params.get_run(*args, **kwargs)
+        return await self.tasks.get_run(*args, **kwargs)
    # -- Artifact delegates --

View file

@@ -43,6 +43,22 @@ async def test_create_organization(agent_db: AgentDB) -> None:
    assert retrieved_by_domain.domain == domain
@pytest.mark.asyncio
async def test_create_organization_with_explicit_id(agent_db: AgentDB) -> None:
organization = await agent_db.create_organization(
organization_id="o_test_org",
organization_name="Explicit Id Organization",
domain="explicit.test",
)
assert organization.organization_id == "o_test_org"
retrieved_org = await agent_db.get_organization("o_test_org")
assert retrieved_org is not None
assert retrieved_org.organization_name == "Explicit Id Organization"
assert retrieved_org.domain == "explicit.test"
@pytest.mark.asyncio
async def test_get_organization_not_found(agent_db: AgentDB) -> None:
    retrieved_org = await agent_db.get_organization("non_existent_id")

View file

@@ -132,9 +132,11 @@ class OrganizationsMixin:
        max_steps_per_run: int | None = None,
        max_retries_per_step: int | None = None,
        domain: str | None = None,
+        organization_id: str | None = None,
    ) -> Organization:
        async with self.Session() as session:
            org = OrganizationModel(
+                organization_id=organization_id,
                organization_name=organization_name,
                webhook_callback_url=webhook_callback_url,
                max_steps_per_run=max_steps_per_run,

View file

@@ -14,13 +14,15 @@ from skyvern.forge.sdk.db.models import (
    ActionModel,
    StepModel,
    TaskModel,
+    TaskRunModel,
    WorkflowRunModel,
)
from skyvern.forge.sdk.db.utils import convert_to_step, convert_to_task, hydrate_action, serialize_proxy_location
from skyvern.forge.sdk.models import Step, StepStatus
+from skyvern.forge.sdk.schemas.runs import Run
from skyvern.forge.sdk.schemas.tasks import OrderBy, SortDirection, Task, TaskStatus
from skyvern.forge.sdk.utils.sanitization import sanitize_postgres_text
-from skyvern.schemas.runs import ProxyLocationInput
+from skyvern.schemas.runs import ProxyLocationInput, RunStatus, RunType
from skyvern.schemas.steps import AgentStepOutput
from skyvern.webeye.actions.actions import Action
@@ -737,3 +739,197 @@ class TasksMixin:
        )
        await session.execute(stmt)
        await session.commit()
async def sync_task_run_status(
self,
organization_id: str,
run_id: str,
status: str,
started_at: datetime | None = None,
finished_at: datetime | None = None,
) -> None:
"""Best-effort write-through: propagate status from source table to task_runs.
Does NOT raise if the task_runs row is missing (race at creation time).
"""
try:
async with self.Session() as session:
vals: dict[str, Any] = {"status": status}
if started_at is not None:
vals["started_at"] = started_at
if finished_at is not None:
vals["finished_at"] = finished_at
stmt = (
update(TaskRunModel)
.where(TaskRunModel.run_id == run_id)
.where(TaskRunModel.organization_id == organization_id)
.values(**vals)
)
await session.execute(stmt)
await session.commit()
except Exception:
LOG.warning(
"Best-effort task_run status sync failed",
run_id=run_id,
organization_id=organization_id,
status=status,
exc_info=True,
)
@db_operation("create_task_run")
async def create_task_run(
self,
task_run_type: RunType,
organization_id: str,
run_id: str,
title: str | None = None,
url: str | None = None,
url_hash: str | None = None,
status: RunStatus | None = None,
workflow_permanent_id: str | None = None,
parent_workflow_run_id: str | None = None,
debug_session_id: str | None = None,
# script_run, started_at, finished_at are intentionally omitted here —
# they are set via update_task_run() after the run starts/finishes (PRs 2-5).
) -> Run:
searchable_text = " ".join(filter(None, [title, url]))
async with self.Session() as session:
task_run = TaskRunModel(
task_run_type=task_run_type,
organization_id=organization_id,
run_id=run_id,
title=title,
url=url,
url_hash=url_hash,
status=status,
workflow_permanent_id=workflow_permanent_id,
parent_workflow_run_id=parent_workflow_run_id,
debug_session_id=debug_session_id,
searchable_text=searchable_text or None,
)
session.add(task_run)
await session.commit()
await session.refresh(task_run)
return Run.model_validate(task_run)
@db_operation("update_task_run")
async def update_task_run(
self,
organization_id: str,
run_id: str,
title: str | None = None,
url: str | None = None,
url_hash: str | None = None,
status: str | None = None,
started_at: datetime | None = None,
finished_at: datetime | None = None,
) -> None:
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
)
).first()
if not task_run:
raise NotFoundError(f"TaskRun {run_id} not found")
if title is not None:
task_run.title = title
if url is not None:
task_run.url = url
if url_hash is not None:
task_run.url_hash = url_hash
if status is not None:
task_run.status = status
if started_at is not None:
task_run.started_at = started_at
if finished_at is not None:
task_run.finished_at = finished_at
# Recompute searchable_text when title or url changes
if title is not None or url is not None:
task_run.searchable_text = " ".join(filter(None, [task_run.title, task_run.url])) or None
await session.commit()
@db_operation("update_job_run_compute_cost")
async def update_job_run_compute_cost(
self,
organization_id: str,
run_id: str,
instance_type: str | None = None,
vcpu_millicores: int | None = None,
memory_mb: int | None = None,
duration_ms: int | None = None,
compute_cost: float | None = None,
) -> None:
"""Update compute cost metrics for a job run."""
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
)
).first()
if not task_run:
LOG.warning(
"TaskRun not found for compute cost update",
run_id=run_id,
organization_id=organization_id,
)
return
if instance_type is not None:
task_run.instance_type = instance_type
if vcpu_millicores is not None:
task_run.vcpu_millicores = vcpu_millicores
if memory_mb is not None:
task_run.memory_mb = memory_mb
if duration_ms is not None:
task_run.duration_ms = duration_ms
if compute_cost is not None:
task_run.compute_cost = compute_cost
await session.commit()
@db_operation("cache_task_run")
async def cache_task_run(self, run_id: str, organization_id: str | None = None) -> Run:
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(organization_id=organization_id).filter_by(run_id=run_id)
)
).first()
if task_run:
task_run.cached = True
await session.commit()
await session.refresh(task_run)
return Run.model_validate(task_run)
raise NotFoundError(f"Run {run_id} not found")
@db_operation("get_cached_task_run")
async def get_cached_task_run(
self, task_run_type: RunType, url_hash: str | None = None, organization_id: str | None = None
) -> Run | None:
async with self.Session() as session:
query = select(TaskRunModel)
if task_run_type:
query = query.filter_by(task_run_type=task_run_type)
if url_hash:
query = query.filter_by(url_hash=url_hash)
if organization_id:
query = query.filter_by(organization_id=organization_id)
query = query.filter_by(cached=True).order_by(TaskRunModel.created_at.desc())
task_run = (await session.scalars(query)).first()
return Run.model_validate(task_run) if task_run else None
@db_operation("get_run")
async def get_run(
self,
run_id: str,
organization_id: str | None = None,
) -> Run | None:
async with self.Session() as session:
query = select(TaskRunModel).filter_by(run_id=run_id)
if organization_id:
query = query.filter_by(organization_id=organization_id)
task_run = (await session.scalars(query)).first()
return Run.model_validate(task_run) if task_run else None
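
The searchable_text written by create_task_run and recomputed in update_task_run (title plus url, space-joined) is the column the pg_trgm GIN index from the migration covers, and what get_all_runs_v2's icontains filter matches. The join-and-collapse behavior, illustrated with hypothetical values:

# filter(None, ...) drops missing parts; "or None" keeps the column NULL
# (rather than an empty string) when both title and url are absent.
assert " ".join(filter(None, ["Invoice sync", "https://acme.example"])) == "Invoice sync https://acme.example"
assert (" ".join(filter(None, [None, None])) or None) is None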

View file

@@ -24,7 +24,6 @@ from skyvern.forge.sdk.db.models import (
    OutputParameterModel,
    TaskGenerationModel,
    TaskModel,
-    TaskRunModel,
    WorkflowCopilotChatMessageModel,
    WorkflowCopilotChatModel,
    WorkflowParameterModel,
@@ -37,7 +36,6 @@ from skyvern.forge.sdk.db.utils import (
    hydrate_action,
)
from skyvern.forge.sdk.schemas.ai_suggestions import AISuggestion
-from skyvern.forge.sdk.schemas.runs import Run
from skyvern.forge.sdk.schemas.task_generations import TaskGeneration
from skyvern.forge.sdk.schemas.tasks import Task, TaskStatus
from skyvern.forge.sdk.schemas.workflow_copilot import (
@@ -59,7 +57,6 @@ from skyvern.forge.sdk.workflow.models.parameter import (
    WorkflowParameter,
    WorkflowParameterType,
)
-from skyvern.schemas.runs import RunType
from skyvern.webeye.actions.actions import Action

if TYPE_CHECKING:
@@ -567,135 +564,3 @@ class WorkflowParametersMixin:
        actions = (await session.scalars(query)).all()
        return [Action.model_validate(action) for action in actions]
@db_operation("create_task_run")
async def create_task_run(
self,
task_run_type: RunType,
organization_id: str,
run_id: str,
title: str | None = None,
url: str | None = None,
url_hash: str | None = None,
) -> Run:
async with self.Session() as session:
task_run = TaskRunModel(
task_run_type=task_run_type,
organization_id=organization_id,
run_id=run_id,
title=title,
url=url,
url_hash=url_hash,
)
session.add(task_run)
await session.commit()
await session.refresh(task_run)
return Run.model_validate(task_run)
@db_operation("update_task_run")
async def update_task_run(
self,
organization_id: str,
run_id: str,
title: str | None = None,
url: str | None = None,
url_hash: str | None = None,
) -> None:
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
)
).first()
if not task_run:
raise NotFoundError(f"TaskRun {run_id} not found")
if title:
task_run.title = title
if url:
task_run.url = url
if url_hash:
task_run.url_hash = url_hash
await session.commit()
@db_operation("update_job_run_compute_cost")
async def update_job_run_compute_cost(
self,
organization_id: str,
run_id: str,
instance_type: str | None = None,
vcpu_millicores: int | None = None,
memory_mb: int | None = None,
duration_ms: int | None = None,
compute_cost: float | None = None,
) -> None:
"""Update compute cost metrics for a job run."""
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
)
).first()
if not task_run:
LOG.warning(
"TaskRun not found for compute cost update",
run_id=run_id,
organization_id=organization_id,
)
return
if instance_type is not None:
task_run.instance_type = instance_type
if vcpu_millicores is not None:
task_run.vcpu_millicores = vcpu_millicores
if memory_mb is not None:
task_run.memory_mb = memory_mb
if duration_ms is not None:
task_run.duration_ms = duration_ms
if compute_cost is not None:
task_run.compute_cost = compute_cost
await session.commit()
@db_operation("cache_task_run")
async def cache_task_run(self, run_id: str, organization_id: str | None = None) -> Run:
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(organization_id=organization_id).filter_by(run_id=run_id)
)
).first()
if task_run:
task_run.cached = True
await session.commit()
await session.refresh(task_run)
return Run.model_validate(task_run)
raise NotFoundError(f"Run {run_id} not found")
@db_operation("get_cached_task_run")
async def get_cached_task_run(
self, task_run_type: RunType, url_hash: str | None = None, organization_id: str | None = None
) -> Run | None:
async with self.Session() as session:
query = select(TaskRunModel)
if task_run_type:
query = query.filter_by(task_run_type=task_run_type)
if url_hash:
query = query.filter_by(url_hash=url_hash)
if organization_id:
query = query.filter_by(organization_id=organization_id)
query = query.filter_by(cached=True).order_by(TaskRunModel.created_at.desc())
task_run = (await session.scalars(query)).first()
return Run.model_validate(task_run) if task_run else None
@db_operation("get_run")
async def get_run(
self,
run_id: str,
organization_id: str | None = None,
) -> Run | None:
async with self.Session() as session:
query = select(TaskRunModel).filter_by(run_id=run_id)
if organization_id:
query = query.filter_by(organization_id=organization_id)
task_run = (await session.scalars(query)).first()
return Run.model_validate(task_run) if task_run else None

View file

@@ -13,6 +13,7 @@ from skyvern.forge.sdk.db.exceptions import NotFoundError
from skyvern.forge.sdk.db.mixins.base import read_retry
from skyvern.forge.sdk.db.models import (
    TaskModel,
+    TaskRunModel,
    WorkflowModel,
    WorkflowParameterModel,
    WorkflowRunBlockModel,
@@ -383,6 +384,36 @@ class WorkflowRunsMixin:
        return runs[lower:upper]
@read_retry()
async def get_all_runs_v2(
self,
organization_id: str,
page: int = 1,
page_size: int = 10,
status: list[str] | None = None,
search_key: str | None = None,
) -> list[TaskRunModel]:
async with self.Session() as session:
query = (
select(TaskRunModel)
.filter(TaskRunModel.organization_id == organization_id)
.filter(TaskRunModel.status.isnot(None))
.filter(TaskRunModel.parent_workflow_run_id.is_(None))
.filter(TaskRunModel.debug_session_id.is_(None))
)
if status:
query = query.filter(TaskRunModel.status.in_(status))
if search_key:
query = query.filter(TaskRunModel.searchable_text.icontains(search_key, autoescape=True))
offset = (page - 1) * page_size
query = query.order_by(TaskRunModel.created_at.desc()).offset(offset).limit(page_size)
result = await session.scalars(query)
return list(result.all())
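
Note that the three fixed filters (status present, no parent workflow run, no debug session) mirror the ix_task_runs_org_toplevel_created partial index predicate from the migration, so an unfiltered first page stays index-backed. A usage sketch with a hypothetical org id:

runs = await db.get_all_runs_v2(
    organization_id="o_123",  # hypothetical
    page=1,
    page_size=10,
    status=["completed", "failed"],  # optional; uses ix_task_runs_org_status_created
    search_key="acme",  # optional; served by the pg_trgm GIN index
)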
    @read_retry()
    @db_operation("get_workflow_run", log_errors=False)
    async def get_workflow_run(

View file

@@ -12,6 +12,7 @@ from sqlalchemy import (
    Integer,
    Numeric,
    String,
+    Text,
    UnicodeText,
    UniqueConstraint,
    desc,
@@ -65,6 +66,7 @@ from skyvern.forge.sdk.db.id import (
    generate_workflow_script_id,
    generate_workflow_template_id,
)
+from skyvern.forge.sdk.schemas.runs import TERMINAL_STATUSES
from skyvern.forge.sdk.schemas.task_v2 import ThoughtType
@@ -966,6 +968,35 @@ class TaskRunModel(Base):
        Index("task_run_org_url_index", "organization_id", "url_hash", "cached"),
        Index("task_run_org_run_id_index", "organization_id", "run_id"),
        Index("ix_task_runs_org_created_at", "organization_id", "created_at"),
Index(
"ix_task_runs_org_toplevel_created",
"organization_id",
desc("created_at"),
postgresql_using="btree",
postgresql_where=text("parent_workflow_run_id IS NULL AND debug_session_id IS NULL AND status IS NOT NULL"),
),
Index(
"ix_task_runs_org_status_created",
"organization_id",
"status",
desc("created_at"),
postgresql_using="btree",
),
Index(
"ix_task_runs_searchable_text_gin",
"searchable_text",
postgresql_using="gin",
postgresql_ops={"searchable_text": "gin_trgm_ops"},
),
Index(
"ix_task_runs_nonterminal",
"run_id",
"task_run_type",
postgresql_where=sqlalchemy.or_(
sqlalchemy.column("status").is_(None),
~sqlalchemy.column("status").in_(TERMINAL_STATUSES),
),
),
    )
    task_run_id = Column(String, primary_key=True, default=generate_task_run_id)
@@ -976,6 +1007,17 @@ class TaskRunModel(Base):
    url = Column(String, nullable=True)
    url_hash = Column(String, nullable=True)
    cached = Column(Boolean, nullable=False, default=False)
# Run history fields
# status is an open-ended str (not an enum) because task_runs covers multiple
# run types (task, workflow, observer) each with its own status set.
status = Column(String, nullable=True)
started_at = Column(DateTime, nullable=True)
finished_at = Column(DateTime, nullable=True)
workflow_permanent_id = Column(String, nullable=True)
script_run = Column(JSON, nullable=True)
parent_workflow_run_id = Column(String, nullable=True)
debug_session_id = Column(String, nullable=True)
searchable_text = Column(Text, nullable=True)
    # Compute cost tracking fields
    instance_type = Column(String, nullable=True)
    vcpu_millicores = Column(Integer, nullable=True)

View file

@@ -129,9 +129,11 @@ class OrganizationsRepository(BaseRepository):
        max_steps_per_run: int | None = None,
        max_retries_per_step: int | None = None,
        domain: str | None = None,
+        organization_id: str | None = None,
    ) -> Organization:
        async with self.Session() as session:
            org = OrganizationModel(
+                organization_id=organization_id,
                organization_name=organization_name,
                webhook_callback_url=webhook_callback_url,
                max_steps_per_run=max_steps_per_run,

View file

@@ -1,5 +1,6 @@
from __future__ import annotations
import asyncio
from datetime import datetime, timedelta, timezone
from typing import Any, Sequence
@@ -15,13 +16,15 @@ from skyvern.forge.sdk.db.models import (
ActionModel,
StepModel,
TaskModel,
TaskRunModel,
WorkflowRunModel,
)
from skyvern.forge.sdk.db.utils import convert_to_step, convert_to_task, hydrate_action, serialize_proxy_location
from skyvern.forge.sdk.models import Step, StepStatus
from skyvern.forge.sdk.schemas.runs import Run
from skyvern.forge.sdk.schemas.tasks import OrderBy, SortDirection, Task, TaskStatus
from skyvern.forge.sdk.utils.sanitization import sanitize_postgres_text
from skyvern.schemas.runs import ProxyLocationInput
from skyvern.schemas.runs import ProxyLocationInput, RunStatus, RunType
from skyvern.schemas.steps import AgentStepOutput
from skyvern.webeye.actions.actions import Action
@@ -29,6 +32,8 @@ LOG = structlog.get_logger()
class TasksRepository(BaseRepository):
_background_tasks: set[asyncio.Task] = set() # noqa: RUF012
@db_operation("create_task")
async def create_task(
self,
@@ -470,6 +475,22 @@ class TasksRepository(BaseRepository):
updated_task = await self.get_task(task_id, organization_id=organization_id)
if not updated_task:
raise NotFoundError("Task not found")
# Best-effort fire-and-forget write-through to task_runs.
# Mirrors the WorkflowService pattern — cron catches any missed syncs.
if status is not None:
bg = asyncio.create_task(
self.sync_task_run_status(
organization_id=updated_task.organization_id or "",
run_id=updated_task.task_id,
status=status.value,
started_at=updated_task.started_at,
finished_at=updated_task.finished_at,
),
)
self._background_tasks.add(bg)
bg.add_done_callback(self._background_tasks.discard)
return updated_task
else:
raise NotFoundError("Task not found")
@@ -729,3 +750,197 @@ class TasksRepository(BaseRepository):
)
await session.execute(stmt)
await session.commit()
async def sync_task_run_status(
self,
organization_id: str,
run_id: str,
status: str,
started_at: datetime | None = None,
finished_at: datetime | None = None,
) -> None:
"""Best-effort write-through: propagate status from source table to task_runs.
Does NOT raise if the task_runs row is missing (race at creation time).
"""
try:
async with self.Session() as session:
vals: dict[str, Any] = {"status": status}
if started_at is not None:
vals["started_at"] = started_at
if finished_at is not None:
vals["finished_at"] = finished_at
stmt = (
update(TaskRunModel)
.where(TaskRunModel.run_id == run_id)
.where(TaskRunModel.organization_id == organization_id)
.values(**vals)
)
await session.execute(stmt)
await session.commit()
except Exception:
LOG.warning(
"Best-effort task_run status sync failed",
run_id=run_id,
organization_id=organization_id,
status=status,
exc_info=True,
)
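A hedged call-site sketch (IDs are placeholders); because the method swallows exceptions and skips missing rows, callers can invoke it without guarding:

await repo.sync_task_run_status(
    organization_id="o_123",   # placeholder
    run_id="tsk_abc",          # placeholder
    status="completed",
    finished_at=datetime.now(timezone.utc),
)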
@db_operation("create_task_run")
async def create_task_run(
self,
task_run_type: RunType,
organization_id: str,
run_id: str,
title: str | None = None,
url: str | None = None,
url_hash: str | None = None,
status: RunStatus | None = None,
workflow_permanent_id: str | None = None,
parent_workflow_run_id: str | None = None,
debug_session_id: str | None = None,
# script_run, started_at, finished_at are intentionally omitted here —
# they are set via update_task_run() after the run starts/finishes (PRs 2-5).
) -> Run:
searchable_text = " ".join(filter(None, [title, url]))
async with self.Session() as session:
task_run = TaskRunModel(
task_run_type=task_run_type,
organization_id=organization_id,
run_id=run_id,
title=title,
url=url,
url_hash=url_hash,
status=status,
workflow_permanent_id=workflow_permanent_id,
parent_workflow_run_id=parent_workflow_run_id,
debug_session_id=debug_session_id,
searchable_text=searchable_text or None,
)
session.add(task_run)
await session.commit()
await session.refresh(task_run)
return Run.model_validate(task_run)
@db_operation("update_task_run")
async def update_task_run(
self,
organization_id: str,
run_id: str,
title: str | None = None,
url: str | None = None,
url_hash: str | None = None,
status: str | None = None,
started_at: datetime | None = None,
finished_at: datetime | None = None,
) -> None:
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
)
).first()
if not task_run:
raise NotFoundError(f"TaskRun {run_id} not found")
if title is not None:
task_run.title = title
if url is not None:
task_run.url = url
if url_hash is not None:
task_run.url_hash = url_hash
if status is not None:
task_run.status = status
if started_at is not None:
task_run.started_at = started_at
if finished_at is not None:
task_run.finished_at = finished_at
# Recompute searchable_text when title or url changes
if title is not None or url is not None:
task_run.searchable_text = " ".join(filter(None, [task_run.title, task_run.url])) or None
await session.commit()
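For instance (hypothetical values), updating only the title still rebuilds the concatenated search text from the now-current title and the stored url:

# Suppose the row holds title="Old checkout", url="https://example.com/cart".
await repo.update_task_run(
    organization_id="o_123",   # placeholder
    run_id="tsk_abc",          # placeholder
    title="New checkout",
)
# searchable_text is recomputed to "New checkout https://example.com/cart".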
@db_operation("update_job_run_compute_cost")
async def update_job_run_compute_cost(
self,
organization_id: str,
run_id: str,
instance_type: str | None = None,
vcpu_millicores: int | None = None,
memory_mb: int | None = None,
duration_ms: int | None = None,
compute_cost: float | None = None,
) -> None:
"""Update compute cost metrics for a job run."""
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
)
).first()
if not task_run:
LOG.warning(
"TaskRun not found for compute cost update",
run_id=run_id,
organization_id=organization_id,
)
return
if instance_type is not None:
task_run.instance_type = instance_type
if vcpu_millicores is not None:
task_run.vcpu_millicores = vcpu_millicores
if memory_mb is not None:
task_run.memory_mb = memory_mb
if duration_ms is not None:
task_run.duration_ms = duration_ms
if compute_cost is not None:
task_run.compute_cost = compute_cost
await session.commit()
@db_operation("cache_task_run")
async def cache_task_run(self, run_id: str, organization_id: str | None = None) -> Run:
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(organization_id=organization_id).filter_by(run_id=run_id)
)
).first()
if task_run:
task_run.cached = True
await session.commit()
await session.refresh(task_run)
return Run.model_validate(task_run)
raise NotFoundError(f"Run {run_id} not found")
@db_operation("get_cached_task_run")
async def get_cached_task_run(
self, task_run_type: RunType, url_hash: str | None = None, organization_id: str | None = None
) -> Run | None:
async with self.Session() as session:
query = select(TaskRunModel)
if task_run_type:
query = query.filter_by(task_run_type=task_run_type)
if url_hash:
query = query.filter_by(url_hash=url_hash)
if organization_id:
query = query.filter_by(organization_id=organization_id)
query = query.filter_by(cached=True).order_by(TaskRunModel.created_at.desc())
task_run = (await session.scalars(query)).first()
return Run.model_validate(task_run) if task_run else None
@db_operation("get_run")
async def get_run(
self,
run_id: str,
organization_id: str | None = None,
) -> Run | None:
async with self.Session() as session:
query = select(TaskRunModel).filter_by(run_id=run_id)
if organization_id:
query = query.filter_by(organization_id=organization_id)
task_run = (await session.scalars(query)).first()
return Run.model_validate(task_run) if task_run else None

View file

@@ -26,7 +26,6 @@ from skyvern.forge.sdk.db.models import (
OutputParameterModel,
TaskGenerationModel,
TaskModel,
TaskRunModel,
WorkflowCopilotChatMessageModel,
WorkflowCopilotChatModel,
WorkflowParameterModel,
@@ -39,7 +38,6 @@ from skyvern.forge.sdk.db.utils import (
hydrate_action,
)
from skyvern.forge.sdk.schemas.ai_suggestions import AISuggestion
from skyvern.forge.sdk.schemas.runs import Run
from skyvern.forge.sdk.schemas.task_generations import TaskGeneration
from skyvern.forge.sdk.schemas.tasks import Task, TaskStatus
from skyvern.forge.sdk.schemas.workflow_copilot import (
@@ -61,7 +59,6 @@ from skyvern.forge.sdk.workflow.models.parameter import (
WorkflowParameter,
WorkflowParameterType,
)
from skyvern.schemas.runs import RunType
from skyvern.webeye.actions.actions import Action
LOG = structlog.get_logger()
@@ -564,135 +561,3 @@ class WorkflowParametersRepository(BaseRepository):
actions = (await session.scalars(query)).all()
return [Action.model_validate(action) for action in actions]
@db_operation("create_task_run")
async def create_task_run(
self,
task_run_type: RunType,
organization_id: str,
run_id: str,
title: str | None = None,
url: str | None = None,
url_hash: str | None = None,
) -> Run:
async with self.Session() as session:
task_run = TaskRunModel(
task_run_type=task_run_type,
organization_id=organization_id,
run_id=run_id,
title=title,
url=url,
url_hash=url_hash,
)
session.add(task_run)
await session.commit()
await session.refresh(task_run)
return Run.model_validate(task_run)
@db_operation("update_task_run")
async def update_task_run(
self,
organization_id: str,
run_id: str,
title: str | None = None,
url: str | None = None,
url_hash: str | None = None,
) -> None:
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
)
).first()
if not task_run:
raise NotFoundError(f"TaskRun {run_id} not found")
if title:
task_run.title = title
if url:
task_run.url = url
if url_hash:
task_run.url_hash = url_hash
await session.commit()
@db_operation("update_job_run_compute_cost")
async def update_job_run_compute_cost(
self,
organization_id: str,
run_id: str,
instance_type: str | None = None,
vcpu_millicores: int | None = None,
memory_mb: int | None = None,
duration_ms: int | None = None,
compute_cost: float | None = None,
) -> None:
"""Update compute cost metrics for a job run."""
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
)
).first()
if not task_run:
LOG.warning(
"TaskRun not found for compute cost update",
run_id=run_id,
organization_id=organization_id,
)
return
if instance_type is not None:
task_run.instance_type = instance_type
if vcpu_millicores is not None:
task_run.vcpu_millicores = vcpu_millicores
if memory_mb is not None:
task_run.memory_mb = memory_mb
if duration_ms is not None:
task_run.duration_ms = duration_ms
if compute_cost is not None:
task_run.compute_cost = compute_cost
await session.commit()
@db_operation("cache_task_run")
async def cache_task_run(self, run_id: str, organization_id: str | None = None) -> Run:
async with self.Session() as session:
task_run = (
await session.scalars(
select(TaskRunModel).filter_by(organization_id=organization_id).filter_by(run_id=run_id)
)
).first()
if task_run:
task_run.cached = True
await session.commit()
await session.refresh(task_run)
return Run.model_validate(task_run)
raise NotFoundError(f"Run {run_id} not found")
@db_operation("get_cached_task_run")
async def get_cached_task_run(
self, task_run_type: RunType, url_hash: str | None = None, organization_id: str | None = None
) -> Run | None:
async with self.Session() as session:
query = select(TaskRunModel)
if task_run_type:
query = query.filter_by(task_run_type=task_run_type)
if url_hash:
query = query.filter_by(url_hash=url_hash)
if organization_id:
query = query.filter_by(organization_id=organization_id)
query = query.filter_by(cached=True).order_by(TaskRunModel.created_at.desc())
task_run = (await session.scalars(query)).first()
return Run.model_validate(task_run) if task_run else None
@db_operation("get_run")
async def get_run(
self,
run_id: str,
organization_id: str | None = None,
) -> Run | None:
async with self.Session() as session:
query = select(TaskRunModel).filter_by(run_id=run_id)
if organization_id:
query = query.filter_by(organization_id=organization_id)
task_run = (await session.scalars(query)).first()
return Run.model_validate(task_run) if task_run else None

View file

@@ -21,6 +21,7 @@ if TYPE_CHECKING:
from skyvern.forge.sdk.db._sentinels import _UNSET
from skyvern.forge.sdk.db.models import (
TaskModel,
TaskRunModel,
WorkflowModel,
WorkflowParameterModel,
WorkflowRunBlockModel,
@@ -392,6 +393,36 @@ class WorkflowRunsRepository(BaseRepository):
return runs[lower:upper]
@read_retry()
async def get_all_runs_v2(
self,
organization_id: str,
page: int = 1,
page_size: int = 10,
status: list[str] | None = None,
search_key: str | None = None,
) -> list[TaskRunModel]:
async with self.Session() as session:
query = (
select(TaskRunModel)
.filter(TaskRunModel.organization_id == organization_id)
.filter(TaskRunModel.status.isnot(None))
.filter(TaskRunModel.parent_workflow_run_id.is_(None))
.filter(TaskRunModel.debug_session_id.is_(None))
)
if status:
query = query.filter(TaskRunModel.status.in_(status))
if search_key:
query = query.filter(TaskRunModel.searchable_text.icontains(search_key, autoescape=True))
offset = (page - 1) * page_size
query = query.order_by(TaskRunModel.created_at.desc()).offset(offset).limit(page_size)
result = await session.scalars(query)
return list(result.all())
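A hedged usage sketch (the repository handle and filter values are assumptions): page through failed top-level runs matching a substring:

rows = await repo.get_all_runs_v2(
    organization_id="o_123",   # placeholder
    page=2,
    page_size=25,
    status=["failed", "terminated"],
    search_key="checkout",
)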
@read_retry()
@db_operation("get_workflow_run", log_errors=False)
async def get_workflow_run(

View file

@@ -113,7 +113,9 @@ from skyvern.schemas.runs import (
BlockRunResponse,
RunEngine,
RunResponse,
RunStatus,
RunType,
TaskRunListItem,
TaskRunRequest,
TaskRunResponse,
UploadFileResponse,
@@ -1082,7 +1084,6 @@ async def delete_workflow(
"/folders",
response_model=Folder,
tags=["Workflow Folders"],
include_in_schema=False,
openapi_extra={
"x-fern-sdk-method-name": "create_folder",
},
@@ -1125,7 +1126,6 @@ async def create_folder(
"/folders/{folder_id}",
response_model=Folder,
tags=["Workflow Folders"],
include_in_schema=False,
openapi_extra={
"x-fern-sdk-method-name": "get_folder",
},
@@ -1170,7 +1170,6 @@ async def get_folder(
"/folders",
response_model=list[Folder],
tags=["Workflow Folders"],
include_in_schema=False,
openapi_extra={
"x-fern-sdk-method-name": "get_folders",
},
@@ -1228,7 +1227,6 @@ async def get_folders(
"/folders/{folder_id}",
response_model=Folder,
tags=["Workflow Folders"],
include_in_schema=False,
openapi_extra={
"x-fern-sdk-method-name": "update_folder",
},
@@ -1275,7 +1273,6 @@ async def update_folder(
@base_router.delete(
"/folders/{folder_id}",
tags=["Workflow Folders"],
include_in_schema=False,
openapi_extra={
"x-fern-sdk-method-name": "delete_folder",
},
@@ -1312,7 +1309,6 @@ async def delete_folder(
"/workflows/{workflow_permanent_id}/folder",
response_model=Workflow,
tags=["Workflow Folders"],
include_in_schema=False,
openapi_extra={
"x-fern-sdk-method-name": "update_workflow_folder",
},
@@ -2147,6 +2143,48 @@ async def get_runs(
return ORJSONResponse([run.model_dump() for run in runs])
# NOTE: v2 returns TaskRunListItem from the unified task_runs table,
# replacing the v1 response type (list[WorkflowRun | Task]) which
# merged two separate queries. The v1 endpoint is preserved for
# backwards compatibility until clients migrate.
@base_router.get(
"/runs",
tags=["agent"],
response_model=list[TaskRunListItem],
openapi_extra={
"x-fern-sdk-method-name": "get_runs_v2",
},
)
@base_router.get(
"/runs/",
response_model=list[TaskRunListItem],
include_in_schema=False,
)
async def get_runs_v2(
current_org: Organization = Depends(org_auth_service.get_current_org),
page: int = Query(1, ge=1, le=100),
page_size: int = Query(10, ge=1, le=100),
status: Annotated[list[RunStatus] | None, Query()] = None,
search_key: str | None = Query(
None,
min_length=3,
description="Case-insensitive substring search (min 3 chars for trigram index).",
examples=["login_url", "wr_abc123"],
),
) -> Response:
analytics.capture("skyvern-oss-agent-runs-v2-get")
rows = await app.DATABASE.get_all_runs_v2(
current_org.organization_id,
page=page,
page_size=page_size,
status=[s.value for s in status] if status else None,
search_key=search_key,
)
items = [TaskRunListItem.model_validate(row) for row in rows]
return ORJSONResponse([item.model_dump(mode="json") for item in items])
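A hedged client-side sketch of the new endpoint (base URL, route prefix, and API key are placeholders, not confirmed by this diff):

import httpx

resp = httpx.get(
    "http://localhost:8000/api/v1/runs",   # route prefix is an assumption
    headers={"x-api-key": "sk-..."},
    params={"page": 1, "page_size": 10, "status": ["failed"], "search_key": "checkout"},
)
items = resp.json()  # list of TaskRunListItem payloads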
@legacy_base_router.get(
"/tasks/{task_id}/steps",
tags=["agent"],

View file

@@ -1,8 +1,8 @@
from datetime import datetime
from pydantic import BaseModel, ConfigDict
from pydantic import BaseModel, ConfigDict, Field
from skyvern.schemas.runs import RunType
from skyvern.schemas.runs import TERMINAL_STATUSES, RunType # noqa: F401
class Run(BaseModel):
@@ -15,6 +15,18 @@ class Run(BaseModel):
title: str | None = None
url: str | None = None
cached: bool = False
# Run history fields
status: str | None = None
started_at: datetime | None = None
finished_at: datetime | None = None
workflow_permanent_id: str | None = None
# dict when script execution metadata is present (e.g. {"ai_fallback_triggered": True}),
# bool (True/False) when only the ran-as-script flag is needed, None when not a script run.
script_run: dict | bool | None = None
parent_workflow_run_id: str | None = None
debug_session_id: str | None = None
# Internal denormalized column for trigram search — excluded from serialization.
searchable_text: str | None = Field(default=None, exclude=True)
# Compute cost tracking fields
instance_type: str | None = None
vcpu_millicores: int | None = None

View file

@@ -1,13 +1,19 @@
import os
import time
from pathlib import Path
import jwt
import structlog
from dotenv import set_key
from jwt.exceptions import PyJWTError
from pydantic import ValidationError
from skyvern.config import settings
from skyvern.forge import app
from skyvern.forge.sdk.core import security
from skyvern.forge.sdk.schemas.organizations import Organization, OrganizationAuthTokenType
from skyvern.forge.sdk.db.enums import OrganizationAuthTokenType
from skyvern.forge.sdk.models import TokenPayload
from skyvern.forge.sdk.schemas.organizations import Organization
from skyvern.forge.sdk.services.org_auth_token_service import API_KEY_LIFETIME
from skyvern.utils.env_paths import resolve_backend_env_path, resolve_frontend_env_path
@@ -42,6 +48,89 @@ async def ensure_local_org() -> Organization:
)
async def ensure_local_org_with_id(organization_id: str) -> Organization:
"""Ensure the local org exists, preferring the caller-provided ID on first creation.
If a local org already exists for the shared local domain, this returns that
org unchanged even when its organization_id differs from the requested one.
"""
organization = await app.DATABASE.get_organization_by_domain(SKYVERN_LOCAL_DOMAIN)
if organization:
return organization
return await app.DATABASE.create_organization(
organization_id=organization_id,
organization_name=SKYVERN_LOCAL_ORG,
domain=SKYVERN_LOCAL_DOMAIN,
max_steps_per_run=10,
max_retries_per_step=3,
)
def _decode_local_api_key_payload(api_key: str) -> TokenPayload | None:
try:
payload = jwt.decode(
api_key,
settings.SECRET_KEY,
algorithms=[settings.SIGNATURE_ALGORITHM],
options={"verify_exp": False},
)
return TokenPayload(**payload)
except (PyJWTError, ValidationError):
return None
async def ensure_local_api_key(api_key: str) -> tuple[str, str] | None:
"""Ensure the provided API key remains usable for the local organization.
Preserves the externally provided key by syncing it into the database
instead of rewriting env files, but only when it is still a valid local JWT.
Returns ``None`` when the key cannot be preserved and the caller should
regenerate a new local key.
"""
payload = _decode_local_api_key_payload(api_key)
if payload is None:
LOG.warning("Existing local API key is not a valid JWT; regenerating", fingerprint=fingerprint_token(api_key))
return None
if payload.exp < time.time():
LOG.warning("Existing local API key is expired; regenerating", fingerprint=fingerprint_token(api_key))
return None
organization = await app.DATABASE.get_organization_by_domain(SKYVERN_LOCAL_DOMAIN)
if organization is None:
organization = await ensure_local_org_with_id(payload.sub)
elif organization.organization_id != payload.sub:
LOG.warning(
"Existing local organization does not match API key subject; regenerating",
existing_organization_id=organization.organization_id,
token_organization_id=payload.sub,
fingerprint=fingerprint_token(api_key),
)
return None
org_id = organization.organization_id
existing_token = await app.DATABASE.validate_org_auth_token(
organization_id=org_id,
token_type=OrganizationAuthTokenType.api,
token=api_key,
)
if existing_token is None:
await app.DATABASE.replace_org_auth_token(
organization_id=org_id,
token_type=OrganizationAuthTokenType.api,
token=api_key,
)
LOG.info(
"Local API key synced",
organization_id=org_id,
fingerprint=fingerprint_token(api_key),
)
settings.SKYVERN_API_KEY = api_key
os.environ["SKYVERN_API_KEY"] = api_key
return api_key, org_id
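A hedged sketch of the intended call pattern; the tuple layout of regenerate_local_api_key is taken from its annotation only:

maybe_key = await ensure_local_api_key(os.environ.get("SKYVERN_API_KEY", ""))
if maybe_key is None:
    # Key was invalid, expired, or bound to a different org: mint a fresh one.
    regenerated = await regenerate_local_api_key()  # tuple[str, str, str, str | None]
else:
    api_key, org_id = maybe_key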
async def regenerate_local_api_key() -> tuple[str, str, str, str | None]:
"""Create a fresh API key for the local organization and persist it to env files.

View file

@@ -299,6 +299,9 @@ def _get_workflow_definition_core_data(workflow_definition: WorkflowDefinition)
class WorkflowService:
# Prevent GC of fire-and-forget asyncio tasks (e.g. task_run sync).
_background_tasks: set[asyncio.Task] = set() # noqa: RUF012
@staticmethod
def _determine_cache_invalidation(
previous_blocks: list[dict[str, Any]],
@@ -1011,7 +1014,9 @@ class WorkflowService:
browser_session_id=browser_session_id,
workflow_run_id=workflow_run_id,
)
failure_reason = f"Failed to begin browser session for workflow run: {str(e)}"
failure_reason = (
f"Failed to begin browser session for workflow run: {get_user_facing_exception_message(e)}"
)
workflow_run = await self.mark_workflow_run_as_failed(
workflow_run_id=workflow_run_id,
failure_reason=failure_reason,
@@ -3623,8 +3628,51 @@ class WorkflowService:
trigger_type=workflow_run.trigger_type,
workflow_schedule_id=workflow_run.workflow_schedule_id,
)
# Best-effort fire-and-forget write-through to task_runs table.
# Runs off the hot path so workflow status transitions stay fast.
bg = asyncio.create_task(
self._sync_task_run_from_workflow_run(workflow_run, workflow_run_id, status),
)
self._background_tasks.add(bg)
bg.add_done_callback(self._background_tasks.discard)
return workflow_run
async def _sync_task_run_from_workflow_run(
self,
workflow_run: WorkflowRun,
workflow_run_id: str,
status: WorkflowRunStatus,
) -> None:
"""Fire-and-forget: propagate workflow_run status to task_runs."""
try:
await app.DATABASE.sync_task_run_status(
organization_id=workflow_run.organization_id,
run_id=workflow_run_id,
status=status.value,
started_at=workflow_run.started_at,
finished_at=workflow_run.finished_at,
)
# Also sync task_v2 if this workflow_run backs an observer_cruise
task_v2 = await app.DATABASE.get_task_v2_by_workflow_run_id(
workflow_run_id=workflow_run_id,
organization_id=workflow_run.organization_id,
)
if task_v2:
await app.DATABASE.sync_task_run_status(
organization_id=workflow_run.organization_id,
run_id=task_v2.observer_cruise_id,
status=status.value,
started_at=workflow_run.started_at,
finished_at=workflow_run.finished_at,
)
except Exception:
LOG.warning(
"Failed to sync task_run status from workflow_run",
workflow_run_id=workflow_run_id,
exc_info=True,
)
async def mark_workflow_run_as_completed(self, workflow_run_id: str, run_with: str | None = None) -> WorkflowRun:
LOG.info(
f"Marking workflow run {workflow_run_id} as completed",

View file

@@ -1,3 +1,4 @@
import copy
import os
import tempfile
from datetime import timedelta
@@ -15,6 +16,57 @@ from skyvern.forge.sdk.api.llm.models import LLMConfig, LLMRouterConfig
_EMBEDDED_ACTIVE: bool = False
_BLOCKED_EMBEDDED_SETTINGS = frozenset({"OTEL_ENABLED", "ENABLE_CLEANUP_CRON"})
_SQLITE_OVERRIDE_VALUES: dict[str, Any] = {
"DATABASE_STRING": "sqlite+aiosqlite:///:memory:",
"DATABASE_REPLICA_STRING": None,
"ADDITIONAL_MODULES": [],
"OTEL_ENABLED": False,
"ENABLE_CLEANUP_CRON": False,
}
_BOOTSTRAP_RUNTIME_SETTINGS = frozenset({"LLM_KEY", "BROWSER_LOGS_ENABLED", "SKYVERN_API_KEY"})
# Keys mutated by _apply_sqlite_overrides and bootstrap-time request setup.
# SQLite override keys are derived from the actual override mapping so that
# snapshot coverage can't silently drift when new overrides are added.
# OTEL_ENABLED and ENABLE_CLEANUP_CRON overlap with _BLOCKED_EMBEDDED_SETTINGS —
# snapshotting them is a defensive no-op (blocked keys can't be mutated via
# settings_overrides, but _apply_sqlite_overrides sets them directly).
_SETTINGS_SNAPSHOT_KEYS = frozenset(_SQLITE_OVERRIDE_VALUES) | _BLOCKED_EMBEDDED_SETTINGS | _BOOTSTRAP_RUNTIME_SETTINGS
def _snapshot_settings() -> dict[str, dict[str, Any]]:
"""Capture current values of mutable settings keys across all targets."""
from skyvern.forge.sdk.settings_manager import SettingsManager # noqa: PLC0415
snapshots: dict[str, dict[str, Any]] = {}
targets = {"settings": settings}
mgr = SettingsManager.get_settings()
if mgr is not settings:
targets["mgr"] = mgr
for label, target in targets.items():
snap: dict[str, Any] = {}
for key in _SETTINGS_SNAPSHOT_KEYS:
if hasattr(target, key):
val = getattr(target, key)
snap[key] = copy.deepcopy(val) if isinstance(val, (list, dict)) else val
snapshots[label] = snap
return snapshots
def _restore_settings(snapshots: dict[str, dict[str, Any]]) -> None:
"""Restore settings from a snapshot taken by _snapshot_settings."""
from skyvern.forge.sdk.settings_manager import SettingsManager # noqa: PLC0415
targets = {"settings": settings}
mgr = SettingsManager.get_settings()
if mgr is not settings:
targets["mgr"] = mgr
for label, target in targets.items():
if label in snapshots:
for key, val in snapshots[label].items():
setattr(target, key, val)
def create_embedded_server(
@@ -39,9 +91,28 @@ def create_embedded_server(
async def handle_async_request(self, request: httpx.Request) -> httpx.Response:
if self._transport is None:
snapshots = _snapshot_settings()
# Capture the current forge app instance so we can restore it
# if bootstrap fails partway through (after create_api_app sets
# a new instance but before bootstrap completes).
from skyvern.forge import app as forge_app_holder # noqa: PLC0415
prev_app_inst = object.__getattribute__(forge_app_holder, "_inst") # type: ignore[arg-type]
try:
await self._bootstrap(request)
except Exception:
except Exception as exc:
import structlog # noqa: PLC0415
structlog.get_logger().exception(
"Embedded bootstrap failed; restoring previous process state",
error_type=type(exc).__name__,
)
# Restore settings so a retry (or subsequent tests in the
# same process) sees the original values, not half-applied
# SQLite overrides.
_restore_settings(snapshots)
# Restore the previous forge app instance
forge_app_holder.set_app(prev_app_inst) # type: ignore[attr-defined]
# Reset the single-client guard so the user can retry
# after fixing the issue (e.g., missing API key).
global _EMBEDDED_ACTIVE
@@ -82,7 +153,9 @@ def create_embedded_server(
raise ValueError(f"Invalid setting: {key}")
if not use_in_memory_db:
self._api_key = os.getenv("SKYVERN_API_KEY")
self._api_key = (
settings_overrides.get("SKYVERN_API_KEY") if settings_overrides is not None else None
) or os.getenv("SKYVERN_API_KEY")
if not self._api_key:
raise ValueError("SKYVERN_API_KEY is not set. Provide api_key or set SKYVERN_API_KEY in .env file.")
@@ -199,9 +272,7 @@ def _apply_sqlite_overrides() -> None:
if mgr_settings is not settings:
targets.append(mgr_settings)
for target in targets:
target.DATABASE_STRING = "sqlite+aiosqlite:///:memory:"
target.DATABASE_REPLICA_STRING = None
target.ADDITIONAL_MODULES = []
target.OTEL_ENABLED = False
if hasattr(target, "ENABLE_CLEANUP_CRON"):
target.ENABLE_CLEANUP_CRON = False
for key, value in _SQLITE_OVERRIDE_VALUES.items():
if hasattr(target, key):
copied = copy.deepcopy(value) if isinstance(value, (list, dict)) else value
setattr(target, key, copied)

View file

@@ -129,7 +129,7 @@ class Skyvern(AsyncSkyvern):
*,
llm_config: LLMRouterConfig | LLMConfig | None = None,
settings: dict[str, Any] | None = None,
use_in_memory_db: bool = False,
use_in_memory_db: bool | None = None,
) -> "Skyvern":
"""Local/embedded mode: Run Skyvern locally in-process.
@@ -139,23 +139,36 @@ class Skyvern(AsyncSkyvern):
overriding the LLM_KEY setting from your .env file.
If not provided, uses the LLM configured via LLM_KEY in your .env file.
Example 1 - Using .env configuration (simplest, recommended):
Example 1 - Zero-config (simplest, recommended):
```python
from skyvern import Skyvern
# Uses LLM_KEY and other settings from your .env file
# Created by running `skyvern quickstart`
# Auto-detects: uses .env/Postgres if configured, else in-memory SQLite.
skyvern = Skyvern.local()
```
Example 2 - Zero-config with in-memory database (no Postgres needed):
Example 2 - Force in-memory SQLite (no .env needed):
```python
from skyvern import Skyvern
skyvern = Skyvern.local(use_in_memory_db=True)
```
Example 3 - Custom LLM with environment variables:
Example 3 - Force persistent mode:
```python
from skyvern import Skyvern
# Works with env vars, .env, or explicit settings overrides.
skyvern = Skyvern.local(
use_in_memory_db=False,
settings={
"DATABASE_STRING": "postgresql+psycopg://skyvern@localhost/skyvern",
"SKYVERN_API_KEY": "sk-...",
},
)
```
Example 4 - Custom LLM with environment variables:
```python
from skyvern import Skyvern
from skyvern.forge.sdk.api.llm.models import LLMConfig
@@ -173,16 +186,22 @@ class Skyvern(AsyncSkyvern):
settings: Optional dictionary of Skyvern settings to override.
These override the corresponding settings from your .env file.
Example: {"MAX_STEPS_PER_RUN": 100, "BROWSER_TYPE": "chromium-headful"}
use_in_memory_db: If True, use SQLite in-memory instead of PostgreSQL.
No .env file or running Postgres instance required. Defaults to False.
Zero-config mode supports:
use_in_memory_db: Controls the database backend for embedded mode.
- None (default): Auto-detect. If DATABASE_STRING is set in env, .env,
or settings overrides, use persistent mode (Postgres). Otherwise use
in-memory SQLite for zero-config operation.
- True: Force in-memory SQLite. No .env or Postgres required.
- False: Force persistent mode. Requires DATABASE_STRING and
SKYVERN_API_KEY from env, .env, or settings overrides.
In-memory mode supports:
- run_task(), extract(), click(), navigate() full browser automation
- Workflow CRUD (create, list, search, get, run)
- Artifacts saved to local temp directory (file:// URIs)
- Any LLM provider supported by litellm
Not supported in zero-config mode (requires Skyvern Cloud or Postgres):
Not supported in in-memory mode (requires Skyvern Cloud or Postgres):
- Workflow scheduling (requires persistent database)
- Cloud browser sessions (requires S3/Azure storage)
- Rate limiting (cloud-only)
@@ -194,16 +213,38 @@ class Skyvern(AsyncSkyvern):
Returns:
Skyvern: A Skyvern instance running in local/embedded mode.
"""
from dotenv import dotenv_values # noqa: PLC0415
from skyvern.library.embedded_server_factory import create_embedded_server # noqa: PLC0415
from skyvern.utils.env_paths import resolve_backend_env_path # noqa: PLC0415
# Auto-detect mode when use_in_memory_db is not explicitly set.
# If DATABASE_STRING is configured anywhere, honor it (persistent mode).
# Otherwise fall back to zero-config in-memory SQLite.
if use_in_memory_db is None:
env_path = resolve_backend_env_path()
dotenv_config = dotenv_values(env_path) if env_path.exists() else {}
settings_overrides = settings or {}
explicit_db = (
settings_overrides.get("DATABASE_STRING")
or os.environ.get("DATABASE_STRING")
or dotenv_config.get("DATABASE_STRING")
)
use_in_memory_db = not bool(explicit_db)
if not use_in_memory_db:
if not os.path.exists(".env"):
raise ValueError("Please run `skyvern quickstart` to set up your local Skyvern environment")
load_dotenv(".env")
api_key = os.getenv("SKYVERN_API_KEY")
env_path = resolve_backend_env_path()
if env_path.exists():
load_dotenv(env_path)
settings_overrides = settings or {}
api_key = settings_overrides.get("SKYVERN_API_KEY") or os.getenv("SKYVERN_API_KEY")
if not api_key:
raise ValueError("SKYVERN_API_KEY is not set. Provide api_key or set SKYVERN_API_KEY in .env file.")
raise ValueError(
"Persistent local mode requires SKYVERN_API_KEY. "
"Set it in env/.env, pass settings={'SKYVERN_API_KEY': ...}, "
"or use use_in_memory_db=True for ephemeral SQLite."
)
obj = cls.__new__(cls)

View file

@@ -5,7 +5,7 @@ from enum import StrEnum
from typing import Annotated, Any, Literal, Union
from zoneinfo import ZoneInfo
from pydantic import BaseModel, Field, field_validator
from pydantic import BaseModel, ConfigDict, Field, field_validator
from skyvern.forge.sdk.schemas.files import FileInfo
from skyvern.forge.sdk.workflow.models.validators import normalize_run_with
@@ -362,7 +362,13 @@ class RunStatus(StrEnum):
canceled = "canceled"
def is_final(self) -> bool:
return self in [self.failed, self.terminated, self.canceled, self.timed_out, self.completed]
return self.value in TERMINAL_STATUSES
# Statuses that are final — once a row reaches one of these, it never changes.
# Single source of truth: used by the sync cron, the partial index, and any
# code that needs to know whether a run is "done".
TERMINAL_STATUSES = ("completed", "failed", "terminated", "canceled", "timed_out")
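A quick illustration, using values from the tuple above:

assert RunStatus.completed.is_final()
assert not RunStatus.queued.is_final()
assert "timed_out" in TERMINAL_STATUSES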
class TaskRunRequest(BaseModel):
@@ -672,3 +678,31 @@ RunResponse = Annotated[Union[TaskRunResponse, WorkflowRunResponse], Field(discr
class BlockRunResponse(WorkflowRunResponse):
block_labels: list[str] = Field(description="A whitelist of block labels; only these blocks will execute")
class TaskRunListItem(BaseModel):
"""Lightweight run-history item backed by the task_runs table."""
model_config = ConfigDict(from_attributes=True)
task_run_id: str
run_id: str
task_run_type: str
status: str
title: str | None = None
started_at: datetime | None = None
finished_at: datetime | None = None
created_at: datetime
workflow_permanent_id: str | None = None
script_run: bool = False
searchable_text: str | None = Field(default=None, exclude=True)
@field_validator("script_run", mode="before")
@classmethod
def coerce_script_run(cls, v: Any) -> bool:
"""Intentionally lossy: collapse dict metadata / bool / None → bool for the list view.
The full script execution metadata (dict) is available via the detail
endpoint's Run.script_run field. Do not rely on dict contents here.
"""
return bool(v)
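An illustrative round-trip (field values are placeholders) showing the stored shapes of script_run collapsing to a bool:

from datetime import datetime

row = {
    "task_run_id": "tr_1",        # placeholder
    "run_id": "tsk_1",            # placeholder
    "task_run_type": "task_v1",   # placeholder value
    "status": "completed",
    "created_at": datetime.now(),
    "script_run": {"ai_fallback_triggered": True},
}
assert TaskRunListItem.model_validate(row).script_run is True
assert TaskRunListItem.model_validate({**row, "script_run": None}).script_run is False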

View file

@@ -14,7 +14,7 @@ from skyvern.forge.sdk.executor.factory import AsyncExecutorFactory
from skyvern.forge.sdk.schemas.organizations import Organization
from skyvern.forge.sdk.schemas.task_generations import TaskGeneration, TaskGenerationBase
from skyvern.forge.sdk.schemas.tasks import Task, TaskRequest, TaskResponse, TaskStatus
from skyvern.schemas.runs import RunEngine, RunType
from skyvern.schemas.runs import RunEngine, RunStatus, RunType
LOG = structlog.get_logger()
@@ -101,6 +101,7 @@ async def run_task(
title=task.title,
url=task.url,
url_hash=url_hash,
status=RunStatus.queued,
)
if x_max_steps_override:
LOG.info(

View file

@@ -40,7 +40,15 @@ from skyvern.forge.sdk.workflow.models.block import (
)
from skyvern.forge.sdk.workflow.models.parameter import PARAMETER_TYPE, ContextParameter
from skyvern.forge.sdk.workflow.models.workflow import Workflow, WorkflowRequestBody, WorkflowRun, WorkflowRunStatus
from skyvern.schemas.runs import ProxyLocation, ProxyLocationInput, RunEngine, RunType, TaskRunRequest, TaskRunResponse
from skyvern.schemas.runs import (
ProxyLocation,
ProxyLocationInput,
RunEngine,
RunStatus,
RunType,
TaskRunRequest,
TaskRunResponse,
)
from skyvern.schemas.workflows import (
BLOCK_YAML_TYPES,
PARAMETER_YAML_TYPES,
@@ -334,6 +342,9 @@ async def initialize_task_v2(
organization_id=organization.organization_id,
run_id=task_v2.observer_cruise_id,
title=new_workflow.title,
url=task_v2.url,
url_hash=generate_url_hash(task_v2.url) if task_v2.url else None,
status=RunStatus.queued,
)
except Exception:
LOG.warning("Failed to update task 2.0", exc_info=True)

View file

@@ -60,6 +60,10 @@ async def prepare_workflow(
organization_id=organization.organization_id,
run_id=workflow_run.workflow_run_id,
title=workflow.title,
status=RunStatus.queued,
workflow_permanent_id=workflow_id,
parent_workflow_run_id=parent_workflow_run_id,
debug_session_id=debug_session_id,
)
if max_steps:

View file

@@ -9,50 +9,31 @@ DATABASE_STRING to SQLite, and subsequent scenario tests (which expect
Postgres) fail with "no such table: organizations".
"""
import copy
import pytest
from skyvern.config import settings
@pytest.fixture(autouse=True)
def _restore_settings():
"""Save and restore global settings around each embedded test.
Embedded mode mutates the global settings singleton (DATABASE_STRING,
ADDITIONAL_MODULES, OTEL_ENABLED, etc.). Without restoration, subsequent
Postgres-based scenario tests in the same pytest process see SQLite
settings and fail.
"""
from skyvern.forge.sdk.settings_manager import SettingsManager
keys_to_save = [
"DATABASE_STRING",
"DATABASE_REPLICA_STRING",
"ADDITIONAL_MODULES",
"OTEL_ENABLED",
"LLM_KEY",
"BROWSER_LOGS_ENABLED",
]
original_values = {}
for key in keys_to_save:
val = getattr(settings, key, None)
original_values[key] = copy.deepcopy(val) if isinstance(val, (list, dict)) else val
mgr_originals = {}
mgr_settings = SettingsManager.get_settings()
if mgr_settings is not settings:
for key in keys_to_save:
if hasattr(mgr_settings, key):
val = getattr(mgr_settings, key)
mgr_originals[key] = copy.deepcopy(val) if isinstance(val, (list, dict)) else val
yield
for key, val in original_values.items():
setattr(settings, key, val)
if mgr_settings is not settings:
for key, val in mgr_originals.items():
setattr(mgr_settings, key, val)
import os
import pytest
from skyvern.library.embedded_server_factory import _restore_settings, _snapshot_settings
@pytest.fixture(autouse=True)
def _restore_settings_fixture(): # type: ignore[no-untyped-def] # pytest fixture
"""Save and restore global settings + forge app around each embedded test.
Uses the shared snapshot/restore helpers from embedded_server_factory so
that the key list stays in sync with what _apply_sqlite_overrides mutates.
"""
from skyvern.forge import app as forge_app_holder
snapshots = _snapshot_settings()
prev_app_inst = object.__getattribute__(forge_app_holder, "_inst") # type: ignore[arg-type]
prev_api_key = os.environ.get("SKYVERN_API_KEY")
yield
_restore_settings(snapshots)
forge_app_holder.set_app(prev_app_inst) # type: ignore[attr-defined]
if prev_api_key is None:
os.environ.pop("SKYVERN_API_KEY", None)
else:
os.environ["SKYVERN_API_KEY"] = prev_api_key

View file

@@ -0,0 +1,430 @@
"""Tests for SQLite-first server mode.
Covers:
- _default_database_string() returns SQLite path under ~/.skyvern/
- Settings.is_sqlite() detection
- SQLite bootstrap in api_app lifespan (tables, org, token, idempotency)
- Settings snapshot/restore on bootstrap failure
- ForgeApp restoration on bootstrap failure
"""
from pathlib import Path
from unittest.mock import patch
import httpx
import pytest
import pytest_asyncio
from skyvern.config import _default_database_string
def test_default_database_string_is_sqlite() -> None:
"""_default_database_string returns a SQLite URL pointing at ~/.skyvern/data.db."""
result = _default_database_string()
assert result.startswith("sqlite+aiosqlite:///")
assert ".skyvern/data.db" in result
def test_default_database_string_is_pure(tmp_path: Path) -> None:
"""_default_database_string is a pure string computation — no side effects."""
fake_home = tmp_path / "fakehome"
with patch("skyvern.config.Path.home", return_value=fake_home):
result = _default_database_string()
assert not (fake_home / ".skyvern").exists(), "factory must not create directories"
assert ".skyvern/data.db" in result
def test_ensure_sqlite_dir_creates_directory(tmp_path: Path) -> None:
"""_ensure_sqlite_dir creates the parent directory for file-backed SQLite."""
from skyvern.config import _ensure_sqlite_dir
db_path = tmp_path / "subdir" / "data.db"
_ensure_sqlite_dir(f"sqlite+aiosqlite:///{db_path}")
assert (tmp_path / "subdir").is_dir()
def test_ensure_sqlite_dir_noop_for_memory() -> None:
"""_ensure_sqlite_dir is a no-op for in-memory SQLite."""
from skyvern.config import _ensure_sqlite_dir
_ensure_sqlite_dir("sqlite+aiosqlite:///:memory:") # should not raise
def test_ensure_sqlite_dir_noop_for_postgres() -> None:
"""_ensure_sqlite_dir is a no-op for Postgres URLs."""
from skyvern.config import _ensure_sqlite_dir
_ensure_sqlite_dir("postgresql+psycopg://localhost/test") # should not raise
def test_is_sqlite_true_for_sqlite_string() -> None:
"""is_sqlite() returns True when DATABASE_STRING starts with 'sqlite'."""
from skyvern.config import Settings
s = Settings(DATABASE_STRING="sqlite+aiosqlite:///test.db")
assert s.is_sqlite() is True
def test_is_sqlite_false_for_postgres_string() -> None:
"""is_sqlite() returns False for PostgreSQL strings."""
from skyvern.config import Settings
s = Settings(DATABASE_STRING="postgresql+psycopg://skyvern@localhost/skyvern")
assert s.is_sqlite() is False
@pytest_asyncio.fixture
async def sqlite_bootstrap_db(): # type: ignore[no-untyped-def] # pytest fixture
"""Swap in a disposable SQLite AgentDB for bootstrap tests."""
from skyvern.forge import app as forge_app
from skyvern.forge.sdk.db.agent_db import AgentDB
db = AgentDB("sqlite+aiosqlite:///:memory:")
original_db = forge_app.DATABASE
forge_app.DATABASE = db # type: ignore[assignment]
try:
yield db
finally:
forge_app.DATABASE = original_db # type: ignore[assignment]
await db.engine.dispose()
@pytest.fixture
def patched_env_writes(): # type: ignore[no-untyped-def] # pytest fixture
"""Mock env-file writes so bootstrap tests do not touch the repo .env."""
with patch("skyvern.forge.sdk.services.local_org_auth_token_service._write_env") as write_env:
yield write_env
@pytest.mark.asyncio
async def test_sqlite_bootstrap_creates_tables_and_org(sqlite_bootstrap_db, patched_env_writes) -> None:
"""_bootstrap_sqlite creates tables, org, and API key in a SQLite DB."""
from skyvern.forge.api_app import _bootstrap_sqlite
from skyvern.forge.sdk.db.enums import OrganizationAuthTokenType
from skyvern.forge.sdk.db.models import Base
from skyvern.forge.sdk.services.local_org_auth_token_service import SKYVERN_LOCAL_DOMAIN
async with sqlite_bootstrap_db.engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
await _bootstrap_sqlite()
org = await sqlite_bootstrap_db.get_organization_by_domain(SKYVERN_LOCAL_DOMAIN)
assert org is not None
assert org.organization_name == "Skyvern-local"
token = await sqlite_bootstrap_db.get_valid_org_auth_token(org.organization_id, OrganizationAuthTokenType.api)
assert token is not None
@pytest.mark.asyncio
async def test_sqlite_bootstrap_is_idempotent(sqlite_bootstrap_db, patched_env_writes) -> None:
"""Calling _bootstrap_sqlite twice does not create duplicate orgs."""
from skyvern.forge.api_app import _bootstrap_sqlite
from skyvern.forge.sdk.db.enums import OrganizationAuthTokenType
from skyvern.forge.sdk.db.models import Base
from skyvern.forge.sdk.services.local_org_auth_token_service import SKYVERN_LOCAL_DOMAIN
async with sqlite_bootstrap_db.engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
await _bootstrap_sqlite()
org1 = await sqlite_bootstrap_db.get_organization_by_domain(SKYVERN_LOCAL_DOMAIN)
# Second call should detect existing org and skip
await _bootstrap_sqlite()
org2 = await sqlite_bootstrap_db.get_organization_by_domain(SKYVERN_LOCAL_DOMAIN)
assert org1 is not None
assert org2 is not None
assert org1.organization_id == org2.organization_id
token = await sqlite_bootstrap_db.get_valid_org_auth_token(org1.organization_id, OrganizationAuthTokenType.api)
assert token is not None
@pytest.mark.asyncio
async def test_sqlite_bootstrap_from_empty_db(sqlite_bootstrap_db, patched_env_writes) -> None:
"""_bootstrap_sqlite creates tables AND org from a completely empty DB.
Unlike test_sqlite_bootstrap_creates_tables_and_org which pre-creates
tables, this test starts from scratch to cover the full first-start path.
"""
from skyvern.forge.api_app import _bootstrap_sqlite
from skyvern.forge.sdk.services.local_org_auth_token_service import SKYVERN_LOCAL_DOMAIN
# NO create_all — bootstrap must handle it
await _bootstrap_sqlite()
org = await sqlite_bootstrap_db.get_organization_by_domain(SKYVERN_LOCAL_DOMAIN)
assert org is not None
assert org.organization_name == "Skyvern-local"
@pytest.mark.asyncio
async def test_sqlite_bootstrap_syncs_existing_env_api_key(
monkeypatch: pytest.MonkeyPatch,
tmp_path: Path,
sqlite_bootstrap_db,
patched_env_writes,
) -> None:
"""An existing SKYVERN_API_KEY must become a valid token in a fresh SQLite DB."""
from skyvern.forge.api_app import _bootstrap_sqlite
from skyvern.forge.sdk.core.security import create_access_token
from skyvern.forge.sdk.db.enums import OrganizationAuthTokenType
from skyvern.forge.sdk.services.local_org_auth_token_service import SKYVERN_LOCAL_DOMAIN
from skyvern.forge.sdk.services.org_auth_service import resolve_org_from_api_key
monkeypatch.chdir(tmp_path)
expected_org_id = "o_existing_local"
existing_api_key = create_access_token(expected_org_id)
monkeypatch.setenv("SKYVERN_API_KEY", existing_api_key)
# Bootstrap reads settings.SKYVERN_API_KEY (the pydantic singleton), not os.environ directly
monkeypatch.setattr("skyvern.config.settings.SKYVERN_API_KEY", existing_api_key)
await _bootstrap_sqlite()
org = await sqlite_bootstrap_db.get_organization_by_domain(SKYVERN_LOCAL_DOMAIN)
assert org is not None
assert org.organization_id == expected_org_id
token = await sqlite_bootstrap_db.get_valid_org_auth_token(org.organization_id, OrganizationAuthTokenType.api)
assert token is not None
assert token.token == existing_api_key
validation = await resolve_org_from_api_key(existing_api_key, sqlite_bootstrap_db)
assert validation.organization.organization_id == expected_org_id
patched_env_writes.assert_not_called()
@pytest.mark.asyncio
async def test_sqlite_bootstrap_repairs_existing_org_without_token(
monkeypatch: pytest.MonkeyPatch,
tmp_path: Path,
sqlite_bootstrap_db,
patched_env_writes,
) -> None:
"""Bootstrap should self-heal an existing local org that has no API token."""
from skyvern.forge.api_app import _bootstrap_sqlite
from skyvern.forge.sdk.core.security import create_access_token
from skyvern.forge.sdk.db.enums import OrganizationAuthTokenType
from skyvern.forge.sdk.db.models import Base
from skyvern.forge.sdk.services.local_org_auth_token_service import (
SKYVERN_LOCAL_DOMAIN,
ensure_local_org_with_id,
)
from skyvern.forge.sdk.services.org_auth_service import resolve_org_from_api_key
monkeypatch.chdir(tmp_path)
expected_org_id = "o_existing_local"
existing_api_key = create_access_token(expected_org_id)
monkeypatch.setenv("SKYVERN_API_KEY", existing_api_key)
monkeypatch.setattr("skyvern.config.settings.SKYVERN_API_KEY", existing_api_key)
async with sqlite_bootstrap_db.engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
org = await ensure_local_org_with_id(expected_org_id)
assert org.organization_id
assert (
await sqlite_bootstrap_db.get_valid_org_auth_token(org.organization_id, OrganizationAuthTokenType.api) is None
)
await _bootstrap_sqlite()
repaired_org = await sqlite_bootstrap_db.get_organization_by_domain(SKYVERN_LOCAL_DOMAIN)
assert repaired_org is not None
token = await sqlite_bootstrap_db.get_valid_org_auth_token(
repaired_org.organization_id, OrganizationAuthTokenType.api
)
assert token is not None
assert token.token == existing_api_key
validation = await resolve_org_from_api_key(existing_api_key, sqlite_bootstrap_db)
assert validation.organization.organization_id == expected_org_id
patched_env_writes.assert_not_called()
@pytest.mark.asyncio
async def test_sqlite_bootstrap_regenerates_invalid_existing_env_api_key(
monkeypatch: pytest.MonkeyPatch,
tmp_path: Path,
sqlite_bootstrap_db,
patched_env_writes,
) -> None:
"""Bootstrap must replace an unusable env key with a valid local JWT."""
from skyvern.forge.api_app import _bootstrap_sqlite
from skyvern.forge.sdk.db.enums import OrganizationAuthTokenType
from skyvern.forge.sdk.services.local_org_auth_token_service import SKYVERN_LOCAL_DOMAIN
from skyvern.forge.sdk.services.org_auth_service import resolve_org_from_api_key
monkeypatch.chdir(tmp_path)
monkeypatch.setenv("SKYVERN_API_KEY", "existing-test-key")
await _bootstrap_sqlite()
org = await sqlite_bootstrap_db.get_organization_by_domain(SKYVERN_LOCAL_DOMAIN)
assert org is not None
token = await sqlite_bootstrap_db.get_valid_org_auth_token(org.organization_id, OrganizationAuthTokenType.api)
assert token is not None
assert token.token != "existing-test-key"
validation = await resolve_org_from_api_key(token.token, sqlite_bootstrap_db)
assert validation.organization.organization_id == org.organization_id
@pytest.mark.asyncio
async def test_local_allows_env_only_persistent_mode(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
"""Skyvern.local() should honor env-only persistent mode without requiring ./.env."""
from skyvern import Skyvern
monkeypatch.chdir(tmp_path)
monkeypatch.setenv("DATABASE_STRING", "postgresql+psycopg://skyvern@localhost/skyvern")
monkeypatch.setenv("SKYVERN_API_KEY", "dummy-key")
embedded_client = httpx.AsyncClient()
with patch(
"skyvern.library.embedded_server_factory.create_embedded_server", return_value=embedded_client
) as factory:
skyvern = Skyvern.local()
try:
assert factory.call_args.kwargs["use_in_memory_db"] is False
assert getattr(skyvern, "_embedded_client") is embedded_client
finally:
await skyvern.aclose()
@pytest.mark.asyncio
async def test_local_persistent_mode_accepts_settings_without_dotenv(
monkeypatch: pytest.MonkeyPatch, tmp_path: Path
) -> None:
"""Persistent mode should accept DATABASE_STRING and SKYVERN_API_KEY via settings overrides."""
from skyvern import Skyvern
monkeypatch.chdir(tmp_path)
embedded_client = httpx.AsyncClient()
overrides = {
"DATABASE_STRING": "postgresql+psycopg://skyvern@localhost/skyvern",
"SKYVERN_API_KEY": "dummy-key",
}
with patch(
"skyvern.library.embedded_server_factory.create_embedded_server", return_value=embedded_client
) as factory:
skyvern = Skyvern.local(use_in_memory_db=False, settings=overrides)
try:
assert factory.call_args.kwargs["use_in_memory_db"] is False
assert factory.call_args.kwargs["settings_overrides"] == overrides
assert getattr(skyvern, "_embedded_client") is embedded_client
finally:
await skyvern.aclose()
@pytest.mark.asyncio
async def test_create_embedded_server_uses_settings_api_key_in_persistent_mode() -> None:
"""Persistent embedded bootstrap should read SKYVERN_API_KEY from settings overrides."""
from skyvern.library.embedded_server_factory import create_embedded_server
seen_headers: dict[bytes, bytes] = {}
async def fake_app(scope, receive, send): # type: ignore[no-untyped-def]
nonlocal seen_headers
seen_headers = dict(scope["headers"])
await send(
{
"type": "http.response.start",
"status": 200,
"headers": [(b"content-type", b"application/json")],
}
)
await send({"type": "http.response.body", "body": b"[]"})
client = create_embedded_server(
settings_overrides={"SKYVERN_API_KEY": "dummy-key"},
use_in_memory_db=False,
)
try:
with patch("skyvern.library.embedded_server_factory.create_api_app", return_value=fake_app):
response = await client.get("/")
finally:
await client.aclose()
assert response.status_code == 200
assert seen_headers[b"x-api-key"] == b"dummy-key"
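
The header assertion above implies the factory binds the API key once, at client construction. A minimal sketch of that wiring, assuming httpx's ASGITransport (the real create_embedded_server may differ; the function name and base_url here are illustrative):

import httpx

def make_embedded_client(app, api_key: str) -> httpx.AsyncClient:
    # Route requests in-process to the ASGI app; no network socket involved.
    transport = httpx.ASGITransport(app=app)
    # A default header means every request carries the key, matching the test.
    return httpx.AsyncClient(
        transport=transport,
        base_url="http://embedded",
        headers={"x-api-key": api_key},
    )
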
def test_settings_snapshot_restore_roundtrip() -> None:
"""Snapshot captures values and restore puts them back after mutation."""
from skyvern.config import settings
from skyvern.library.embedded_server_factory import _restore_settings, _snapshot_settings
original_db = settings.DATABASE_STRING
original_modules = settings.ADDITIONAL_MODULES[:]
snapshots = _snapshot_settings()
# Mutate settings
settings.DATABASE_STRING = "sqlite+aiosqlite:///:memory:"
settings.ADDITIONAL_MODULES = []
# Restore
_restore_settings(snapshots)
assert settings.DATABASE_STRING == original_db
assert settings.ADDITIONAL_MODULES == original_modules
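
Nothing in the roundtrip test above requires anything fancier than copy-in, copy-out. A sketch under that assumption (the key set and the deep-copying are inferred from the tests, not read from the implementation):

import copy
from typing import Any

def snapshot_settings(settings: Any, keys: frozenset[str]) -> dict[str, Any]:
    # Deep-copy so later in-place mutation can't corrupt the snapshot;
    # ADDITIONAL_MODULES is a list in the test above.
    return {key: copy.deepcopy(getattr(settings, key)) for key in keys}

def restore_settings(settings: Any, snapshot: dict[str, Any]) -> None:
    # Write every captured value back verbatim.
    for key, value in snapshot.items():
        setattr(settings, key, value)
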
def test_settings_snapshot_keys_cover_embedded_mutations() -> None:
"""Snapshot coverage should include both SQLite overrides and bootstrap-time mutations."""
from skyvern.library.embedded_server_factory import (
_BOOTSTRAP_RUNTIME_SETTINGS,
_SETTINGS_SNAPSHOT_KEYS,
_SQLITE_OVERRIDE_VALUES,
)
assert frozenset(_SQLITE_OVERRIDE_VALUES).issubset(_SETTINGS_SNAPSHOT_KEYS)
assert _BOOTSTRAP_RUNTIME_SETTINGS.issubset(_SETTINGS_SNAPSHOT_KEYS)
@pytest.mark.asyncio
async def test_bootstrap_failure_restores_settings() -> None:
"""If bootstrap fails, settings must be restored to pre-bootstrap values."""
from skyvern.config import settings
original_db = settings.DATABASE_STRING
from skyvern import Skyvern
    # Create a client with a bad setting that will cause a validation error
skyvern = Skyvern.local(
use_in_memory_db=True,
settings={"OTEL_ENABLED": True}, # Blocked setting
)
with pytest.raises(ValueError, match="Cannot override"):
await skyvern.get_workflows()
await skyvern.aclose()
# Settings should be restored to original values
assert settings.DATABASE_STRING == original_db
@pytest.mark.asyncio
async def test_bootstrap_failure_restores_forge_app() -> None:
"""If bootstrap fails, the forge app instance must be restored."""
from skyvern.forge import app as forge_app_holder
prev_inst = object.__getattribute__(forge_app_holder, "_inst")
from skyvern import Skyvern
skyvern = Skyvern.local(
use_in_memory_db=True,
settings={"OTEL_ENABLED": True}, # Blocked setting
)
with pytest.raises(ValueError, match="Cannot override"):
await skyvern.get_workflows()
await skyvern.aclose()
current_inst = object.__getattribute__(forge_app_holder, "_inst")
assert current_inst is prev_inst
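
Both failure tests reduce to the same guard: capture the mutable globals before bootstrap, and on any exception put them back before re-raising. A hedged sketch of that pattern (the bootstrap hook and the holder's _inst attribute are assumptions drawn from the tests):

from typing import Any, Callable

def bootstrap_with_rollback(settings: Any, holder: Any, bootstrap: Callable[[], None]) -> None:
    saved_db = settings.DATABASE_STRING  # pre-bootstrap value
    prev_inst = object.__getattribute__(holder, "_inst")  # current forge app
    try:
        bootstrap()
    except Exception:
        settings.DATABASE_STRING = saved_db  # undo the settings mutation...
        setattr(holder, "_inst", prev_inst)  # ...and restore the forge app
        raise  # surface the original bootstrap error
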

View file

@ -0,0 +1,64 @@
"""Tests for task_run status write-through sync."""
from unittest.mock import AsyncMock, MagicMock
import pytest
from skyvern.forge.sdk.db.repositories.tasks import TasksRepository
@pytest.fixture
def mock_session():
session = AsyncMock()
session.__aenter__ = AsyncMock(return_value=session)
session.__aexit__ = AsyncMock(return_value=False)
return session
@pytest.fixture
def tasks_repo(mock_session):
repo = TasksRepository.__new__(TasksRepository)
repo.Session = MagicMock(return_value=mock_session)
repo.debug_enabled = False
repo._is_retryable_error_fn = None
return repo
@pytest.mark.asyncio
async def test_sync_task_run_status_updates_matching_row(tasks_repo, mock_session):
"""sync_task_run_status should UPDATE task_runs where run_id matches."""
await tasks_repo.sync_task_run_status(
organization_id="org_1",
run_id="wr_123",
status="failed",
)
mock_session.execute.assert_called_once()
call_args = mock_session.execute.call_args
# The SQL should be an UPDATE on task_runs
sql_text = str(call_args[0][0])
assert "task_runs" in sql_text
assert "status" in sql_text
@pytest.mark.asyncio
async def test_sync_task_run_status_no_raise_on_error(tasks_repo, mock_session):
"""sync_task_run_status should swallow exceptions (best-effort)."""
mock_session.execute.side_effect = Exception("DB error")
# Should NOT raise
await tasks_repo.sync_task_run_status(
organization_id="org_1",
run_id="nonexistent",
status="failed",
)
def test_terminal_statuses_match_run_status():
"""Guard: TERMINAL_STATUSES and RunStatus.is_final() must agree.
If this fails, a new terminal status was added to one but not the other.
Update TERMINAL_STATUSES in skyvern/schemas/runs.py (the single source of truth).
"""
from skyvern.forge.sdk.schemas.runs import TERMINAL_STATUSES
from skyvern.schemas.runs import RunStatus
assert set(TERMINAL_STATUSES) == {s.value for s in RunStatus if s.is_final()}
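
The two sync tests pin the contract (one UPDATE on task_runs, never raise) but not the implementation. A best-effort write-through satisfying both could look roughly like this (the session plumbing is an assumption; the table and column names follow the migration in this PR):

import sqlalchemy as sa

async def sync_task_run_status(session_factory, organization_id: str, run_id: str, status: str) -> None:
    try:
        async with session_factory() as session:
            await session.execute(
                sa.text(
                    "UPDATE task_runs SET status = :status "
                    "WHERE organization_id = :org_id AND run_id = :run_id"
                ),
                {"status": status, "org_id": organization_id, "run_id": run_id},
            )
            await session.commit()
    except Exception:
        # Best-effort by design: a failed sync must never break the caller.
        pass
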

View file

@ -0,0 +1,51 @@
from types import SimpleNamespace
import pytest
from skyvern.forge import app
from skyvern.forge.sdk.core.hashing import generate_url_hash
from skyvern.schemas.runs import RunStatus, RunType
from skyvern.services.task_v2_service import DEFAULT_WORKFLOW_TITLE, initialize_task_v2
@pytest.mark.asyncio
async def test_initialize_task_v2_populates_task_run_url_when_user_url_is_known() -> None:
organization = SimpleNamespace(organization_id="org_123")
user_url = "https://example.com"
app.DATABASE.create_task_v2.return_value = SimpleNamespace(
observer_cruise_id="tsk_123",
workflow_run_id=None,
url=user_url,
)
app.WORKFLOW_SERVICE.create_empty_workflow.return_value = SimpleNamespace(
workflow_id="wf_123",
workflow_permanent_id="wpid_123",
title=DEFAULT_WORKFLOW_TITLE,
)
app.WORKFLOW_SERVICE.setup_workflow_run.return_value = SimpleNamespace(workflow_run_id="wr_123")
app.DATABASE.update_task_v2.return_value = SimpleNamespace(
observer_cruise_id="tsk_123",
workflow_run_id="wr_123",
workflow_id="wf_123",
workflow_permanent_id="wpid_123",
url=user_url,
)
app.DATABASE.create_task_run.return_value = SimpleNamespace(run_id="tsk_123")
await initialize_task_v2(
organization=organization,
user_prompt="Open the page",
user_url=user_url,
create_task_run=True,
)
app.DATABASE.create_task_run.assert_awaited_once_with(
task_run_type=RunType.task_v2,
organization_id="org_123",
run_id="tsk_123",
title=DEFAULT_WORKFLOW_TITLE,
url=user_url,
url_hash=generate_url_hash(user_url),
status=RunStatus.queued,
)

uv.lock generated
View file

@ -7,6 +7,10 @@ resolution-markers = [
    "python_full_version < '3.12'",
]

+[options]
+exclude-newer = "2026-03-24T13:53:47.750565Z"
+exclude-newer-span = "P7D"
+
[manifest]
constraints = [
    { name = "authlib", specifier = ">=1.6.9" },

@ -654,16 +658,16 @@ wheels = [
[[package]]
name = "build"
-version = "1.4.2"
+version = "1.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "colorama", marker = "os_name == 'nt'" },
    { name = "packaging" },
    { name = "pyproject-hooks" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/6c/1d/ab15c8ac57f4ee8778d7633bc6685f808ab414437b8644f555389cdc875e/build-1.4.2.tar.gz", hash = "sha256:35b14e1ee329c186d3f08466003521ed7685ec15ecffc07e68d706090bf161d1", size = 83433, upload-time = "2026-03-25T14:20:27.659Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/42/18/94eaffda7b329535d91f00fe605ab1f1e5cd68b2074d03f255c7d250687d/build-1.4.0.tar.gz", hash = "sha256:f1b91b925aa322be454f8330c6fb48b465da993d1e7e7e6fa35027ec49f3c936", size = 50054, upload-time = "2026-01-08T16:41:47.696Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/4a/57/3b7d4dd193ade4641c865bc2b93aeeb71162e81fc348b8dad020215601ed/build-1.4.2-py3-none-any.whl", hash = "sha256:7a4d8651ea877cb2a89458b1b198f2e69f536c95e89129dbf5d448045d60db88", size = 24643, upload-time = "2026-03-25T14:20:26.568Z" },
+    { url = "https://files.pythonhosted.org/packages/c5/0d/84a4380f930db0010168e0aa7b7a8fed9ba1835a8fbb1472bc6d0201d529/build-1.4.0-py3-none-any.whl", hash = "sha256:6a07c1b8eb6f2b311b96fcbdbce5dab5fe637ffda0fd83c9cac622e927501596", size = 24141, upload-time = "2026-01-08T16:41:46.453Z" },
]

[[package]]

@ -764,7 +768,7 @@ wheels = [
[[package]]
name = "cfn-lint"
-version = "1.47.1"
+version = "1.47.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "aws-sam-translator" },

@ -775,9 +779,9 @@ dependencies = [
    { name = "sympy" },
    { name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/10/34/e66811016e7709cab78b0cf896437b922d7537986ac727344663b6cc2044/cfn_lint-1.47.1.tar.gz", hash = "sha256:b2eedbcee3aa104602f79933e3ad74c01f0fa1e226b70327118926fd78d8d3f1", size = 3672271, upload-time = "2026-03-24T15:59:34.526Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/61/f2/67074ab011d47e973b57e1efb6adf798fbaab3873b9a3edcf6f8b9c33639/cfn_lint-1.47.0.tar.gz", hash = "sha256:29a9377febd6c8fa30903d437a010bcf042426b220d009101c6cf8523819291f", size = 3673549, upload-time = "2026-03-17T17:14:45.95Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/a5/88/19802ef0e1ef6259c4bc4b58226c0e7ff8b7ae93806ca32354c007e3480a/cfn_lint-1.47.1-py3-none-any.whl", hash = "sha256:3a4b5dba0fd03c24f2bc0e112a88ad90fa29014971e881b8f1e297d22f398a97", size = 5299292, upload-time = "2026-03-24T15:59:31.86Z" },
+    { url = "https://files.pythonhosted.org/packages/16/2e/c552923e89004a268f94520de836525523132dd76ec52d3476a076d9dda2/cfn_lint-1.47.0-py3-none-any.whl", hash = "sha256:1148f37f2733cdb361897746eda903b5414ad5dd7e5df9ea634ac9f6c67975c9", size = 5281656, upload-time = "2026-03-17T17:14:43.607Z" },
]

[[package]]

@ -1412,7 +1416,7 @@ wheels = [
[[package]]
name = "google-cloud-aiplatform"
-version = "1.143.0"
+version = "1.142.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "docstring-parser" },

@ -1428,9 +1432,9 @@ dependencies = [
    { name = "pydantic" },
    { name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/a7/08/939fb05870fdf155410a927e22f5b053d49f18e215618e102fba1d8bb147/google_cloud_aiplatform-1.143.0.tar.gz", hash = "sha256:1f0124a89795a6b473deb28724dd37d95334205df3a9c9c48d0b8d7a3d5d5cc4", size = 10215389, upload-time = "2026-03-25T18:30:15.444Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/41/0d/3063a0512d60cf18854a279e00ccb796429545464345ef821cf77cb93d05/google_cloud_aiplatform-1.142.0.tar.gz", hash = "sha256:87b49e002703dc14885093e9b264587db84222bef5f70f5a442d03f41beecdd1", size = 10207993, upload-time = "2026-03-20T22:49:13.797Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/90/14/16323e604e79dc63b528268f97a841c2c29dd8eb16395de6bf530c1a5ebe/google_cloud_aiplatform-1.143.0-py2.py3-none-any.whl", hash = "sha256:78df97d044859f743a9cc48b89a260d33579b0d548b1589bb3ae9f4c2afc0c5a", size = 8392705, upload-time = "2026-03-25T18:30:11.496Z" },
+    { url = "https://files.pythonhosted.org/packages/59/8b/f29646d3fa940f0e38cfcc12137f4851856b50d7486a3c05103ebc78d82d/google_cloud_aiplatform-1.142.0-py2.py3-none-any.whl", hash = "sha256:17c91db9b613cbbafb2c36335b123686aeb2b4b8448be5134b565ae07165a39a", size = 8388991, upload-time = "2026-03-20T22:49:10.334Z" },
]

[[package]]

@ -1900,7 +1904,7 @@ wheels = [
[[package]]
name = "huggingface-hub"
-version = "1.8.0"
+version = "1.7.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "filelock" },

@ -1913,9 +1917,9 @@ dependencies = [
    { name = "typer" },
    { name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/8e/2a/a847fd02261cd051da218baf99f90ee7c7040c109a01833db4f838f25256/huggingface_hub-1.8.0.tar.gz", hash = "sha256:c5627b2fd521e00caf8eff4ac965ba988ea75167fad7ee72e17f9b7183ec63f3", size = 735839, upload-time = "2026-03-25T16:01:28.152Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/19/15/eafc1c57bf0f8afffb243dcd4c0cceb785e956acc17bba4d9bf2ae21fc9c/huggingface_hub-1.7.2.tar.gz", hash = "sha256:7f7e294e9bbb822e025bdb2ada025fa4344d978175a7f78e824d86e35f7ab43b", size = 724684, upload-time = "2026-03-20T10:36:08.767Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/a9/ae/8a3a16ea4d202cb641b51d2681bdd3d482c1c592d7570b3fa264730829ce/huggingface_hub-1.8.0-py3-none-any.whl", hash = "sha256:d3eb5047bd4e33c987429de6020d4810d38a5bef95b3b40df9b17346b7f353f2", size = 625208, upload-time = "2026-03-25T16:01:26.603Z" },
+    { url = "https://files.pythonhosted.org/packages/08/de/3ad061a05f74728927ded48c90b73521b9a9328c85d841bdefb30e01fb85/huggingface_hub-1.7.2-py3-none-any.whl", hash = "sha256:288f33a0a17b2a73a1359e2a5fd28d1becb2c121748c6173ab8643fb342c850e", size = 618036, upload-time = "2026-03-20T10:36:06.824Z" },
]

[[package]]

@ -3192,26 +3196,26 @@ wheels = [
[[package]]
name = "nh3"
-version = "0.3.4"
+version = "0.3.3"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/4e/86/f8d3a7c9bd1bbaa181f6312c757e0b74d25f71ecf84ea3c0dc5e0f01840d/nh3-0.3.4.tar.gz", hash = "sha256:96709a379997c1b28c8974146ca660b0dcd3794f4f6d50c1ea549bab39ac6ade", size = 19520, upload-time = "2026-03-25T10:57:30.789Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/cc/37/ab55eb2b05e334ff9a1ad52c556ace1f9c20a3f63613a165d384d5387657/nh3-0.3.3.tar.gz", hash = "sha256:185ed41b88c910b9ca8edc89ca3b4be688a12cb9de129d84befa2f74a0039fee", size = 18968, upload-time = "2026-02-14T09:35:15.664Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/4a/57/a97955bc95960cfb1f0517043d60a121f4ba93fde252d4d9ffd3c2a9eead/nh3-0.3.4-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:d8bebcb20ab4b91858385cd98fe58046ec4a624275b45ef9b976475604f45b49", size = 1439519, upload-time = "2026-03-25T10:57:12.019Z" },
+    { url = "https://files.pythonhosted.org/packages/13/3e/aef8cf8e0419b530c95e96ae93a5078e9b36c1e6613eeb1df03a80d5194e/nh3-0.3.3-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e8ee96156f7dfc6e30ecda650e480c5ae0a7d38f0c6fafc3c1c655e2500421d9", size = 1448640, upload-time = "2026-02-14T09:34:49.316Z" },
-    { url = "https://files.pythonhosted.org/packages/2b/60/c9a33361da8cde7c7760f091cd10467bc470634e4eea31c8bb70935b00a4/nh3-0.3.4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d825722a1e8cbc87d7ca1e47ffb1d2a6cf343ad4c1b8465becf7cadcabcdfd0", size = 833798, upload-time = "2026-03-25T10:57:13.264Z" },
+    { url = "https://files.pythonhosted.org/packages/ca/43/d2011a4f6c0272cb122eeff40062ee06bb2b6e57eabc3a5e057df0d582df/nh3-0.3.3-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45fe0d6a607264910daec30360c8a3b5b1500fd832d21b2da608256287bcb92d", size = 839405, upload-time = "2026-02-14T09:34:50.779Z" },
-    { url = "https://files.pythonhosted.org/packages/6b/19/9487790780b8c94eacca37866c1270b747a4af8e244d43b3b550fddbbf62/nh3-0.3.4-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4aa8b43e68c26b68069a3b6cef09de166d1d7fa140cf8d77e409a46cbf742e44", size = 820414, upload-time = "2026-03-25T10:57:14.236Z" },
+    { url = "https://files.pythonhosted.org/packages/f8/f3/965048510c1caf2a34ed04411a46a04a06eb05563cd06f1aa57b71eb2bc8/nh3-0.3.3-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5bc1d4b30ba1ba896669d944b6003630592665974bd11a3dc2f661bde92798a7", size = 825849, upload-time = "2026-02-14T09:34:52.622Z" },
-    { url = "https://files.pythonhosted.org/packages/6b/b4/c6a340dd321d20b1e4a663307032741da045685c87403926c43656f6f5ec/nh3-0.3.4-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f5f214618ad5eff4f2a6b13a8d4da4d9e7f37c569d90a13fb9f0caaf7d04fe21", size = 1061531, upload-time = "2026-03-25T10:57:15.384Z" },
+    { url = "https://files.pythonhosted.org/packages/78/99/b4bbc6ad16329d8db2c2c320423f00b549ca3b129c2b2f9136be2606dbb0/nh3-0.3.3-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f433a2dd66545aad4a720ad1b2150edcdca75bfff6f4e6f378ade1ec138d5e77", size = 1068303, upload-time = "2026-02-14T09:34:54.179Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/49/f6b4b474e0032e4bcbb7174b44e4cf6915670e09c62421deb06ccfcb88b8/nh3-0.3.4-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3390e4333883673a684ce16c1716b481e91782d6f56dec5c85fed9feedb23382", size = 1021889, upload-time = "2026-03-25T10:57:16.454Z" },
+    { url = "https://files.pythonhosted.org/packages/3f/34/3420d97065aab1b35f3e93ce9c96c8ebd423ce86fe84dee3126790421a2a/nh3-0.3.3-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52e973cb742e95b9ae1b35822ce23992428750f4b46b619fe86eba4205255b30", size = 1029316, upload-time = "2026-02-14T09:34:56.186Z" },
-    { url = "https://files.pythonhosted.org/packages/43/da/e52a6941746d1f974752af3fc8591f1dbcdcf7fd8c726c7d99f444ba820e/nh3-0.3.4-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18a2e44ccb29cbb45071b8f3f2dab9ebfb41a6516f328f91f1f1fd18196239a4", size = 912965, upload-time = "2026-03-25T10:57:17.624Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/9a/99eda757b14e596fdb2ca5f599a849d9554181aa899274d0d183faef4493/nh3-0.3.3-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c730617bdc15d7092dcc0469dc2826b914c8f874996d105b4bc3842a41c1cd9", size = 919944, upload-time = "2026-02-14T09:34:57.886Z" },
-    { url = "https://files.pythonhosted.org/packages/d6/b7/ec1cbc6b297a808c513f59f501656389623fc09ad6a58c640851289c7854/nh3-0.3.4-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0961a27dc2057c38d0364cb05880e1997ae1c80220cbc847db63213720b8f304", size = 804975, upload-time = "2026-03-25T10:57:18.994Z" },
+    { url = "https://files.pythonhosted.org/packages/6f/84/c0dc75c7fb596135f999e59a410d9f45bdabb989f1cb911f0016d22b747b/nh3-0.3.3-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e98fa3dbfd54e25487e36ba500bc29bca3a4cab4ffba18cfb1a35a2d02624297", size = 811461, upload-time = "2026-02-14T09:34:59.65Z" },
-    { url = "https://files.pythonhosted.org/packages/a9/56/b1275aa2c6510191eed76178da4626b0900402439cb9f27d6b9bf7c6d5e9/nh3-0.3.4-cp38-abi3-manylinux_2_31_riscv64.whl", hash = "sha256:9337517edb7c10228252cce2898e20fb3d77e32ffaccbb3c66897927d74215a0", size = 833400, upload-time = "2026-03-25T10:57:20.086Z" },
+    { url = "https://files.pythonhosted.org/packages/7e/ec/b1bf57cab6230eec910e4863528dc51dcf21b57aaf7c88ee9190d62c9185/nh3-0.3.3-cp38-abi3-manylinux_2_31_riscv64.whl", hash = "sha256:3a62b8ae7c235481715055222e54c682422d0495a5c73326807d4e44c5d14691", size = 840360, upload-time = "2026-02-14T09:35:01.444Z" },
-    { url = "https://files.pythonhosted.org/packages/7c/a5/5d574ffa3c6e49a5364d1b25ebad165501c055340056671493beb467a15e/nh3-0.3.4-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d866701affe67a5171b916b5c076e767a74c6a9efb7fb2006eb8d3c5f9a293d5", size = 854277, upload-time = "2026-03-25T10:57:21.433Z" },
+    { url = "https://files.pythonhosted.org/packages/37/5e/326ae34e904dde09af1de51219a611ae914111f0970f2f111f4f0188f57e/nh3-0.3.3-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc305a2264868ec8fa16548296f803d8fd9c1fa66cd28b88b605b1bd06667c0b", size = 859872, upload-time = "2026-02-14T09:35:03.348Z" },
-    { url = "https://files.pythonhosted.org/packages/79/36/8aeb2ab21517cefa212db109e41024e02650716cb42bf293d0a88437a92d/nh3-0.3.4-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:47d749d99ae005ab19517224140b280dd56e77b33afb82f9b600e106d0458003", size = 1022021, upload-time = "2026-03-25T10:57:22.433Z" },
+    { url = "https://files.pythonhosted.org/packages/09/38/7eba529ce17ab4d3790205da37deabb4cb6edcba15f27b8562e467f2fc97/nh3-0.3.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:90126a834c18af03bfd6ff9a027bfa6bbf0e238527bc780a24de6bd7cc1041e2", size = 1023550, upload-time = "2026-02-14T09:35:04.829Z" },
-    { url = "https://files.pythonhosted.org/packages/9c/95/9fd860997685e64abe2d5a995ca2eb5004c0fb6d6585429612a7871548b9/nh3-0.3.4-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:f987cb56458323405e8e5ea827e1befcf141ffa0c0ac797d6d02e6b646056d9a", size = 1103526, upload-time = "2026-03-25T10:57:23.487Z" },
+    { url = "https://files.pythonhosted.org/packages/05/a2/556fdecd37c3681b1edee2cf795a6799c6ed0a5551b2822636960d7e7651/nh3-0.3.3-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:24769a428e9e971e4ccfb24628f83aaa7dc3c8b41b130c8ddc1835fa1c924489", size = 1105212, upload-time = "2026-02-14T09:35:06.821Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/0d/df545070614c1007f0109bb004230226c9000e7857c9785583ec25cda9d7/nh3-0.3.4-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:883d5a6d6ee8078c4afc8e96e022fe579c4c265775ff6ee21e39b8c542cabab3", size = 1068050, upload-time = "2026-03-25T10:57:24.624Z" },
+    { url = "https://files.pythonhosted.org/packages/dd/e3/5db0b0ad663234967d83702277094687baf7c498831a2d3ad3451c11770f/nh3-0.3.3-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:b7a18ee057761e455d58b9d31445c3e4b2594cff4ddb84d2e331c011ef46f462", size = 1069970, upload-time = "2026-02-14T09:35:08.504Z" },
-    { url = "https://files.pythonhosted.org/packages/94/d5/17b016df52df052f714c53be71df26a1943551d9931e9383b92c998b88f8/nh3-0.3.4-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:75643c22f5092d8e209f766ee8108c400bc1e44760fc94d2d638eb138d18f853", size = 1046037, upload-time = "2026-03-25T10:57:25.799Z" },
+    { url = "https://files.pythonhosted.org/packages/79/b2/2ea21b79c6e869581ce5f51549b6e185c4762233591455bf2a326fb07f3b/nh3-0.3.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a4b2c1f3e6f3cbe7048e17f4fefad3f8d3e14cc0fd08fb8599e0d5653f6b181", size = 1047588, upload-time = "2026-02-14T09:35:09.911Z" },
-    { url = "https://files.pythonhosted.org/packages/51/39/49f737907e6ab2b4ca71855d3bd63dd7958862e9c8b94fb4e5b18ccf6988/nh3-0.3.4-cp38-abi3-win32.whl", hash = "sha256:72e4e9ca1c4bd41b4a28b0190edc2e21e3f71496acd36a0162858e1a28db3d7e", size = 609542, upload-time = "2026-03-25T10:57:27.112Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/92/2e434619e658c806d9c096eed2cdff9a883084299b7b19a3f0824eb8e63d/nh3-0.3.3-cp38-abi3-win32.whl", hash = "sha256:e974850b131fdffa75e7ad8e0d9c7a855b96227b093417fdf1bd61656e530f37", size = 616179, upload-time = "2026-02-14T09:35:11.366Z" },
-    { url = "https://files.pythonhosted.org/packages/73/4f/af8e9071d7464575a7316831938237ffc9d92d27f163dbdd964b1309cd9b/nh3-0.3.4-cp38-abi3-win_amd64.whl", hash = "sha256:c10b1f0c741e257a5cb2978d6bac86e7c784ab20572724b20c6402c2e24bce75", size = 624244, upload-time = "2026-03-25T10:57:28.302Z" },
+    { url = "https://files.pythonhosted.org/packages/73/88/1ce287ef8649dc51365b5094bd3713b76454838140a32ab4f8349973883c/nh3-0.3.3-cp38-abi3-win_amd64.whl", hash = "sha256:2efd17c0355d04d39e6d79122b42662277ac10a17ea48831d90b46e5ef7e4fc0", size = 631159, upload-time = "2026-02-14T09:35:12.77Z" },
-    { url = "https://files.pythonhosted.org/packages/44/0c/37695d6b0168f6714b5c492331636a9e6123d6ec22d25876c68d06eab1b8/nh3-0.3.4-cp38-abi3-win_arm64.whl", hash = "sha256:43ad4eedee7e049b9069bc015b7b095d320ed6d167ecec111f877de1540656e9", size = 616649, upload-time = "2026-03-25T10:57:29.623Z" },
+    { url = "https://files.pythonhosted.org/packages/31/f1/b4835dbde4fb06f29db89db027576d6014081cd278d9b6751facc3e69e43/nh3-0.3.3-cp38-abi3-win_arm64.whl", hash = "sha256:b838e619f483531483d26d889438e53a880510e832d2aafe73f93b7b1ac2bce2", size = 616645, upload-time = "2026-02-14T09:35:14.062Z" },
]

[[package]]

@ -5175,7 +5179,7 @@ wheels = [
[[package]]
name = "requests"
-version = "2.33.0"
+version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "certifi" },

@ -5183,9 +5187,9 @@ dependencies = [
    { name = "idna" },
    { name = "urllib3" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" },
+    { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]

[[package]]

@ -6120,16 +6124,16 @@ wheels = [
[[package]]
name = "types-boto3"
-version = "1.42.75"
+version = "1.42.74"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "botocore-stubs" },
    { name = "types-s3transfer" },
    { name = "typing-extensions", marker = "python_full_version < '3.12'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/ed/e0/f4cb0e91d088bb96f5e4ed18b0201d802047ed69d8d62561eb7b81bd3881/types_boto3-1.42.75.tar.gz", hash = "sha256:2a41afd0cbc31171fdf93215d46fa9cfa450dc52b6775bccfbac633ce30d8617", size = 101716, upload-time = "2026-03-24T21:54:22.815Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/66/d2/5f1c3dfd75993084c8cf5d31e169c342fb394d7905d6438428245661c39b/types_boto3-1.42.74.tar.gz", hash = "sha256:8013a2dfc1ba398217d2d2dc6b54b37494df65cbae363f9430af4824697cb655", size = 101691, upload-time = "2026-03-23T19:56:24.402Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/ec/d5/4477cfd9e3f41b4dd7492b998c80985e279926ad06686fd169b77b78a6e1/types_boto3-1.42.75-py3-none-any.whl", hash = "sha256:c285c4d8e9358ed95feff5fe108847e52223ae344cc24699db29a4ec4fa37b43", size = 69908, upload-time = "2026-03-24T21:54:09.684Z" },
+    { url = "https://files.pythonhosted.org/packages/6c/b1/8836349f6c9a3bf2066fb8eaec74180406f0a11e8ce00fc33232f97a8c38/types_boto3-1.42.74-py3-none-any.whl", hash = "sha256:3791c49c694b5c6d980e38994032eff4ed90ab4f7b1ad4fb34f3501b7fc60d02", size = 69907, upload-time = "2026-03-23T19:56:19.283Z" },
]

[package.optional-dependencies]

@ -6139,14 +6143,14 @@ full = [
[[package]]
name = "types-boto3-full"
-version = "1.42.75"
+version = "1.42.74"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "typing-extensions", marker = "python_full_version < '3.12'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/d0/96/e69885de1e3018b3319fd65e537cb9adf30fc56ad2a72ecff4d86293f737/types_boto3_full-1.42.75.tar.gz", hash = "sha256:3c1ca5f6c153aef801449717f21f37c79a7e7a08c7fc744a606600c4ab8b0dbe", size = 8500715, upload-time = "2026-03-25T01:45:21.413Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c9/4d/5fa22d7c99b57e9f92c522b10d96caab0dc638880863cc86901e7f9316da/types_boto3_full-1.42.74.tar.gz", hash = "sha256:827c31e18c1cc8720b826b3c3b8f7be3612c5fc3800d61d0591e9d65257ecb0a", size = 8499614, upload-time = "2026-03-24T01:27:55.394Z" }
wheels = [
-    { url = "https://files.pythonhosted.org/packages/d4/9a/30c19c379728d835674b47842a0620e566eb661fc345e45d9b8f87656c68/types_boto3_full-1.42.75-py3-none-any.whl", hash = "sha256:95d8e378d67f032fea1440fbb90361c68a47fdf41e8c1ffd86539375d6d845ea", size = 12908515, upload-time = "2026-03-25T01:45:17.202Z" },
+    { url = "https://files.pythonhosted.org/packages/34/c0/9b55d89c3ec01aa91518cae52b73a03af6ad8c07e46551d865f40039fc23/types_boto3_full-1.42.74-py3-none-any.whl", hash = "sha256:654f814175af53caa1c34498883c6edcdb8555066e19bbe800f98d1d8b862d29", size = 12908188, upload-time = "2026-03-24T01:27:51.64Z" },
]

[[package]]