Mirror of https://github.com/Skyvern-AI/skyvern.git, synced 2025-09-14 09:19:40 +00:00
feat: migrate workflow use_cache field to generate_script (#3154)
Co-authored-by: devsy-bot <no-reply@devsy.ai>
Co-authored-by: Shuchang Zheng <wintonzheng0325@gmail.com>
Parent: 16596e5c61
Commit: 1a3938a756
13 changed files with 56 additions and 25 deletions
@@ -0,0 +1,33 @@
+"""workflow.use_cache -> generate_script
+
+Revision ID: 944ef972e5a8
+Revises: a027553be970
+Create Date: 2025-08-10 15:57:46.353038+00:00
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "944ef972e5a8"
+down_revision: Union[str, None] = "a027553be970"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column("workflows", sa.Column("generate_script", sa.Boolean(), nullable=False))
+    op.drop_column("workflows", "use_cache")
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column("workflows", sa.Column("use_cache", sa.BOOLEAN(), autoincrement=False, nullable=False))
+    op.drop_column("workflows", "generate_script")
+    # ### end Alembic commands ###
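Note that the upgrade adds generate_script as NOT NULL without a server default, which can fail if the workflows table already contains rows (the downgrade has the same issue with use_cache). A minimal sketch of a safer upgrade, assuming PostgreSQL and the same table and column names; this is illustrative, not the committed migration:

    import sqlalchemy as sa
    from alembic import op


    def upgrade() -> None:
        # Backfill existing rows via a server default, then drop the default
        # so future inserts have to set the value explicitly.
        op.add_column(
            "workflows",
            sa.Column("generate_script", sa.Boolean(), nullable=False, server_default=sa.text("false")),
        )
        op.alter_column("workflows", "generate_script", server_default=None)
        op.drop_column("workflows", "use_cache")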
@@ -170,7 +170,7 @@ function WorkflowDebugger() {
     extraHttpHeaders: workflow.extra_http_headers
       ? JSON.stringify(workflow.extra_http_headers)
       : null,
-    useScriptCache: workflow.use_cache,
+    useScriptCache: workflow.generate_script,
     scriptCacheKey: workflow.cache_key,
   };
@@ -56,7 +56,7 @@ function WorkflowEditor() {
     extraHttpHeaders: workflow.extra_http_headers
       ? JSON.stringify(workflow.extra_http_headers)
       : null,
-    useScriptCache: workflow.use_cache,
+    useScriptCache: workflow.generate_script,
     scriptCacheKey: workflow.cache_key,
   };
@@ -137,7 +137,7 @@ function StartNode({ id, data }: NodeProps<StartNode>) {
       <OrgWalled className="flex flex-col gap-4">
         <div className="space-y-2">
           <div className="flex items-center gap-2">
-            <Label>Use Script Cache</Label>
+            <Label>Generate Script</Label>
             <HelpTooltip content="Generate & use cached scripts for faster execution." />
             <Switch
               className="ml-auto"
@@ -150,8 +150,8 @@ function StartNode({ id, data }: NodeProps<StartNode>) {
         </div>
         <div className="space-y-2">
           <div className="flex gap-2">
-            <Label>Script Cache Key</Label>
-            <HelpTooltip content="A templated name, comprised of one or more of your parameters, that defines the key for your script cache." />
+            <Label>Script Key</Label>
+            <HelpTooltip content="A constant string or templated name, comprised of one or more of your parameters. It's the unique key for a workflow script." />
           </div>
           <Input
             value={inputs.scriptCacheKey ?? ""}
@@ -504,7 +504,7 @@ export type WorkflowApiResponse = {
   created_at: string;
   modified_at: string;
   deleted_at: string | null;
-  use_cache: boolean;
+  generate_script: boolean;
   cache_key: string | null;
 };
@@ -14,7 +14,7 @@ export type WorkflowCreateYAMLRequest = {
   is_saved_task?: boolean;
   max_screenshot_scrolls?: number | null;
   extra_http_headers?: Record<string, string> | null;
-  use_cache?: boolean;
+  generate_script?: boolean;
   cache_key?: string | null;
 };
@@ -110,7 +110,7 @@ const useWorkflowSave = () => {
       max_screenshot_scrolls: saveData.settings.maxScreenshotScrolls,
       totp_verification_url: saveData.workflow.totp_verification_url,
       extra_http_headers: extraHttpHeaders,
-      use_cache: saveData.settings.useScriptCache,
+      generate_script: saveData.settings.useScriptCache,
       cache_key: saveData.settings.scriptCacheKey,
       workflow_definition: {
         parameters: saveData.parameters,
@@ -1363,7 +1363,7 @@ class AgentDB:
         version: int | None = None,
         is_saved_task: bool = False,
         status: WorkflowStatus = WorkflowStatus.published,
-        use_cache: bool = False,
+        generate_script: bool = False,
         cache_key: str | None = None,
     ) -> Workflow:
         async with self.Session() as session:
@@ -1382,7 +1382,7 @@ class AgentDB:
                 model=model,
                 is_saved_task=is_saved_task,
                 status=status,
-                use_cache=use_cache,
+                generate_script=generate_script,
                 cache_key=cache_key,
             )
             if workflow_permanent_id:
@@ -1562,7 +1562,7 @@ class AgentDB:
         description: str | None = None,
         workflow_definition: dict[str, Any] | None = None,
         version: int | None = None,
-        use_cache: bool | None = None,
+        generate_script: bool | None = None,
         cache_key: str | None = None,
     ) -> Workflow:
         try:
@@ -1581,8 +1581,8 @@ class AgentDB:
                     workflow.workflow_definition = workflow_definition
                 if version is not None:
                     workflow.version = version
-                if use_cache is not None:
-                    workflow.use_cache = use_cache
+                if generate_script is not None:
+                    workflow.generate_script = generate_script
                 if cache_key is not None:
                     workflow.cache_key = cache_key
                 await session.commit()
@@ -241,7 +241,7 @@ class WorkflowModel(Base):
     persist_browser_session = Column(Boolean, default=False, nullable=False)
     model = Column(JSON, nullable=True)
     status = Column(String, nullable=False, default="published")
-    use_cache = Column(Boolean, default=False, nullable=False)
+    generate_script = Column(Boolean, default=False, nullable=False)
     cache_key = Column(String, nullable=True)

     created_at = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
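For reference, a minimal sketch of filtering on the renamed column in SQLAlchemy 2.0 select style; illustrative only, and it assumes WorkflowModel is importable from the module shown above:

    from sqlalchemy import select

    # Workflows that have script generation enabled; the column defaults to False,
    # so rows that never set the flag are excluded.
    stmt = select(WorkflowModel).where(WorkflowModel.generate_script.is_(True))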
@@ -262,7 +262,7 @@ def convert_to_workflow(workflow_model: WorkflowModel, debug_enabled: bool = False
         deleted_at=workflow_model.deleted_at,
         status=WorkflowStatus(workflow_model.status),
         extra_http_headers=workflow_model.extra_http_headers,
-        use_cache=workflow_model.use_cache,
+        generate_script=workflow_model.generate_script,
         cache_key=workflow_model.cache_key,
     )
@@ -80,7 +80,7 @@ class Workflow(BaseModel):
     status: WorkflowStatus = WorkflowStatus.published
     max_screenshot_scrolls: int | None = None
     extra_http_headers: dict[str, str] | None = None
-    use_cache: bool = False
+    generate_script: bool = False
     cache_key: str | None = None

     created_at: datetime
@@ -447,5 +447,5 @@ class WorkflowCreateYAMLRequest(BaseModel):
     max_screenshot_scrolls: int | None = None
     extra_http_headers: dict[str, str] | None = None
     status: WorkflowStatus = WorkflowStatus.published
-    use_cache: bool = False
+    generate_script: bool = False
     cache_key: str | None = None
@@ -621,8 +621,8 @@ class WorkflowService:
             organization_id=organization_id,
         )

-        # TODO: generate script for workflow if the workflow.use_cache is True AND there's no script cached for the workflow
-        if workflow.use_cache:
+        # generate script for workflow if the workflow.generate_script is True AND there's no script cached for the workflow
+        if workflow.generate_script:
             await self.generate_script_for_workflow(workflow=workflow, workflow_run=workflow_run)

         return workflow_run
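The retained comment still describes skipping generation when a script is already cached, while the visible branch checks only the flag. A purely illustrative sketch of that extra guard; get_cached_script is a hypothetical helper, not an API in this codebase:

        # Hypothetical guard: only regenerate when no script is cached yet.
        # get_cached_script is illustrative and does not exist in this diff.
        if workflow.generate_script:
            cached_script = await self.get_cached_script(workflow=workflow)
            if cached_script is None:
                await self.generate_script_for_workflow(workflow=workflow, workflow_run=workflow_run)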
@@ -645,7 +645,7 @@ class WorkflowService:
         is_saved_task: bool = False,
         status: WorkflowStatus = WorkflowStatus.published,
         extra_http_headers: dict[str, str] | None = None,
-        use_cache: bool = False,
+        generate_script: bool = False,
         cache_key: str | None = None,
     ) -> Workflow:
         return await app.DATABASE.create_workflow(
@@ -665,7 +665,7 @@ class WorkflowService:
             is_saved_task=is_saved_task,
             status=status,
             extra_http_headers=extra_http_headers,
-            use_cache=use_cache,
+            generate_script=generate_script,
             cache_key=cache_key,
         )
@@ -1548,7 +1548,7 @@ class WorkflowService:
                 version=existing_version + 1,
                 is_saved_task=request.is_saved_task,
                 status=request.status,
-                use_cache=request.use_cache,
+                generate_script=request.generate_script,
                 cache_key=request.cache_key,
             )
         else:
@@ -1567,7 +1567,7 @@ class WorkflowService:
                 extra_http_headers=request.extra_http_headers,
                 is_saved_task=request.is_saved_task,
                 status=request.status,
-                use_cache=request.use_cache,
+                generate_script=request.generate_script,
                 cache_key=request.cache_key,
             )
             # Keeping track of the new workflow id to delete it if an error occurs during the creation process
@@ -2334,5 +2334,3 @@ class WorkflowService:
             workflow_id=workflow.workflow_id,
             workflow_run_id=workflow_run.workflow_run_id,
         )
-
-        return