mirror of https://github.com/Skyvern-AI/skyvern.git, synced 2025-09-14 09:19:40 +00:00
add cache_key to workflows table (#3112)
This commit is contained in:
parent 800c030e5c
commit 31aa7d6973
6 changed files with 46 additions and 11 deletions
@@ -0,0 +1,33 @@
+"""add cache_key to workflows table
+
+Revision ID: f2e78df26c97
+Revises: dd29417b397c
+Create Date: 2025-08-06 07:04:57.428538+00:00
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "f2e78df26c97"
+down_revision: Union[str, None] = "dd29417b397c"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column("workflows", sa.Column("cache_key", sa.String(), nullable=True))
+    op.drop_column("workflows", "cache_project_id")
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column("workflows", sa.Column("cache_project_id", sa.VARCHAR(), autoincrement=False, nullable=True))
+    op.drop_column("workflows", "cache_key")
+    # ### end Alembic commands ###
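Note that upgrade() adds the new column and drops cache_project_id without carrying values across, so any data in the old column is lost on upgrade. If that data needed to be preserved, a backfill step along the following lines could be inserted; this is a hedged sketch of a possible adjustment, not part of the commit.

import sqlalchemy as sa
from alembic import op

def upgrade() -> None:
    op.add_column("workflows", sa.Column("cache_key", sa.String(), nullable=True))
    # Hypothetical backfill (not in this commit): copy old values into the new
    # column before the old one is dropped.
    op.execute("UPDATE workflows SET cache_key = cache_project_id")
    op.drop_column("workflows", "cache_project_id")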
@@ -1358,7 +1358,7 @@ class AgentDB:
         is_saved_task: bool = False,
         status: WorkflowStatus = WorkflowStatus.published,
         use_cache: bool = False,
-        cache_project_id: str | None = None,
+        cache_key: str | None = None,
     ) -> Workflow:
         async with self.Session() as session:
             workflow = WorkflowModel(
@@ -1377,7 +1377,7 @@ class AgentDB:
                 is_saved_task=is_saved_task,
                 status=status,
                 use_cache=use_cache,
-                cache_project_id=cache_project_id,
+                cache_key=cache_key,
             )
             if workflow_permanent_id:
                 workflow.workflow_permanent_id = workflow_permanent_id
@@ -1557,7 +1557,7 @@ class AgentDB:
         workflow_definition: dict[str, Any] | None = None,
         version: int | None = None,
         use_cache: bool | None = None,
-        cache_project_id: str | None = None,
+        cache_key: str | None = None,
     ) -> Workflow:
         try:
             async with self.Session() as session:
@@ -1577,8 +1577,8 @@ class AgentDB:
                     workflow.version = version
                 if use_cache is not None:
                     workflow.use_cache = use_cache
-                if cache_project_id is not None:
-                    workflow.cache_project_id = cache_project_id
+                if cache_key is not None:
+                    workflow.cache_key = cache_key
                 await session.commit()
                 await session.refresh(workflow)
                 return convert_to_workflow(workflow, self.debug_enabled)
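The update path follows the same None-sentinel pattern as the surrounding fields: cache_key is only written when the caller passes a non-None value, which also means this method cannot clear it back to NULL. A minimal, self-contained sketch of that pattern (illustrative names, not the repo's code):

from dataclasses import dataclass

@dataclass
class WorkflowRow:
    # Stand-in for the ORM row, limited to the fields touched in the hunk above.
    use_cache: bool = False
    cache_key: str | None = None

def apply_updates(row: WorkflowRow, *, use_cache: bool | None = None, cache_key: str | None = None) -> WorkflowRow:
    # Mirrors the hunk: only explicitly passed (non-None) values are written.
    if use_cache is not None:
        row.use_cache = use_cache
    if cache_key is not None:
        row.cache_key = cache_key
    return row

row = apply_updates(WorkflowRow(), use_cache=True, cache_key="checkout-flow")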
@@ -239,7 +239,7 @@ class WorkflowModel(Base):
     model = Column(JSON, nullable=True)
     status = Column(String, nullable=False, default="published")
     use_cache = Column(Boolean, default=False, nullable=False)
-    cache_project_id = Column(String, nullable=True)
+    cache_key = Column(String, nullable=True)
 
     created_at = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
     modified_at = Column(
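With the column renamed on the ORM model, any lookup that previously keyed on cache_project_id would now filter on cache_key. A self-contained sketch of such a query against a toy model (not the repo's WorkflowModel; values are illustrative):

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class WorkflowSketch(Base):
    # Toy stand-in with just the columns from the hunk above.
    __tablename__ = "workflows_sketch"
    workflow_id = sa.Column(sa.String, primary_key=True)
    use_cache = sa.Column(sa.Boolean, default=False, nullable=False)
    cache_key = sa.Column(sa.String, nullable=True)

# Hypothetical lookup of cached workflows by the new key.
stmt = sa.select(WorkflowSketch).where(
    WorkflowSketch.use_cache.is_(True),
    WorkflowSketch.cache_key == "checkout-flow",
)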
@@ -80,6 +80,8 @@ class Workflow(BaseModel):
     status: WorkflowStatus = WorkflowStatus.published
     max_screenshot_scrolls: int | None = None
     extra_http_headers: dict[str, str] | None = None
+    use_cache: bool = False
+    cache_key: str | None = None
 
     created_at: datetime
     modified_at: datetime
@@ -444,4 +444,4 @@ class WorkflowCreateYAMLRequest(BaseModel):
     extra_http_headers: dict[str, str] | None = None
     status: WorkflowStatus = WorkflowStatus.published
     use_cache: bool = False
-    cache_project_id: str | None = None
+    cache_key: str | None = None
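On the request model the field is renamed rather than aliased, so payloads that still send cache_project_id would no longer populate anything (whether the stray key is ignored or rejected depends on the model's extra-field settings), while cache_key is picked up. A minimal sketch with a toy model limited to the two caching fields:

from pydantic import BaseModel

class CacheFieldsSketch(BaseModel):
    # Toy subset of WorkflowCreateYAMLRequest; the real model has many more fields.
    use_cache: bool = False
    cache_key: str | None = None

req = CacheFieldsSketch(use_cache=True, cache_key="checkout-flow")
print(req.cache_key)  # "checkout-flow"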
@@ -636,7 +636,7 @@ class WorkflowService:
         status: WorkflowStatus = WorkflowStatus.published,
         extra_http_headers: dict[str, str] | None = None,
         use_cache: bool = False,
-        cache_project_id: str | None = None,
+        cache_key: str | None = None,
     ) -> Workflow:
         return await app.DATABASE.create_workflow(
             title=title,
@@ -656,7 +656,7 @@ class WorkflowService:
             status=status,
             extra_http_headers=extra_http_headers,
             use_cache=use_cache,
-            cache_project_id=cache_project_id,
+            cache_key=cache_key,
         )
 
     async def get_workflow(self, workflow_id: str, organization_id: str | None = None) -> Workflow:
@@ -1539,7 +1539,7 @@ class WorkflowService:
                 is_saved_task=request.is_saved_task,
                 status=request.status,
                 use_cache=request.use_cache,
-                cache_project_id=request.cache_project_id,
+                cache_key=request.cache_key,
             )
         else:
             workflow = await self.create_workflow(
@@ -1558,7 +1558,7 @@ class WorkflowService:
                 is_saved_task=request.is_saved_task,
                 status=request.status,
                 use_cache=request.use_cache,
-                cache_project_id=request.cache_project_id,
+                cache_key=request.cache_key,
             )
         # Keeping track of the new workflow id to delete it if an error occurs during the creation process
         new_workflow_id = workflow.workflow_id
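End to end, the rename flows from the request model through WorkflowService and AgentDB into the workflows table, so any caller that built a create/update payload with cache_project_id now sends cache_key instead. A hypothetical caller-side change (field values are illustrative):

payload = {
    "title": "Example workflow",       # illustrative
    "use_cache": True,
    # "cache_project_id": "proj_123",  # old field, removed by this commit
    "cache_key": "proj_123",           # new field name
}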