add created_by field to db (created_by in Step, set to "script" for cached-script steps) (#4202)

This commit is contained in:
Marc Kelechava 2025-12-04 13:47:46 -08:00 committed by GitHub
parent b9b3d9bb69
commit 944c95f456
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
6 changed files with 46 additions and 0 deletions

View file

@ -0,0 +1,31 @@
"""db migration created_by steps
Revision ID: 152354699b93
Revises: 44e95cb21d98
Create Date: 2025-12-04 21:20:20.166647+00:00
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "152354699b93"
down_revision: Union[str, None] = "44e95cb21d98"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.add_column("steps", sa.Column("created_by", sa.String(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column("steps", "created_by")
# ### end Alembic commands ###

View file

@ -257,6 +257,7 @@ class AgentDB:
retry_index: int,
organization_id: str | None = None,
status: StepStatus = StepStatus.created,
created_by: str | None = None,
) -> Step:
try:
async with self.Session() as session:
@ -266,6 +267,7 @@ class AgentDB:
retry_index=retry_index,
status=status,
organization_id=organization_id,
created_by=created_by,
)
session.add(new_step)
await session.commit()
@ -595,6 +597,7 @@ class AgentDB:
incremental_output_tokens: int | None = None,
incremental_reasoning_tokens: int | None = None,
incremental_cached_tokens: int | None = None,
created_by: str | None = None,
) -> Step:
try:
async with self.Session() as session:
@ -627,6 +630,8 @@ class AgentDB:
step.reasoning_token_count = incremental_reasoning_tokens + (step.reasoning_token_count or 0)
if incremental_cached_tokens is not None:
step.cached_token_count = incremental_cached_tokens + (step.cached_token_count or 0)
if created_by is not None:
step.created_by = created_by
await session.commit()
updated_step = await self.get_step(step_id, organization_id)

View file

@ -142,6 +142,7 @@ class StepModel(Base):
cached_token_count = Column(Integer, default=0)
step_cost = Column(Numeric, default=0)
finished_at = Column(DateTime, nullable=True)
created_by = Column(String, nullable=True)
class OrganizationModel(Base):

View file

@ -234,6 +234,7 @@ def convert_to_step(step_model: StepModel, debug_enabled: bool = False) -> Step:
reasoning_token_count=step_model.reasoning_token_count,
cached_token_count=step_model.cached_token_count,
step_cost=step_model.step_cost,
created_by=step_model.created_by,
)

View file

@ -71,6 +71,7 @@ class Step(BaseModel):
reasoning_token_count: int | None = None
cached_token_count: int | None = None
step_cost: float = 0
created_by: str | None = None
is_speculative: bool = False
speculative_original_status: StepStatus | None = None
speculative_llm_metadata: SpeculativeLLMMetadata | None = None

View file

@ -398,6 +398,7 @@ async def _create_workflow_block_run_and_task(
url: str | None = None,
label: str | None = None,
model: dict[str, Any] | None = None,
created_by: str | None = None,
) -> tuple[str | None, str | None, str | None]:
"""
Create a workflow block run and optionally a task if workflow_run_id is available in context.
@ -460,6 +461,7 @@ async def _create_workflow_block_run_and_task(
retry_index=0,
organization_id=organization_id,
status=StepStatus.running,
created_by=created_by,
)
step_id = step.step_id
# reset the action order to 0
@ -1334,6 +1336,7 @@ async def run_task(
url=url,
label=cache_key,
model=model,
created_by="script",
)
prompt = _render_template_with_label(prompt, cache_key)
# set the prompt in the RunContext
@ -1419,6 +1422,7 @@ async def download(
url=url,
label=cache_key,
model=model,
created_by="script",
)
prompt = _render_template_with_label(prompt, cache_key)
# set the prompt in the RunContext
@ -1499,6 +1503,7 @@ async def action(
url=url,
label=cache_key,
model=model,
created_by="script",
)
prompt = _render_template_with_label(prompt, cache_key)
# set the prompt in the RunContext
@ -1578,6 +1583,7 @@ async def login(
url=url,
label=cache_key,
model=model,
created_by="script",
)
prompt = _render_template_with_label(prompt, cache_key)
if totp_url:
@ -1660,6 +1666,7 @@ async def extract(
url=url,
label=cache_key,
model=model,
created_by="script",
)
prompt = _render_template_with_label(prompt, cache_key)
# set the prompt in the RunContext