fix(sheets): auto-extend grid + friendlier error for rich writes that overflow column count (#5613)

This commit is contained in:
Aaron Perez 2026-04-22 21:44:18 -05:00 committed by GitHub
parent 4bd49d0afa
commit 051b96d661
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 260 additions and 1 deletion

View file

@ -7,6 +7,8 @@ import {
useNodes,
useReactFlow,
} from "@xyflow/react";
import { ReloadIcon } from "@radix-ui/react-icons";
import { useParams } from "react-router-dom";
import type { StartNode } from "./types";
import { AppNode } from "..";
import {
@ -15,6 +17,16 @@ import {
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import {
Dialog,
DialogClose,
DialogContent,
DialogDescription,
DialogFooter,
DialogHeader,
DialogTitle,
DialogTrigger,
} from "@/components/ui/dialog";
import {
Select,
SelectContent,
@ -41,6 +53,7 @@ import { MAX_SCREENSHOT_SCROLLS_DEFAULT } from "../Taskv2Node/types";
import { KeyValueInput } from "@/components/KeyValueInput";
import { placeholders } from "@/routes/workflows/editor/helpContent";
import { useToggleScriptForNodeCallback } from "@/routes/workflows/hooks/useToggleScriptForNodeCallback";
import { useResetProfileMutation } from "@/routes/workflows/hooks/useResetProfileMutation";
import { useWorkflowSettingsStore } from "@/store/WorkflowSettingsStore";
import { Flippable } from "@/components/Flippable";
import { useRerender } from "@/hooks/useRerender";
@ -69,12 +82,19 @@ function StartNode({ id, data, parentId }: NodeProps<StartNode>) {
const nodes = useNodes<AppNode>();
const edges = useEdges();
const [facing, setFacing] = useState<"front" | "back">("front");
const [isResetProfileDialogOpen, setIsResetProfileDialogOpen] =
useState(false);
const { workflowPermanentId } = useParams();
const blockScriptStore = useBlockScriptStore();
const recordingStore = useRecordingStore();
const script = blockScriptStore.scripts.__start_block__;
const rerender = useRerender({ prefix: "accordion" });
const toggleScriptForNodeCallback = useToggleScriptForNodeCallback();
const isRecording = recordingStore.isRecording;
const resetProfileMutation = useResetProfileMutation({
workflowPermanentId,
onSuccess: () => setIsResetProfileDialogOpen(false),
});
// Local state for webhook URL to fix race condition where data.webhookCallbackUrl
// isn't updated yet when user clicks "Test Webhook" after typing
@ -394,6 +414,52 @@ function StartNode({ id, data, parentId }: NodeProps<StartNode>) {
}}
/>
</div>
{data.persistBrowserSession && workflowPermanentId && (
<Dialog
open={isResetProfileDialogOpen}
onOpenChange={setIsResetProfileDialogOpen}
>
<DialogTrigger asChild>
<Button
type="button"
variant="secondary"
size="sm"
className="nopan"
>
<ReloadIcon className="mr-2 h-3 w-3" />
Reset Profile
</Button>
</DialogTrigger>
<DialogContent>
<DialogHeader>
<DialogTitle>Reset saved profile?</DialogTitle>
<DialogDescription>
Clears the saved browser profile for this
workflow. The next run will start from a fresh
browser state. Use this if the saved profile
is stuck or producing errors.
</DialogDescription>
</DialogHeader>
<DialogFooter>
<DialogClose asChild>
<Button variant="secondary">Cancel</Button>
</DialogClose>
<Button
variant="destructive"
onClick={() => {
resetProfileMutation.mutate();
}}
disabled={resetProfileMutation.isPending}
>
{resetProfileMutation.isPending && (
<ReloadIcon className="mr-2 h-4 w-4 animate-spin" />
)}
Reset Profile
</Button>
</DialogFooter>
</DialogContent>
</Dialog>
)}
</div>
<div className="space-y-2">
<div className="flex items-center gap-2">

View file

@ -0,0 +1,49 @@
import { useMutation } from "@tanstack/react-query";
import { isAxiosError } from "axios";
import { getClient } from "@/api/AxiosClient";
import { toast } from "@/components/ui/use-toast";
import { useCredentialGetter } from "@/hooks/useCredentialGetter";
/**
 * Mutation hook that clears the saved browser profile for a workflow.
 *
 * POSTs to the reset_profile endpoint and surfaces the outcome via toasts:
 * a success toast (plus the optional `onSuccess` callback) on completion,
 * or a destructive toast with the server-provided detail on failure.
 */
const useResetProfileMutation = ({
  workflowPermanentId,
  onSuccess,
}: {
  workflowPermanentId?: string;
  onSuccess?: () => void;
}) => {
  const credentialGetter = useCredentialGetter();

  // Guard inside the mutation so callers can mount the hook before the
  // route param resolves; firing without an id is a programmer error.
  const resetProfile = async () => {
    if (!workflowPermanentId) {
      throw new Error("workflowPermanentId is required");
    }
    const client = await getClient(credentialGetter, "sans-api-v1");
    const response = await client.post(
      `/workflows/${workflowPermanentId}/browser_session/reset_profile`,
    );
    return response.data;
  };

  return useMutation({
    mutationFn: resetProfile,
    onSuccess: () => {
      toast({
        variant: "success",
        title: "Profile Reset",
        description:
          "The saved browser profile has been cleared. The next run will start with a fresh session.",
      });
      onSuccess?.();
    },
    onError: (error) => {
      // Prefer the API's `detail` field when the failure is an HTTP error.
      let description = error.message;
      if (isAxiosError(error)) {
        description = error.response?.data?.detail ?? error.message;
      }
      toast({
        variant: "destructive",
        title: "Failed to Reset Profile",
        description,
      });
    },
  });
};
export { useResetProfileMutation };

View file

@ -696,6 +696,26 @@ class AgentFunction:
"""Resolve a tab title to its numeric sheetId. OSS no-op."""
return None
async def google_sheets_get_grid_properties(
    self,
    *,
    access_token: str,
    spreadsheet_id: str,
    sheet_title: str,
) -> Any | None:
    """Return the named tab's grid dimensions (sheet_id, column_count, row_count). OSS no-op.

    Args:
        access_token: OAuth access token for the Google Sheets API (unused here).
        spreadsheet_id: Id of the target spreadsheet (unused here).
        sheet_title: Title of the tab whose grid dimensions are requested (unused here).

    Returns:
        Always None in OSS; overriding (cloud) implementations return grid
        properties, and callers treat None as "no grid info available".
    """
    return None
async def google_sheets_get_grid_properties_by_id(
    self,
    *,
    access_token: str,
    spreadsheet_id: str,
    sheet_id: int,
) -> Any | None:
    """Return grid dimensions for a sheet matched by numeric sheetId. OSS no-op.

    Args:
        access_token: OAuth access token for the Google Sheets API (unused here).
        spreadsheet_id: Id of the target spreadsheet (unused here).
        sheet_id: Numeric sheetId of the tab to look up (unused here).

    Returns:
        Always None in OSS; overriding (cloud) implementations return grid
        properties, and callers treat None as "no grid info available".
    """
    return None
async def generate_async_operations(
self,
organization: Organization,

View file

@ -15,6 +15,8 @@ from skyvern.schemas.google_sheets import (
GoogleSheetsAPIError,
a1_to_grid_range,
build_a1,
build_append_dimension_request,
column_index_to_letter,
column_letters_to_index,
extract_a1_sheet_prefix,
extract_spreadsheet_id,
@ -307,11 +309,39 @@ class RichSheetsInput:
sheet_title: str | None
_COLUMN_OVERFLOW_RE = re.compile(
r"Attempting to write column:\s*(\d+),?\s*beyond the last requested column of:\s*(\d+)",
re.IGNORECASE,
)
def _maybe_rewrite_column_overflow(message: str) -> str | None:
"""Translate Google's 0-indexed 'attempting to write column N' error into a letter-friendly message.
Returns the rewritten message, or None if the pattern does not match.
"""
match = _COLUMN_OVERFLOW_RE.search(message)
if not match:
return None
write_col_idx = int(match.group(1))
max_col_idx = int(match.group(2))
sheet_columns = max_col_idx + 1
write_letter = column_index_to_letter(write_col_idx)
max_letter = column_index_to_letter(max_col_idx)
return (
f"sheet has {sheet_columns} columns (last column is {max_letter}), "
f"but this write needs column {write_letter}. "
f"Widen the sheet, narrow the data, or remove the leading column offset on the range."
)
def _failure_reason_from_sheets_error(action: str, exc: GoogleSheetsAPIError) -> str:
    """Map a GoogleSheetsAPIError to a human-readable failure reason for `action`.

    Special-cases reconnect-required 403s and 429 rate limits, and rewrites
    Google's 0-indexed column-overflow message into letter form when it matches;
    anything else falls through to a generic HTTP-status message.
    """
    if exc.status == 403 and exc.code == "reconnect_required":
        return f"Reconnect the Google account: {exc.message}"
    if exc.status == 429:
        return f"Google Sheets rate limit on {action}: {exc.message}"
    friendly = _maybe_rewrite_column_overflow(exc.message)
    if friendly is None:
        return f"Google Sheets {action} failed (HTTP {exc.status}): {exc.message}"
    return f"Google Sheets {action} failed: {friendly}"
@ -753,6 +783,7 @@ class GoogleSheetsWriteBlock(Block):
fields_mask = "userEnteredValue,userEnteredFormat,note,hyperlink"
requests: list[dict[str, Any]] = []
required_width = 0
if self.write_mode == "append":
append_col_offset = leading_column_offset(a1)
@ -773,6 +804,8 @@ class GoogleSheetsWriteBlock(Block):
)
merge_origin_row: int | None = None
merge_origin_col = append_col_offset
if padded_cells:
required_width = max(len(row) for row in padded_cells)
else:
rows_payload = [{"values": row} for row in rich.cells]
try:
@ -797,6 +830,7 @@ class GoogleSheetsWriteBlock(Block):
)
merge_origin_row = grid_range["startRowIndex"]
merge_origin_col = grid_range["startColumnIndex"]
required_width = grid_range["endColumnIndex"]
for merge in rich.merges:
if self.write_mode == "append":
@ -804,6 +838,7 @@ class GoogleSheetsWriteBlock(Block):
# updatedRange to shift merges correctly. Skip for now.
continue
row_offset = merge_origin_row or 0
end_col = merge["end_column_index"] + merge_origin_col
requests.append(
{
"mergeCells": {
@ -812,12 +847,71 @@ class GoogleSheetsWriteBlock(Block):
"startRowIndex": merge["start_row_index"] + row_offset,
"endRowIndex": merge["end_row_index"] + row_offset,
"startColumnIndex": merge["start_column_index"] + merge_origin_col,
"endColumnIndex": merge["end_column_index"] + merge_origin_col,
"endColumnIndex": end_col,
},
"mergeType": "MERGE_ALL",
}
}
)
if end_col > required_width:
required_width = end_col
# Per-tab cap is unconditional: the write can never succeed past ZZZ regardless
# of whether grid lookup is available, so fail fast before touching Google.
max_columns = MAX_COLUMN_INDEX + 1
if required_width > max_columns:
last_letter = column_index_to_letter(MAX_COLUMN_INDEX)
needed_letter = column_index_to_letter(required_width - 1)
sheet_label = self.sheet_name or extract_a1_sheet_prefix(a1) or rich.sheet_title or "destination"
failure_reason = (
f"Sheet '{sheet_label}' cannot fit this write: needs column "
f"{needed_letter} but the Google Sheets per-tab limit is {max_columns} "
f"columns ({last_letter}). Narrow the data or split it across tabs."
)
error_data = {"status_code": 400, "code": "column_overflow", "error": failure_reason}
await self.record_output_parameter_value(workflow_run_context, workflow_run_id, error_data)
return await self.build_block_result(
success=False,
failure_reason=failure_reason,
output_parameter_value=error_data,
status=BlockStatus.failed,
workflow_run_block_id=workflow_run_block_id,
organization_id=organization_id,
)
# Pre-flight the destination's column_count and prepend an appendDimension
# so a write that's wider than the current grid succeeds in the same atomic batch.
# If the grid lookup is unavailable (no AgentFunction impl, transient error of any
# kind including transport failures), fall through; the existing error mapper
# will produce a friendly message if the eventual write fails.
if required_width > 0:
grid_props: Any = None
try:
grid_props = await app.AGENT_FUNCTION.google_sheets_get_grid_properties_by_id(
access_token=access_token,
spreadsheet_id=spreadsheet_id,
sheet_id=sheet_id,
)
except Exception as e:
LOG.warning(
"Failed to fetch grid properties for pre-flight; falling through to write",
spreadsheet_id=spreadsheet_id,
sheet_id=sheet_id,
error=str(e),
)
# Duck-type the response: cloud returns SheetGridProperties; the OSS no-op
# returns None. Anything else (e.g. an AsyncMock in unrelated test fixtures)
# is treated as no-info to avoid false-positive failures.
current_column_count = getattr(grid_props, "column_count", None)
if isinstance(current_column_count, int) and current_column_count < required_width:
requests.insert(
0,
build_append_dimension_request(
sheet_id=sheet_id,
dimension="COLUMNS",
length=required_width - current_column_count,
),
)
try:
payload = await app.AGENT_FUNCTION.google_sheets_batch_update(

View file

@ -1,4 +1,5 @@
import re
from typing import Any
_SPREADSHEET_URL_RE = re.compile(r"/spreadsheets(?:/u/\d+)?/d/([a-zA-Z0-9-_]+)")
_BARE_ID_RE = re.compile(r"^[a-zA-Z0-9-_]{20,}$")
@ -51,6 +52,35 @@ def column_letters_to_index(letters: str) -> int:
return index - 1
def column_index_to_letter(index: int) -> str:
    """Convert a 0-based column index to its A1 letter label (0 -> "A", 25 -> "Z", 26 -> "AA").

    Raises:
        ValueError: if `index` is negative.
    """
    if index < 0:
        raise ValueError(f"column index must be non-negative, got: {index}")
    # Bijective base-26: after emitting each "digit", shift by one extra so
    # that 26 maps to "AA" rather than "BA".
    pieces: list[str] = []
    remaining = index
    while remaining >= 0:
        remaining, digit = divmod(remaining, 26)
        pieces.append(chr(ord("A") + digit))
        remaining -= 1
    return "".join(reversed(pieces))
def build_append_dimension_request(*, sheet_id: int, dimension: str, length: int) -> dict[str, Any]:
    """Build a Sheets batchUpdate appendDimension request adding `length` columns or rows.

    Args:
        sheet_id: Numeric sheetId of the tab to extend.
        dimension: Either "COLUMNS" or "ROWS".
        length: How many columns/rows to append; must be positive.

    Raises:
        ValueError: if `dimension` is not COLUMNS/ROWS or `length` is not positive.
    """
    if dimension not in ("COLUMNS", "ROWS"):
        raise ValueError(f"dimension must be COLUMNS or ROWS, got: {dimension!r}")
    if length <= 0:
        raise ValueError(f"length must be positive, got: {length}")
    body = {
        "sheetId": sheet_id,
        "dimension": dimension,
        "length": length,
    }
    return {"appendDimension": body}
def strip_a1_sheet_prefix(a1: str) -> str:
if a1.startswith("'"):
idx = 1