[Feature] Adding Azure Blob Storage support to File Upload workflow block (#3130)

Authored by Trevor Sullivan on 2025-08-07 22:59:37 -06:00; committed by GitHub.
parent 71f71b8e77
commit b3e17c12b3
17 changed files with 667 additions and 169 deletions

poetry.lock (generated)

@@ -1,4 +1,4 @@
# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
[[package]]
name = "about-time"
@@ -678,6 +678,136 @@ files = [
botocore = ">=1.11.3"
wrapt = "*"
[[package]]
name = "azure-core"
version = "1.35.0"
description = "Microsoft Azure Core Library for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_core-1.35.0-py3-none-any.whl", hash = "sha256:8db78c72868a58f3de8991eb4d22c4d368fae226dac1002998d6c50437e7dad1"},
{file = "azure_core-1.35.0.tar.gz", hash = "sha256:c0be528489485e9ede59b6971eb63c1eaacf83ef53001bfe3904e475e972be5c"},
]
[package.dependencies]
requests = ">=2.21.0"
six = ">=1.11.0"
typing-extensions = ">=4.6.0"
[package.extras]
aio = ["aiohttp (>=3.0)"]
tracing = ["opentelemetry-api (>=1.26,<2.0)"]
[[package]]
name = "azure-identity"
version = "1.24.0"
description = "Microsoft Azure Identity Library for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_identity-1.24.0-py3-none-any.whl", hash = "sha256:9e04997cde0ab02ed66422c74748548e620b7b29361c72ce622acab0267ff7c4"},
{file = "azure_identity-1.24.0.tar.gz", hash = "sha256:6c3a40b2a70af831e920b89e6421e8dcd4af78a0cb38b9642d86c67643d4930c"},
]
[package.dependencies]
azure-core = ">=1.31.0"
cryptography = ">=2.5"
msal = ">=1.30.0"
msal-extensions = ">=1.2.0"
typing-extensions = ">=4.0.0"
[[package]]
name = "azure-keyvault"
version = "4.2.0"
description = "Microsoft Azure Key Vault Client Libraries for Python"
optional = false
python-versions = "*"
groups = ["main"]
files = [
{file = "azure-keyvault-4.2.0.zip", hash = "sha256:731add108a3e29ab4fd501a3c477256c286c34d0996b383fb6a3945462933761"},
{file = "azure_keyvault-4.2.0-py2.py3-none-any.whl", hash = "sha256:16b29039244cbe8b940c98a0d795626d76d2a579cb9b8c559983ad208082c0de"},
]
[package.dependencies]
azure-keyvault-certificates = ">=4.4,<5.0"
azure-keyvault-keys = ">=4.5,<5.0"
azure-keyvault-secrets = ">=4.4,<5.0"
[[package]]
name = "azure-keyvault-certificates"
version = "4.10.0"
description = "Microsoft Corporation Key Vault Certificates Client Library for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_keyvault_certificates-4.10.0-py3-none-any.whl", hash = "sha256:fa76cbc329274cb5f4ab61b0ed7d209d44377df4b4d6be2fd01e741c2fbb83a9"},
{file = "azure_keyvault_certificates-4.10.0.tar.gz", hash = "sha256:004ff47a73152f9f40f678e5a07719b753a3ca86f0460bfeaaf6a23304872e05"},
]
[package.dependencies]
azure-core = ">=1.31.0"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-keyvault-keys"
version = "4.11.0"
description = "Microsoft Corporation Key Vault Keys Client Library for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_keyvault_keys-4.11.0-py3-none-any.whl", hash = "sha256:fa5febd5805f0fed4c0a1d13c9096081c72a6fa36ccae1299a137f34280eda53"},
{file = "azure_keyvault_keys-4.11.0.tar.gz", hash = "sha256:f257b1917a2c3a88983e3f5675a6419449eb262318888d5b51e1cb3bed79779a"},
]
[package.dependencies]
azure-core = ">=1.31.0"
cryptography = ">=2.1.4"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-keyvault-secrets"
version = "4.10.0"
description = "Microsoft Corporation Key Vault Secrets Client Library for Python"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "azure_keyvault_secrets-4.10.0-py3-none-any.whl", hash = "sha256:9dbde256077a4ee1a847646671580692e3f9bea36bcfc189c3cf2b9a94eb38b9"},
{file = "azure_keyvault_secrets-4.10.0.tar.gz", hash = "sha256:666fa42892f9cee749563e551a90f060435ab878977c95265173a8246d546a36"},
]
[package.dependencies]
azure-core = ">=1.31.0"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[[package]]
name = "azure-storage-blob"
version = "12.26.0"
description = "Microsoft Azure Blob Storage Client Library for Python"
optional = false
python-versions = ">=3.8"
groups = ["main"]
files = [
{file = "azure_storage_blob-12.26.0-py3-none-any.whl", hash = "sha256:8c5631b8b22b4f53ec5fff2f3bededf34cfef111e2af613ad42c9e6de00a77fe"},
{file = "azure_storage_blob-12.26.0.tar.gz", hash = "sha256:5dd7d7824224f7de00bfeb032753601c982655173061e242f13be6e26d78d71f"},
]
[package.dependencies]
azure-core = ">=1.30.0"
cryptography = ">=2.1.4"
isodate = ">=0.6.1"
typing-extensions = ">=4.6.0"
[package.extras]
aio = ["azure-core[aio] (>=1.30.0)"]
[[package]]
name = "babel"
version = "2.17.0"
@@ -2386,7 +2516,7 @@ description = "Lightweight in-process concurrent programming"
optional = false
python-versions = ">=3.9"
groups = ["main"]
markers = "python_version == \"3.12\" or python_version == \"3.13\""
markers = "python_version >= \"3.12\""
files = [
{file = "greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be"},
{file = "greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac"},
@@ -3062,6 +3192,18 @@ widgetsnbextension = ">=4.0.14,<4.1.0"
[package.extras]
test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"]
[[package]]
name = "isodate"
version = "0.7.2"
description = "An ISO 8601 date/time/duration parser and formatter"
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"},
{file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"},
]
[[package]]
name = "isoduration"
version = "20.11.0"
@@ -3195,7 +3337,7 @@ description = "Low-level, pure Python DBus protocol wrapper."
optional = false
python-versions = ">=3.7"
groups = ["dev"]
markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and sys_platform == \"linux\""
markers = "sys_platform == \"linux\" and platform_machine != \"ppc64le\" and platform_machine != \"s390x\""
files = [
{file = "jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683"},
{file = "jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732"},
@@ -4287,6 +4429,44 @@ docs = ["sphinx"]
gmpy = ["gmpy2 (>=2.1.0a4) ; platform_python_implementation != \"PyPy\""]
tests = ["pytest (>=4.6)"]
[[package]]
name = "msal"
version = "1.33.0"
description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect."
optional = false
python-versions = ">=3.7"
groups = ["main"]
files = [
{file = "msal-1.33.0-py3-none-any.whl", hash = "sha256:c0cd41cecf8eaed733ee7e3be9e040291eba53b0f262d3ae9c58f38b04244273"},
{file = "msal-1.33.0.tar.gz", hash = "sha256:836ad80faa3e25a7d71015c990ce61f704a87328b1e73bcbb0623a18cbf17510"},
]
[package.dependencies]
cryptography = ">=2.5,<48"
PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]}
requests = ">=2.0.0,<3"
[package.extras]
broker = ["pymsalruntime (>=0.14,<0.19) ; python_version >= \"3.6\" and platform_system == \"Windows\"", "pymsalruntime (>=0.17,<0.19) ; python_version >= \"3.8\" and platform_system == \"Darwin\"", "pymsalruntime (>=0.18,<0.19) ; python_version >= \"3.8\" and platform_system == \"Linux\""]
[[package]]
name = "msal-extensions"
version = "1.3.1"
description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism."
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
{file = "msal_extensions-1.3.1-py3-none-any.whl", hash = "sha256:96d3de4d034504e969ac5e85bae8106c8373b5c6568e4c8fa7af2eca9dbe6bca"},
{file = "msal_extensions-1.3.1.tar.gz", hash = "sha256:c5b0fd10f65ef62b5f1d62f4251d51cbcaf003fcedae8c91b040a488614be1a4"},
]
[package.dependencies]
msal = ">=1.29,<2"
[package.extras]
portalocker = ["portalocker (>=1.4,<4)"]
[[package]]
name = "multidict"
version = "6.6.3"
@@ -4841,7 +5021,7 @@ description = "ONNX Runtime is a runtime accelerator for Machine Learning models
optional = false
python-versions = ">=3.10"
groups = ["main"]
markers = "python_version == \"3.12\" or python_version == \"3.13\""
markers = "python_version >= \"3.12\""
files = [
{file = "onnxruntime-1.22.0-cp310-cp310-macosx_13_0_universal2.whl", hash = "sha256:85d8826cc8054e4d6bf07f779dc742a363c39094015bdad6a08b3c18cfe0ba8c"},
{file = "onnxruntime-1.22.0-cp310-cp310-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:468c9502a12f6f49ec335c2febd22fdceecc1e4cc96dfc27e419ba237dff5aff"},
@@ -5957,7 +6137,7 @@ description = "A high-level API to automate web browsers"
optional = false
python-versions = ">=3.9"
groups = ["main"]
markers = "python_version == \"3.12\" or python_version == \"3.13\""
markers = "python_version >= \"3.12\""
files = [
{file = "playwright-1.53.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:48a1a15ce810f0ffe512b6050de9871ea193b41dd3cc1bbed87b8431012419ba"},
{file = "playwright-1.53.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a701f9498a5b87e3f929ec01cea3109fbde75821b19c7ba4bba54f6127b94f76"},
@@ -6254,7 +6434,7 @@ description = "PostgreSQL database adapter for Python"
optional = false
python-versions = ">=3.7"
groups = ["main"]
markers = "python_version == \"3.12\" or python_version == \"3.11\""
markers = "python_version < \"3.13\""
files = [
{file = "psycopg-3.1.18-py3-none-any.whl", hash = "sha256:4d5a0a5a8590906daa58ebd5f3cfc34091377354a1acced269dd10faf55da60e"},
{file = "psycopg-3.1.18.tar.gz", hash = "sha256:31144d3fb4c17d78094d9e579826f047d4af1da6a10427d91dfcfb6ecdf6f12b"},
@@ -6307,7 +6487,7 @@ description = "PostgreSQL database adapter for Python -- C optimisation distribu
optional = false
python-versions = ">=3.7"
groups = ["main"]
markers = "(python_version == \"3.12\" or python_version == \"3.11\") and implementation_name != \"pypy\""
markers = "python_version < \"3.13\" and implementation_name != \"pypy\""
files = [
{file = "psycopg_binary-3.1.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c323103dfa663b88204cf5f028e83c77d7a715f9b6f51d2bbc8184b99ddd90a"},
{file = "psycopg_binary-3.1.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:887f8d856c91510148be942c7acd702ccf761a05f59f8abc123c22ab77b5a16c"},
@@ -6758,7 +6938,7 @@ description = "A rough port of Node.js's EventEmitter to Python with a few trick
optional = false
python-versions = ">=3.8"
groups = ["main"]
markers = "python_version == \"3.12\" or python_version == \"3.13\""
markers = "python_version >= \"3.12\""
files = [
{file = "pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498"},
{file = "pyee-13.0.0.tar.gz", hash = "sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37"},
@@ -6809,6 +6989,9 @@ files = [
{file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"},
]
[package.dependencies]
cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""}
[package.extras]
crypto = ["cryptography (>=3.4.0)"]
dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
@@ -7071,7 +7254,7 @@ description = "A (partial) reimplementation of pywin32 using ctypes/cffi"
optional = false
python-versions = ">=3.6"
groups = ["dev"]
markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and sys_platform == \"win32\""
markers = "sys_platform == \"win32\" and platform_machine != \"ppc64le\" and platform_machine != \"s390x\""
files = [
{file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"},
{file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"},
@@ -7811,7 +7994,7 @@ description = "Python bindings to FreeDesktop.org Secret Service API"
optional = false
python-versions = ">=3.6"
groups = ["dev"]
markers = "platform_machine != \"ppc64le\" and platform_machine != \"s390x\" and sys_platform == \"linux\""
markers = "sys_platform == \"linux\" and platform_machine != \"ppc64le\" and platform_machine != \"s390x\""
files = [
{file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"},
{file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"},
@@ -9154,7 +9337,7 @@ description = "Fast implementation of asyncio event loop on top of libuv"
optional = false
python-versions = ">=3.8.0"
groups = ["main"]
markers = "sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\""
markers = "platform_python_implementation != \"PyPy\" and sys_platform != \"win32\" and sys_platform != \"cygwin\""
files = [
{file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"},
{file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"},
@@ -9789,4 +9972,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.11,<3.14"
content-hash = "274075d3ec560283468f8aab5ec9a6653481dc503035d1b93428ec75ed916b3a"
content-hash = "d227fed32608a260a4ea178e74bc025c2550e76316285830b0138c3742ddcfa5"


@@ -7,6 +7,7 @@ readme = "README.md"
packages = [{ include = "skyvern" }, { include = "alembic" }]
[tool.poetry.dependencies]
azure-storage-blob = ">=12.26.0"
python = ">=3.11,<3.14"
python-dotenv = "^1.0.0"
openai = ">=1.68.2"
@@ -80,6 +81,8 @@ curlparser = "^0.1.0"
lmnr = {extras = ["all"], version = "^0.7.0"}
openpyxl = "^3.1.5"
pandas = "^2.3.1"
azure-identity = "^1.24.0"
azure-keyvault = "^4.2.0"
[tool.poetry.group.dev.dependencies]
isort = "^5.13.2"


@@ -93,6 +93,9 @@ export const helpTooltips = {
aws_secret_access_key:
"The AWS secret access key to use to upload the file to S3.",
region_name: "The AWS region",
    azure_storage_account_name:
      "The Azure storage account name to use to upload the file to Blob Storage.",
    azure_storage_account_key:
      "The Azure storage account key to use to upload the file to Blob Storage.",
    azure_blob_container_name:
      "The Azure Blob Storage container to upload the file to.",
},
download: {
...baseHelpTooltipContent,


@@ -10,6 +10,13 @@ import { useDebugStore } from "@/store/useDebugStore";
import { cn } from "@/util/utils";
import { NodeHeader } from "../components/NodeHeader";
import { useParams } from "react-router-dom";
import {
Select,
SelectContent,
SelectItem,
SelectTrigger,
SelectValue,
} from "@/components/ui/select";
function FileUploadNode({ id, data }: NodeProps<FileUploadNode>) {
const { updateNodeData } = useReactFlow();
@@ -22,11 +29,14 @@ function FileUploadNode({ id, data }: NodeProps<FileUploadNode>) {
const [inputs, setInputs] = useState({
storageType: data.storageType,
awsAccessKeyId: data.awsAccessKeyId,
awsSecretAccessKey: data.awsSecretAccessKey,
s3Bucket: data.s3Bucket,
regionName: data.regionName,
path: data.path,
awsAccessKeyId: data.awsAccessKeyId ?? "",
awsSecretAccessKey: data.awsSecretAccessKey ?? "",
s3Bucket: data.s3Bucket ?? "",
regionName: data.regionName ?? "",
path: data.path ?? "",
azureStorageAccountName: data.azureStorageAccountName ?? "",
azureStorageAccountKey: data.azureStorageAccountKey ?? "",
azureBlobContainerName: data.azureBlobContainerName ?? "",
});
function handleChange(key: string, value: unknown) {
@@ -77,12 +87,23 @@ function FileUploadNode({ id, data }: NodeProps<FileUploadNode>) {
content={helpTooltips["fileUpload"]["storage_type"]}
/>
</div>
<Input
value={data.storageType}
className="nopan text-xs"
disabled
/>
<Select
value={inputs.storageType}
onValueChange={(value) => handleChange("storageType", value)}
disabled={!editable}
>
<SelectTrigger className="nopan text-xs">
<SelectValue placeholder="Select storage type" />
</SelectTrigger>
<SelectContent>
<SelectItem value="s3">Amazon S3</SelectItem>
<SelectItem value="azure">Azure Blob Storage</SelectItem>
</SelectContent>
</Select>
</div>
{inputs.storageType === "s3" && (
<>
<div className="space-y-2">
<div className="flex items-center gap-2">
<Label className="text-sm text-slate-400">
@@ -97,7 +118,7 @@ function FileUploadNode({ id, data }: NodeProps<FileUploadNode>) {
onChange={(value) => {
handleChange("awsAccessKeyId", value);
}}
value={inputs.awsAccessKeyId}
value={inputs.awsAccessKeyId as string}
className="nopan text-xs"
/>
</div>
@@ -107,12 +128,14 @@ function FileUploadNode({ id, data }: NodeProps<FileUploadNode>) {
AWS Secret Access Key
</Label>
<HelpTooltip
content={helpTooltips["fileUpload"]["aws_secret_access_key"]}
content={
helpTooltips["fileUpload"]["aws_secret_access_key"]
}
/>
</div>
<Input
type="password"
value={inputs.awsSecretAccessKey}
value={inputs.awsSecretAccessKey as string}
className="nopan text-xs"
onChange={(event) => {
handleChange("awsSecretAccessKey", event.target.value);
@@ -122,14 +145,16 @@ function FileUploadNode({ id, data }: NodeProps<FileUploadNode>) {
<div className="space-y-2">
<div className="flex items-center gap-2">
<Label className="text-sm text-slate-400">S3 Bucket</Label>
<HelpTooltip content={helpTooltips["fileUpload"]["s3_bucket"]} />
<HelpTooltip
content={helpTooltips["fileUpload"]["s3_bucket"]}
/>
</div>
<WorkflowBlockInputTextarea
nodeId={id}
onChange={(value) => {
handleChange("s3Bucket", value);
}}
value={inputs.s3Bucket}
value={inputs.s3Bucket as string}
className="nopan text-xs"
/>
</div>
@@ -145,7 +170,7 @@ function FileUploadNode({ id, data }: NodeProps<FileUploadNode>) {
onChange={(value) => {
handleChange("regionName", value);
}}
value={inputs.regionName}
value={inputs.regionName as string}
className="nopan text-xs"
/>
</div>
@@ -161,10 +186,77 @@ function FileUploadNode({ id, data }: NodeProps<FileUploadNode>) {
onChange={(value) => {
handleChange("path", value);
}}
value={inputs.path}
value={inputs.path as string}
className="nopan text-xs"
/>
</div>
</>
)}
{inputs.storageType === "azure" && (
<>
<div className="space-y-2">
<div className="flex items-center gap-2">
<Label className="text-sm text-slate-400">
Storage Account Name
</Label>
<HelpTooltip
content={
helpTooltips["fileUpload"]["azure_storage_account_name"]
}
/>
</div>
<WorkflowBlockInputTextarea
nodeId={id}
onChange={(value) => {
handleChange("azureStorageAccountName", value);
}}
value={inputs.azureStorageAccountName as string}
className="nopan text-xs"
/>
</div>
<div className="space-y-2">
<div className="flex items-center gap-2">
<Label className="text-sm text-slate-400">
Storage Account Key
</Label>
<HelpTooltip
content={
helpTooltips["fileUpload"]["azure_storage_account_key"]
}
/>
</div>
<Input
type="password"
value={inputs.azureStorageAccountKey as string}
className="nopan text-xs"
onChange={(event) => {
handleChange("azureStorageAccountKey", event.target.value);
}}
/>
</div>
<div className="space-y-2">
<div className="flex items-center gap-2">
<Label className="text-sm text-slate-400">
Blob Container Name
</Label>
<HelpTooltip
content={
helpTooltips["fileUpload"]["azure_blob_container_name"]
}
/>
</div>
<WorkflowBlockInputTextarea
nodeId={id}
onChange={(value) => {
handleChange("azureBlobContainerName", value);
}}
value={inputs.azureBlobContainerName as string}
className="nopan text-xs"
/>
</div>
</>
)}
</div>
</div>
</div>


@@ -5,11 +5,14 @@ import { debuggableWorkflowBlockTypes } from "@/routes/workflows/types/workflowT
export type FileUploadNodeData = NodeBaseData & {
path: string;
editable: boolean;
storageType: string;
s3Bucket: string;
awsAccessKeyId: string;
awsSecretAccessKey: string;
regionName: string;
storageType: "s3" | "azure";
s3Bucket: string | null;
awsAccessKeyId: string | null;
awsSecretAccessKey: string | null;
regionName: string | null;
azureStorageAccountName: string | null;
azureStorageAccountKey: string | null;
azureBlobContainerName: string | null;
};
export type FileUploadNode = Node<FileUploadNodeData, "fileUpload">;
@@ -20,10 +23,13 @@ export const fileUploadNodeDefaultData: FileUploadNodeData = {
storageType: "s3",
label: "",
path: "",
s3Bucket: "",
awsAccessKeyId: "",
awsSecretAccessKey: "",
regionName: "",
s3Bucket: null,
awsAccessKeyId: null,
awsSecretAccessKey: null,
regionName: null,
azureStorageAccountName: null,
azureStorageAccountKey: null,
azureBlobContainerName: null,
continueOnFailure: false,
model: null,
} as const;


@@ -522,10 +522,13 @@ function convertToNode(
...commonData,
path: block.path,
storageType: block.storage_type,
s3Bucket: block.s3_bucket,
awsAccessKeyId: block.aws_access_key_id,
awsSecretAccessKey: block.aws_secret_access_key,
regionName: block.region_name,
s3Bucket: block.s3_bucket ?? "",
awsAccessKeyId: block.aws_access_key_id ?? "",
awsSecretAccessKey: block.aws_secret_access_key ?? "",
regionName: block.region_name ?? "",
azureStorageAccountName: block.azure_storage_account_name ?? "",
azureStorageAccountKey: block.azure_storage_account_key ?? "",
azureBlobContainerName: block.azure_blob_container_name ?? "",
},
};
}
@@ -1249,10 +1252,13 @@ function getWorkflowBlock(node: WorkflowBlockNode): BlockYAML {
block_type: "file_upload",
path: node.data.path,
storage_type: node.data.storageType,
s3_bucket: node.data.s3Bucket,
aws_access_key_id: node.data.awsAccessKeyId,
aws_secret_access_key: node.data.awsSecretAccessKey,
region_name: node.data.regionName,
s3_bucket: node.data.s3Bucket ?? "",
aws_access_key_id: node.data.awsAccessKeyId ?? "",
aws_secret_access_key: node.data.awsSecretAccessKey ?? "",
region_name: node.data.regionName ?? "",
azure_storage_account_name: node.data.azureStorageAccountName ?? "",
azure_storage_account_key: node.data.azureStorageAccountKey ?? "",
azure_blob_container_name: node.data.azureBlobContainerName ?? "",
};
}
case "fileParser": {
@@ -2013,10 +2019,13 @@ function convertBlocksToBlockYAML(
block_type: "file_upload",
path: block.path,
storage_type: block.storage_type,
s3_bucket: block.s3_bucket,
aws_access_key_id: block.aws_access_key_id,
aws_secret_access_key: block.aws_secret_access_key,
region_name: block.region_name,
s3_bucket: block.s3_bucket ?? "",
aws_access_key_id: block.aws_access_key_id ?? "",
aws_secret_access_key: block.aws_secret_access_key ?? "",
region_name: block.region_name ?? "",
azure_storage_account_name: block.azure_storage_account_name ?? "",
azure_storage_account_key: block.azure_storage_account_key ?? "",
azure_blob_container_name: block.azure_blob_container_name ?? "",
};
return blockYaml;
}


@@ -331,11 +331,14 @@ export type UploadToS3Block = WorkflowBlockBase & {
export type FileUploadBlock = WorkflowBlockBase & {
block_type: "file_upload";
path: string;
storage_type: string;
s3_bucket: string;
region_name: string;
aws_access_key_id: string;
aws_secret_access_key: string;
storage_type: "s3" | "azure";
s3_bucket: string | null;
region_name: string | null;
aws_access_key_id: string | null;
aws_secret_access_key: string | null;
azure_storage_account_name: string | null;
azure_storage_account_key: string | null;
azure_blob_container_name: string | null;
};
export type SendEmailBlock = WorkflowBlockBase & {


@@ -290,6 +290,9 @@ export type FileUploadBlockYAML = BlockYAMLBase & {
region_name: string;
aws_access_key_id: string;
aws_secret_access_key: string;
azure_storage_account_name?: string | null;
azure_storage_account_key?: string | null;
azure_blob_container_name?: string | null;
};
export type SendEmailBlockYAML = BlockYAMLBase & {


@@ -76,6 +76,10 @@ class Settings(BaseSettings):
MAX_UPLOAD_FILE_SIZE: int = 10 * 1024 * 1024 # 10 MB
PRESIGNED_URL_EXPIRATION: int = 60 * 60 * 24 # 24 hours
# Azure Blob Storage settings
AZURE_STORAGE_ACCOUNT_NAME: str | None = None
AZURE_STORAGE_ACCOUNT_KEY: str | None = None
SKYVERN_TELEMETRY: bool = True
ANALYTICS_ID: str = "anonymous"
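Since Settings is a pydantic BaseSettings class, the two new fields are read from same-named environment variables (or a .env file) at startup. A minimal sketch of enabling the server-level Azure client that way; the account name and key below are placeholders:

import os

# Placeholder values; set these in the environment before the Skyvern server starts.
os.environ["AZURE_STORAGE_ACCOUNT_NAME"] = "examplestorageacct"
os.environ["AZURE_STORAGE_ACCOUNT_KEY"] = "<storage-account-key>"

With both set, WorkflowContextManager (further down) builds an AsyncAzureClient at startup; with either unset, azure_client stays None and Azure secret parameters are rejected at registration time.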


@@ -19,6 +19,7 @@ AUTO_COMPLETION_POTENTIAL_VALUES_COUNT = 3
DROPDOWN_MENU_MAX_DISTANCE = 100
BROWSER_DOWNLOADING_SUFFIX = ".crdownload"
MAX_UPLOAD_FILE_COUNT = 50
AZURE_BLOB_STORAGE_MAX_UPLOAD_FILE_COUNT = 50
DEFAULT_MAX_SCREENSHOT_SCROLLS = 3
# reserved fields for navigation payload

View file

@@ -0,0 +1,58 @@
import structlog
from azure.core.exceptions import ResourceExistsError
from azure.identity.aio import DefaultAzureCredential
from azure.keyvault.secrets.aio import SecretClient
from azure.storage.blob.aio import BlobServiceClient
LOG = structlog.get_logger()
class AsyncAzureClient:
def __init__(self, account_name: str, account_key: str):
self.account_name = account_name
self.account_key = account_key
self.blob_service_client = BlobServiceClient(
account_url=f"https://{account_name}.blob.core.windows.net",
credential=account_key,
)
self.credential = DefaultAzureCredential()
    async def get_secret(self, secret_name: str) -> str | None:
        # Key Vault URLs have the form https://<vault-name>.vault.azure.net.
        # NOTE: this assumes a vault named after the storage account; pass the
        # vault name in separately if yours differs.
        key_vault_url = f"https://{self.account_name}.vault.azure.net"
        try:
            # The shared DefaultAzureCredential is closed once in close(), not here,
            # so get_secret can be called more than once per client.
            async with SecretClient(vault_url=key_vault_url, credential=self.credential) as secret_client:
                secret = await secret_client.get_secret(secret_name)
                return secret.value
        except Exception:
            LOG.exception("Failed to get secret from Azure Key Vault.", secret_name=secret_name)
            return None
async def upload_file_from_path(self, container_name: str, blob_name: str, file_path: str) -> None:
try:
container_client = self.blob_service_client.get_container_client(container_name)
            # Create the container if it doesn't already exist; any other failure
            # propagates to the outer handler below.
            try:
                await container_client.create_container()
            except ResourceExistsError:
                LOG.info("Azure container already exists", container_name=container_name)
with open(file_path, "rb") as data:
await container_client.upload_blob(name=blob_name, data=data, overwrite=True)
LOG.info("File uploaded to Azure Blob Storage", container_name=container_name, blob_name=blob_name)
        except Exception:
            LOG.exception(
                "Failed to upload file to Azure Blob Storage",
                container_name=container_name,
                blob_name=blob_name,
            )
            raise
async def close(self) -> None:
await self.blob_service_client.close()
await self.credential.close()
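A minimal usage sketch for the client above, not part of the diff; the storage account, container, and file path are placeholders:

import asyncio

from skyvern.forge.sdk.api.azure import AsyncAzureClient


async def main() -> None:
    client = AsyncAzureClient(account_name="examplestorageacct", account_key="<storage-account-key>")
    try:
        # Creates the container on first use, then uploads the local file as a blob.
        await client.upload_file_from_path(
            container_name="skyvern-uploads",
            blob_name="demo/report.pdf",
            file_path="/tmp/report.pdf",
        )
    finally:
        await client.close()


asyncio.run(main())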


@@ -14,6 +14,7 @@ from skyvern.exceptions import (
)
from skyvern.forge import app
from skyvern.forge.sdk.api.aws import AsyncAWSClient
from skyvern.forge.sdk.api.azure import AsyncAzureClient
from skyvern.forge.sdk.db.enums import OrganizationAuthTokenType
from skyvern.forge.sdk.schemas.credentials import PasswordCredential
from skyvern.forge.sdk.schemas.organizations import Organization
@@ -24,6 +25,7 @@ from skyvern.forge.sdk.workflow.exceptions import OutputParameterKeyCollisionErr
from skyvern.forge.sdk.workflow.models.parameter import (
PARAMETER_TYPE,
AWSSecretParameter,
AzureSecretParameter,
BitwardenCreditCardDataParameter,
BitwardenLoginCredentialParameter,
BitwardenSensitiveInformationParameter,
@@ -50,6 +52,7 @@ class WorkflowRunContext:
async def init(
cls,
aws_client: AsyncAWSClient,
azure_client: AsyncAzureClient | None,
organization: Organization,
workflow_parameter_tuples: list[tuple[WorkflowParameter, "WorkflowRunParameter"]],
workflow_output_parameters: list[OutputParameter],
@@ -63,7 +66,7 @@
],
) -> Self:
# key is label name
workflow_run_context = cls(aws_client=aws_client)
workflow_run_context = cls(aws_client=aws_client, azure_client=azure_client)
for parameter, run_parameter in workflow_parameter_tuples:
if parameter.workflow_parameter_type == WorkflowParameterType.CREDENTIAL_ID:
await workflow_run_context.register_secret_workflow_parameter_value(
@@ -88,6 +91,8 @@
for secrete_parameter in secret_parameters:
if isinstance(secrete_parameter, AWSSecretParameter):
await workflow_run_context.register_aws_secret_parameter_value(secrete_parameter)
elif isinstance(secrete_parameter, AzureSecretParameter):
await workflow_run_context.register_azure_secret_parameter_value(secrete_parameter)
elif isinstance(secrete_parameter, CredentialParameter):
await workflow_run_context.register_credential_parameter_value(secrete_parameter, organization)
elif isinstance(secrete_parameter, OnePasswordCredentialParameter):
@@ -115,12 +120,13 @@
return workflow_run_context
def __init__(self, aws_client: AsyncAWSClient) -> None:
def __init__(self, aws_client: AsyncAWSClient, azure_client: AsyncAzureClient | None) -> None:
self.blocks_metadata: dict[str, BlockMetadata] = {}
self.parameters: dict[str, PARAMETER_TYPE] = {}
self.values: dict[str, Any] = {}
self.secrets: dict[str, Any] = {}
self._aws_client = aws_client
self._azure_client = azure_client
def get_parameter(self, key: str) -> Parameter:
return self.parameters[key]
@@ -316,6 +322,23 @@
self.values[parameter.key] = random_secret_id
self.parameters[parameter.key] = parameter
async def register_azure_secret_parameter_value(
self,
parameter: AzureSecretParameter,
) -> None:
# If the parameter is an Azure secret, fetch the secret value and store it in the secrets dict
# The value of the parameter will be the random secret id with format `secret_<uuid>`.
# We'll replace the random secret id with the actual secret value when we need to use it.
if self._azure_client is None:
LOG.error("Azure client not initialized, cannot register Azure secret parameter value")
raise ValueError("Azure client not initialized")
secret_value = await self._azure_client.get_secret(parameter.azure_key)
if secret_value is not None:
random_secret_id = self.generate_random_secret_id()
self.secrets[random_secret_id] = secret_value
self.values[parameter.key] = random_secret_id
self.parameters[parameter.key] = parameter
async def register_onepassword_credential_parameter_value(
self, parameter: OnePasswordCredentialParameter, organization: Organization
) -> None:
@@ -801,6 +824,7 @@
parameter,
(
AWSSecretParameter,
AzureSecretParameter,
BitwardenLoginCredentialParameter,
BitwardenCreditCardDataParameter,
BitwardenSensitiveInformationParameter,
@@ -823,6 +847,7 @@
class WorkflowContextManager:
aws_client: AsyncAWSClient
azure_client: AsyncAzureClient | None
workflow_run_contexts: dict[str, WorkflowRunContext]
parameters: dict[str, PARAMETER_TYPE]
@@ -831,6 +856,12 @@
def __init__(self) -> None:
self.aws_client = AsyncAWSClient()
self.azure_client = None
if settings.AZURE_STORAGE_ACCOUNT_NAME and settings.AZURE_STORAGE_ACCOUNT_KEY:
self.azure_client = AsyncAzureClient(
account_name=settings.AZURE_STORAGE_ACCOUNT_NAME,
account_key=settings.AZURE_STORAGE_ACCOUNT_KEY,
)
self.workflow_run_contexts = {}
def _validate_workflow_run_context(self, workflow_run_id: str) -> None:
@@ -854,6 +885,7 @@
) -> WorkflowRunContext:
workflow_run_context = await WorkflowRunContext.init(
self.aws_client,
self.azure_client,
organization,
workflow_parameter_tuples,
workflow_output_parameters,
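register_azure_secret_parameter_value follows the same masking pattern as the existing AWS path: the workflow only ever sees a random placeholder, and the real value is swapped back in just before use. An illustrative sketch of the pattern, with made-up values:

import uuid

secrets: dict[str, str] = {}

# What registration stores after fetching the secret from Key Vault:
placeholder = f"secret_{uuid.uuid4()}"
secrets[placeholder] = "value-fetched-from-key-vault"

# Blocks resolve the placeholder only at execution time, so logs, prompts,
# and block definitions see just "secret_<uuid>".
assert secrets[placeholder] == "value-fetched-from-key-vault"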


@@ -31,7 +31,11 @@ from pypdf import PdfReader
from pypdf.errors import PdfReadError
from skyvern.config import settings
from skyvern.constants import GET_DOWNLOADED_FILES_TIMEOUT, MAX_UPLOAD_FILE_COUNT
from skyvern.constants import (
AZURE_BLOB_STORAGE_MAX_UPLOAD_FILE_COUNT,
GET_DOWNLOADED_FILES_TIMEOUT,
MAX_UPLOAD_FILE_COUNT,
)
from skyvern.exceptions import (
ContextParameterValueNotFound,
MissingBrowserState,
@@ -43,6 +47,7 @@ from skyvern.exceptions import (
from skyvern.forge import app
from skyvern.forge.prompts import prompt_engine
from skyvern.forge.sdk.api.aws import AsyncAWSClient
from skyvern.forge.sdk.api.azure import AsyncAzureClient
from skyvern.forge.sdk.api.files import (
calculate_sha256_for_file,
create_named_temporary_file,
@@ -1872,6 +1877,9 @@ class FileUploadBlock(Block):
aws_access_key_id: str | None = None
aws_secret_access_key: str | None = None
region_name: str | None = None
azure_storage_account_name: str | None = None
azure_storage_account_key: str | None = None
azure_blob_container_name: str | None = None
path: str | None = None
def get_all_parameters(
@@ -1893,6 +1901,15 @@
if self.aws_secret_access_key and workflow_run_context.has_parameter(self.aws_secret_access_key):
parameters.append(workflow_run_context.get_parameter(self.aws_secret_access_key))
if self.azure_storage_account_name and workflow_run_context.has_parameter(self.azure_storage_account_name):
parameters.append(workflow_run_context.get_parameter(self.azure_storage_account_name))
if self.azure_storage_account_key and workflow_run_context.has_parameter(self.azure_storage_account_key):
parameters.append(workflow_run_context.get_parameter(self.azure_storage_account_key))
if self.azure_blob_container_name and workflow_run_context.has_parameter(self.azure_blob_container_name):
parameters.append(workflow_run_context.get_parameter(self.azure_blob_container_name))
return parameters
def format_potential_template_parameters(self, workflow_run_context: WorkflowRunContext) -> None:
@@ -1910,6 +1927,18 @@
self.aws_secret_access_key = self.format_block_parameter_template_from_workflow_run_context(
self.aws_secret_access_key, workflow_run_context
)
if self.azure_storage_account_name:
self.azure_storage_account_name = self.format_block_parameter_template_from_workflow_run_context(
self.azure_storage_account_name, workflow_run_context
)
if self.azure_storage_account_key:
self.azure_storage_account_key = self.format_block_parameter_template_from_workflow_run_context(
self.azure_storage_account_key, workflow_run_context
)
if self.azure_blob_container_name:
self.azure_blob_container_name = self.format_block_parameter_template_from_workflow_run_context(
self.azure_blob_container_name, workflow_run_context
)
def _get_s3_uri(self, workflow_run_id: str, path: str) -> str:
s3_suffix = f"{workflow_run_id}/{uuid.uuid4()}_{Path(path).name}"
@ -1917,6 +1946,10 @@ class FileUploadBlock(Block):
        if not self.path:
            return f"s3://{self.s3_bucket}/{s3_suffix}"
        return f"s3://{self.s3_bucket}/{self.path}/{s3_suffix}"
    def _get_azure_blob_name(self, workflow_run_id: str, file_path: str) -> str:
        # Scope blobs by workflow run plus a UUID so repeated uploads never collide.
        return f"{workflow_run_id}/{uuid.uuid4()}_{Path(file_path).name}"

    def _get_azure_blob_uri(self, blob_name: str) -> str:
        return f"https://{self.azure_storage_account_name}.blob.core.windows.net/{self.azure_blob_container_name}/{blob_name}"
async def execute(
self,
workflow_run_id: str,
@@ -1930,12 +1963,29 @@
# get all parameters into a dictionary
# data validate before uploading
missing_parameters = []
if self.storage_type == FileStorageType.S3:
if not self.s3_bucket:
missing_parameters.append("s3_bucket")
if not self.aws_access_key_id:
missing_parameters.append("aws_access_key_id")
if not self.aws_secret_access_key:
missing_parameters.append("aws_secret_access_key")
elif self.storage_type == FileStorageType.AZURE:
            if not self.azure_storage_account_name:
                missing_parameters.append("azure_storage_account_name")
            if not self.azure_storage_account_key:
                missing_parameters.append("azure_storage_account_key")
            if not self.azure_blob_container_name:
                missing_parameters.append("azure_blob_container_name")
else:
return await self.build_block_result(
success=False,
failure_reason=f"Unsupported storage type: {self.storage_type}",
output_parameter_value=None,
status=BlockStatus.failed,
workflow_run_block_id=workflow_run_block_id,
organization_id=organization_id,
)
if missing_parameters:
return await self.build_block_result(
@@ -1961,57 +2011,87 @@
download_files_path = str(get_path_for_workflow_download_directory(workflow_run_id).absolute())
s3_uris = []
uploaded_uris = []
try:
workflow_run_context = self.get_workflow_run_context(workflow_run_id)
files_to_upload = []
if os.path.isdir(download_files_path):
files = os.listdir(download_files_path)
max_file_count = (
MAX_UPLOAD_FILE_COUNT
if self.storage_type == FileStorageType.S3
else AZURE_BLOB_STORAGE_MAX_UPLOAD_FILE_COUNT
)
if len(files) > max_file_count:
raise ValueError(f"Too many files in the directory, not uploading. Max: {max_file_count}")
for file in files:
if os.path.isdir(os.path.join(download_files_path, file)):
LOG.warning("FileUploadBlock: Skipping directory", file=file)
continue
files_to_upload.append(os.path.join(download_files_path, file))
else:
files_to_upload.append(download_files_path)
if self.storage_type == FileStorageType.S3:
actual_aws_access_key_id = (
workflow_run_context.get_original_secret_value_or_none(self.aws_access_key_id) or self.aws_access_key_id
workflow_run_context.get_original_secret_value_or_none(self.aws_access_key_id)
or self.aws_access_key_id
)
actual_aws_secret_access_key = (
workflow_run_context.get_original_secret_value_or_none(self.aws_secret_access_key)
or self.aws_secret_access_key
)
client = AsyncAWSClient(
aws_client = AsyncAWSClient(
aws_access_key_id=actual_aws_access_key_id,
aws_secret_access_key=actual_aws_secret_access_key,
region_name=self.region_name,
)
# is the file path a file or a directory?
if os.path.isdir(download_files_path):
# get all files in the directory, if there are more than 25 files, we will not upload them
files = os.listdir(download_files_path)
if len(files) > MAX_UPLOAD_FILE_COUNT:
raise ValueError("Too many files in the directory, not uploading")
for file in files:
# if the file is a directory, we will not upload it
if os.path.isdir(os.path.join(download_files_path, file)):
LOG.warning("FileUploadBlock: Skipping directory", file=file)
continue
file_path = os.path.join(download_files_path, file)
for file_path in files_to_upload:
s3_uri = self._get_s3_uri(workflow_run_id, file_path)
s3_uris.append(s3_uri)
await client.upload_file_from_path(uri=s3_uri, file_path=file_path, raise_exception=True)
uploaded_uris.append(s3_uri)
await aws_client.upload_file_from_path(uri=s3_uri, file_path=file_path, raise_exception=True)
LOG.info("FileUploadBlock: File(s) uploaded to S3", file_path=self.path)
elif self.storage_type == FileStorageType.AZURE:
actual_azure_storage_account_name = (
workflow_run_context.get_original_secret_value_or_none(self.azure_storage_account_name)
or self.azure_storage_account_name
)
actual_azure_storage_account_key = (
workflow_run_context.get_original_secret_value_or_none(self.azure_storage_account_key)
or self.azure_storage_account_key
)
azure_client = AsyncAzureClient(
account_name=actual_azure_storage_account_name or "",
account_key=actual_azure_storage_account_key or "",
)
                for file_path in files_to_upload:
                    # Use one blob name for both the upload and the recorded URI so they match.
                    blob_name = self._get_azure_blob_name(workflow_run_id, file_path)
                    uploaded_uris.append(self._get_azure_blob_uri(blob_name))
                    await azure_client.upload_file_from_path(
                        container_name=self.azure_blob_container_name or "", blob_name=blob_name, file_path=file_path
                    )
                # Release the aiohttp sessions held by the Azure client.
                await azure_client.close()
LOG.info("FileUploadBlock: File(s) uploaded to Azure Blob Storage", file_path=self.path)
else:
s3_uri = self._get_s3_uri(workflow_run_id, download_files_path)
s3_uris.append(s3_uri)
await client.upload_file_from_path(uri=s3_uri, file_path=download_files_path, raise_exception=True)
# This case should ideally be caught by the initial validation
raise ValueError(f"Unsupported storage type: {self.storage_type}")
except Exception as e:
LOG.exception("FileUploadBlock: Failed to upload file to S3", file_path=self.path)
LOG.exception("FileUploadBlock: Failed to upload file", file_path=self.path, storage_type=self.storage_type)
return await self.build_block_result(
success=False,
failure_reason=f"Failed to upload file to S3: {str(e)}",
failure_reason=f"Failed to upload file to {self.storage_type}: {str(e)}",
output_parameter_value=None,
status=BlockStatus.failed,
workflow_run_block_id=workflow_run_block_id,
organization_id=organization_id,
)
LOG.info("FileUploadBlock: File(s) uploaded to S3", file_path=self.path)
await self.record_output_parameter_value(workflow_run_context, workflow_run_id, s3_uris)
await self.record_output_parameter_value(workflow_run_context, workflow_run_id, uploaded_uris)
return await self.build_block_result(
success=True,
failure_reason=None,
output_parameter_value=s3_uris,
output_parameter_value=uploaded_uris,
status=BlockStatus.completed,
workflow_run_block_id=workflow_run_block_id,
organization_id=organization_id,
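On success the block now records uploaded_uris, the list of destination URIs, as its output parameter value regardless of backend. Illustratively, for an Azure run (account, container, and file names are placeholders):

output_parameter_value = [
    "https://examplestorageacct.blob.core.windows.net/skyvern-uploads/wr_123/<uuid>_report.pdf",
]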


@@ -3,3 +3,4 @@ from enum import StrEnum
class FileStorageType(StrEnum):
S3 = "s3"
AZURE = "azure"
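Because FileStorageType is a StrEnum, its members compare equal to the plain strings the frontend sends ("s3", "azure"), which is what lets blocks.py compare self.storage_type against the enum no matter which form it holds. A self-contained illustration, re-declaring the enum locally:

from enum import StrEnum


class FileStorageType(StrEnum):
    S3 = "s3"
    AZURE = "azure"


# The "azure" string sent by the workflow editor matches the enum member directly.
assert FileStorageType.AZURE == "azure"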


@@ -21,6 +21,7 @@ class ParameterType(StrEnum):
ONEPASSWORD = "onepassword"
OUTPUT = "output"
CREDENTIAL = "credential"
AZURE_SECRET = "azure_secret"
class Parameter(BaseModel, abc.ABC):
@@ -49,6 +50,18 @@
deleted_at: datetime | None = None
class AzureSecretParameter(Parameter):
parameter_type: Literal[ParameterType.AZURE_SECRET] = ParameterType.AZURE_SECRET
azure_secret_parameter_id: str
workflow_id: str
azure_key: str
created_at: datetime
modified_at: datetime
deleted_at: datetime | None = None
class BitwardenLoginCredentialParameter(Parameter):
parameter_type: Literal[ParameterType.BITWARDEN_LOGIN_CREDENTIAL] = ParameterType.BITWARDEN_LOGIN_CREDENTIAL
# parameter fields
@@ -214,6 +227,7 @@ ParameterSubclasses = Union[
WorkflowParameter,
ContextParameter,
AWSSecretParameter,
AzureSecretParameter,
BitwardenLoginCredentialParameter,
BitwardenSensitiveInformationParameter,
BitwardenCreditCardDataParameter,
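A sketch of constructing an AzureSecretParameter, assuming the Parameter base class supplies the key field used for template lookups; the ids and secret name are invented:

from datetime import datetime, timezone

param = AzureSecretParameter(
    azure_secret_parameter_id="azure_secret_param_123",  # hypothetical id
    workflow_id="wf_456",  # hypothetical id
    key="db_password",  # parameter key referenced by blocks
    azure_key="db-password",  # name of the secret inside Key Vault
    created_at=datetime.now(timezone.utc),
    modified_at=datetime.now(timezone.utc),
)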


@@ -218,6 +218,9 @@ class FileUploadBlockYAML(BlockYAML):
aws_access_key_id: str | None = None
aws_secret_access_key: str | None = None
region_name: str | None = None
azure_storage_account_name: str | None = None
azure_storage_account_key: str | None = None
azure_blob_container_name: str | None = None
path: str | None = None
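Put together, a workflow can now target Azure by setting storage_type to "azure" and supplying the three new fields, either as literals or as references to secret parameters. A hedged sketch of the block's payload as a Python dict; the label and values are invented:

file_upload_block = {
    "block_type": "file_upload",
    "label": "upload_downloaded_files",
    "storage_type": "azure",
    "azure_storage_account_name": "examplestorageacct",
    "azure_storage_account_key": "AZURE_STORAGE_KEY",  # may be a secret parameter key
    "azure_blob_container_name": "skyvern-uploads",
}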


@@ -1902,6 +1902,9 @@
aws_access_key_id=block_yaml.aws_access_key_id,
aws_secret_access_key=block_yaml.aws_secret_access_key,
region_name=block_yaml.region_name,
azure_storage_account_name=block_yaml.azure_storage_account_name,
azure_storage_account_key=block_yaml.azure_storage_account_key,
azure_blob_container_name=block_yaml.azure_blob_container_name,
path=block_yaml.path,
continue_on_failure=block_yaml.continue_on_failure,
)