Rebuild fern client sdk to 1.0.6 (#4331)

This commit is contained in:
Marc Kelechava 2025-12-19 12:16:02 -08:00 committed by GitHub
parent 08ca5a0b45
commit 9788138861
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
50 changed files with 2169 additions and 909 deletions

View file

@ -172,6 +172,8 @@ navigation:
- POST /v1/runs/{run_id}/cancel
- POST /v1/runs/{run_id}/retry_webhook
- POST /v1/run/tasks/login
- endpoint: POST /v1/run/tasks/download_files
hidden: true
- GET /v1/runs/{run_id}/timeline
- section: Workflows
contents:

View file

@ -1,6 +1,6 @@
[project]
name = "skyvern"
version = "1.0.3"
version = "1.0.6"
description = ""
authors = [{ name = "Skyvern AI", email = "info@skyvern.com" }]
requires-python = ">=3.11,<3.14"

View file

@ -12,6 +12,7 @@ The Skyvern TypeScript library provides convenient access to the Skyvern APIs fr
- [Usage](#usage)
- [Request and Response Types](#request-and-response-types)
- [Exception Handling](#exception-handling)
- [File Uploads](#file-uploads)
- [Advanced](#advanced)
- [Additional Headers](#additional-headers)
- [Additional Query String Parameters](#additional-query-string-parameters)
@ -56,7 +57,7 @@ following namespace:
```typescript
import { Skyvern } from "@skyvern/client";
const request: Skyvern.SetWorkflowTemplateStatusV1WorkflowsWorkflowPermanentIdTemplatePutRequest = {
const request: Skyvern.RunTaskRequest = {
...
};
```
@ -81,6 +82,50 @@ try {
}
```
## File Uploads
You can upload files using the client:
```typescript
import * as fs from "fs";
import { SkyvernClient } from "@skyvern/client";
const client = new SkyvernClient({ apiKey: "YOUR_API_KEY" });
await client.uploadFile({
file: fs.createReadStream("/path/to/your/file")
});
```
The client accepts a variety of types for file upload parameters:
* Stream types: `fs.ReadStream`, `stream.Readable`, and `ReadableStream`
* Buffered types: `Buffer`, `Blob`, `File`, `ArrayBuffer`, `ArrayBufferView`, and `Uint8Array`
### Metadata
You can configure metadata when uploading a file:
```typescript
const file: Uploadable.WithMetadata = {
data: createReadStream("path/to/file"),
filename: "my-file", // optional
contentType: "audio/mpeg", // optional
contentLength: 1949, // optional
};
```
Alternatively, you can upload a file directly from a file path:
```typescript
const file : Uploadable.FromPath = {
path: "path/to/file",
filename: "my-file", // optional
contentType: "audio/mpeg", // optional
contentLength: 1949, // optional
};
```
The metadata is used to set the `Content-Length`, `Content-Type`, and `Content-Disposition` headers. If not provided, the client will attempt to determine them automatically.
For example, `fs.ReadStream` has a `path` property which the SDK uses to retrieve the file size from the filesystem without loading it into memory.
## Advanced
### Additional Headers

File diff suppressed because it is too large Load diff

View file

@ -1,6 +1,6 @@
{
"name": "@skyvern/client",
"version": "1.0.3",
"version": "1.0.6",
"private": false,
"repository": {
"type": "git",

View file

@ -68,83 +68,6 @@ await client.runSdkAction({
</details>
##
## Workflows
<details><summary><code>client.workflows.<a href="/src/api/resources/workflows/client/Client.ts">setWorkflowTemplateStatus</a>(workflowPermanentId, { ...params }) -> Record<string, unknown></code></summary>
<dl>
<dd>
#### 📝 Description
<dl>
<dd>
<dl>
<dd>
Set or unset a workflow as a template.
Template status is stored at the workflow_permanent_id level (not per-version),
meaning all versions of a workflow share the same template status.
</dd>
</dl>
</dd>
</dl>
#### 🔌 Usage
<dl>
<dd>
<dl>
<dd>
```typescript
await client.workflows.setWorkflowTemplateStatus("workflow_permanent_id", {
is_template: true
});
```
</dd>
</dl>
</dd>
</dl>
#### ⚙️ Parameters
<dl>
<dd>
<dl>
<dd>
**workflowPermanentId:** `string`
</dd>
</dl>
<dl>
<dd>
**request:** `Skyvern.SetWorkflowTemplateStatusV1WorkflowsWorkflowPermanentIdTemplatePutRequest`
</dd>
</dl>
<dl>
<dd>
**requestOptions:** `Workflows.RequestOptions`
</dd>
</dl>
</dd>
</dl>
</dd>
</dl>
</details>
## Scripts
<details><summary><code>client.scripts.<a href="/src/api/resources/scripts/client/Client.ts">runScript</a>(scriptId) -> unknown</code></summary>
<dl>

View file

@ -2,7 +2,6 @@
import * as Skyvern from "./api/index.js";
import { Scripts } from "./api/resources/scripts/client/Client.js";
import { Workflows } from "./api/resources/workflows/client/Client.js";
import type { BaseClientOptions, BaseRequestOptions } from "./BaseClient.js";
import { mergeHeaders, mergeOnlyDefinedHeaders } from "./core/headers.js";
import * as core from "./core/index.js";
@ -17,7 +16,6 @@ export declare namespace SkyvernClient {
export class SkyvernClient {
protected readonly _options: SkyvernClient.Options;
protected _workflows: Workflows | undefined;
protected _scripts: Scripts | undefined;
constructor(_options: SkyvernClient.Options = {}) {
@ -28,8 +26,8 @@ export class SkyvernClient {
"x-api-key": _options?.apiKey,
"X-Fern-Language": "JavaScript",
"X-Fern-SDK-Name": "@skyvern/client",
"X-Fern-SDK-Version": "1.0.3",
"User-Agent": "@skyvern/client/1.0.3",
"X-Fern-SDK-Version": "1.0.6",
"User-Agent": "@skyvern/client/1.0.6",
"X-Fern-Runtime": core.RUNTIME.type,
"X-Fern-Runtime-Version": core.RUNTIME.version,
},
@ -38,10 +36,6 @@ export class SkyvernClient {
};
}
public get workflows(): Workflows {
return (this._workflows ??= new Workflows(this._options));
}
public get scripts(): Scripts {
return (this._scripts ??= new Scripts(this._options));
}
@ -948,6 +942,7 @@ export class SkyvernClient {
* Retry sending the webhook for a run
*
* @param {string} runId - The id of the task run or the workflow run.
* @param {Skyvern.RetryRunWebhookRequest} request
* @param {SkyvernClient.RequestOptions} requestOptions - Request-specific configuration.
*
* @throws {@link Skyvern.UnprocessableEntityError}
@ -957,13 +952,15 @@ export class SkyvernClient {
*/
public retryRunWebhook(
runId: string,
request?: Skyvern.RetryRunWebhookRequest,
requestOptions?: SkyvernClient.RequestOptions,
): core.HttpResponsePromise<unknown> {
return core.HttpResponsePromise.fromPromise(this.__retryRunWebhook(runId, requestOptions));
return core.HttpResponsePromise.fromPromise(this.__retryRunWebhook(runId, request, requestOptions));
}
private async __retryRunWebhook(
runId: string,
request?: Skyvern.RetryRunWebhookRequest,
requestOptions?: SkyvernClient.RequestOptions,
): Promise<core.WithRawResponse<unknown>> {
const _headers: core.Fetcher.Args["headers"] = mergeHeaders(
@ -980,7 +977,10 @@ export class SkyvernClient {
),
method: "POST",
headers: _headers,
contentType: "application/json",
queryParameters: requestOptions?.queryParams,
requestType: "json",
body: request != null ? request : undefined,
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
@ -1102,6 +1102,91 @@ export class SkyvernClient {
}
}
/**
* @param {Skyvern.BodyUploadFileV1UploadFilePost} request
* @param {SkyvernClient.RequestOptions} requestOptions - Request-specific configuration.
*
* @throws {@link Skyvern.UnprocessableEntityError}
*
* @example
* import { createReadStream } from "fs";
* await client.uploadFile({
* file: fs.createReadStream("/path/to/your/file")
* })
*/
public uploadFile(
request: Skyvern.BodyUploadFileV1UploadFilePost,
requestOptions?: SkyvernClient.RequestOptions,
): core.HttpResponsePromise<Skyvern.UploadFileResponse> {
return core.HttpResponsePromise.fromPromise(this.__uploadFile(request, requestOptions));
}
private async __uploadFile(
request: Skyvern.BodyUploadFileV1UploadFilePost,
requestOptions?: SkyvernClient.RequestOptions,
): Promise<core.WithRawResponse<Skyvern.UploadFileResponse>> {
const _request = await core.newFormData();
await _request.appendFile("file", request.file);
const _maybeEncodedRequest = await _request.getRequest();
const _headers: core.Fetcher.Args["headers"] = mergeHeaders(
this._options?.headers,
mergeOnlyDefinedHeaders({
"x-api-key": requestOptions?.apiKey ?? this._options?.apiKey,
..._maybeEncodedRequest.headers,
}),
requestOptions?.headers,
);
const _response = await core.fetcher({
url: core.url.join(
(await core.Supplier.get(this._options.baseUrl)) ??
(await core.Supplier.get(this._options.environment)) ??
environments.SkyvernEnvironment.Cloud,
"v1/upload_file",
),
method: "POST",
headers: _headers,
queryParameters: requestOptions?.queryParams,
requestType: "file",
duplex: _maybeEncodedRequest.duplex,
body: _maybeEncodedRequest.body,
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
});
if (_response.ok) {
return { data: _response.body as Skyvern.UploadFileResponse, rawResponse: _response.rawResponse };
}
if (_response.error.reason === "status-code") {
switch (_response.error.statusCode) {
case 422:
throw new Skyvern.UnprocessableEntityError(_response.error.body as unknown, _response.rawResponse);
default:
throw new errors.SkyvernError({
statusCode: _response.error.statusCode,
body: _response.error.body,
rawResponse: _response.rawResponse,
});
}
}
switch (_response.error.reason) {
case "non-json":
throw new errors.SkyvernError({
statusCode: _response.error.statusCode,
body: _response.error.rawBody,
rawResponse: _response.rawResponse,
});
case "timeout":
throw new errors.SkyvernTimeoutError("Timeout exceeded when calling POST /v1/upload_file.");
case "unknown":
throw new errors.SkyvernError({
message: _response.error.errorMessage,
rawResponse: _response.rawResponse,
});
}
}
/**
* Get all browser profiles for the organization
*

View file

@ -0,0 +1,13 @@
// This file was auto-generated by Fern from our API Definition.
import type * as core from "../../../core/index.js";
/**
* @example
* {
* file: fs.createReadStream("/path/to/your/file")
* }
*/
// Request body for POST /v1/upload_file.
export interface BodyUploadFileV1UploadFilePost {
    /** The file to upload; accepts a stream, buffer, blob, path descriptor, or file-with-metadata (see `Uploadable`). */
    file: core.file.Uploadable;
}

View file

@ -1,3 +1,4 @@
export type { BodyUploadFileV1UploadFilePost } from "./BodyUploadFileV1UploadFilePost.js";
export type { CreateBrowserProfileRequest } from "./CreateBrowserProfileRequest.js";
export type { CreateBrowserSessionRequest } from "./CreateBrowserSessionRequest.js";
export type { CreateCredentialRequest } from "./CreateCredentialRequest.js";

View file

@ -1,3 +1 @@
export * as scripts from "./scripts/index.js";
export * from "./workflows/client/requests/index.js";
export * as workflows from "./workflows/index.js";

View file

@ -1,112 +0,0 @@
// This file was auto-generated by Fern from our API Definition.
import type { BaseClientOptions, BaseRequestOptions } from "../../../../BaseClient.js";
import { mergeHeaders, mergeOnlyDefinedHeaders } from "../../../../core/headers.js";
import * as core from "../../../../core/index.js";
import * as environments from "../../../../environments.js";
import * as errors from "../../../../errors/index.js";
import * as Skyvern from "../../../index.js";
export declare namespace Workflows {
export interface Options extends BaseClientOptions {}
export interface RequestOptions extends BaseRequestOptions {}
}
export class Workflows {
protected readonly _options: Workflows.Options;
constructor(_options: Workflows.Options = {}) {
this._options = _options;
}
/**
* Set or unset a workflow as a template.
*
* Template status is stored at the workflow_permanent_id level (not per-version),
* meaning all versions of a workflow share the same template status.
*
* @param {string} workflowPermanentId
* @param {Skyvern.SetWorkflowTemplateStatusV1WorkflowsWorkflowPermanentIdTemplatePutRequest} request
* @param {Workflows.RequestOptions} requestOptions - Request-specific configuration.
*
* @throws {@link Skyvern.UnprocessableEntityError}
*
* @example
* await client.workflows.setWorkflowTemplateStatus("workflow_permanent_id", {
* is_template: true
* })
*/
public setWorkflowTemplateStatus(
workflowPermanentId: string,
request: Skyvern.SetWorkflowTemplateStatusV1WorkflowsWorkflowPermanentIdTemplatePutRequest,
requestOptions?: Workflows.RequestOptions,
): core.HttpResponsePromise<Record<string, unknown>> {
return core.HttpResponsePromise.fromPromise(
this.__setWorkflowTemplateStatus(workflowPermanentId, request, requestOptions),
);
}
private async __setWorkflowTemplateStatus(
workflowPermanentId: string,
request: Skyvern.SetWorkflowTemplateStatusV1WorkflowsWorkflowPermanentIdTemplatePutRequest,
requestOptions?: Workflows.RequestOptions,
): Promise<core.WithRawResponse<Record<string, unknown>>> {
const { is_template: isTemplate } = request;
const _queryParams: Record<string, string | string[] | object | object[] | null> = {};
_queryParams.is_template = isTemplate.toString();
const _headers: core.Fetcher.Args["headers"] = mergeHeaders(
this._options?.headers,
mergeOnlyDefinedHeaders({ "x-api-key": requestOptions?.apiKey ?? this._options?.apiKey }),
requestOptions?.headers,
);
const _response = await core.fetcher({
url: core.url.join(
(await core.Supplier.get(this._options.baseUrl)) ??
(await core.Supplier.get(this._options.environment)) ??
environments.SkyvernEnvironment.Cloud,
`v1/workflows/${core.url.encodePathParam(workflowPermanentId)}/template`,
),
method: "PUT",
headers: _headers,
queryParameters: { ..._queryParams, ...requestOptions?.queryParams },
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
});
if (_response.ok) {
return { data: _response.body as Record<string, unknown>, rawResponse: _response.rawResponse };
}
if (_response.error.reason === "status-code") {
switch (_response.error.statusCode) {
case 422:
throw new Skyvern.UnprocessableEntityError(_response.error.body as unknown, _response.rawResponse);
default:
throw new errors.SkyvernError({
statusCode: _response.error.statusCode,
body: _response.error.body,
rawResponse: _response.rawResponse,
});
}
}
switch (_response.error.reason) {
case "non-json":
throw new errors.SkyvernError({
statusCode: _response.error.statusCode,
body: _response.error.rawBody,
rawResponse: _response.rawResponse,
});
case "timeout":
throw new errors.SkyvernTimeoutError(
"Timeout exceeded when calling PUT /v1/workflows/{workflow_permanent_id}/template.",
);
case "unknown":
throw new errors.SkyvernError({
message: _response.error.errorMessage,
rawResponse: _response.rawResponse,
});
}
}
}

View file

@ -1 +0,0 @@
export * from "./requests/index.js";

View file

@ -1,11 +0,0 @@
// This file was auto-generated by Fern from our API Definition.
/**
* @example
* {
* is_template: true
* }
*/
export interface SetWorkflowTemplateStatusV1WorkflowsWorkflowPermanentIdTemplatePutRequest {
is_template: boolean;
}

View file

@ -1 +0,0 @@
export type { SetWorkflowTemplateStatusV1WorkflowsWorkflowPermanentIdTemplatePutRequest } from "./SetWorkflowTemplateStatusV1WorkflowsWorkflowPermanentIdTemplatePutRequest.js";

View file

@ -1 +0,0 @@
export * from "./client/index.js";

View file

@ -7,7 +7,7 @@ export interface PromptAction {
/** The prompt to send to the LLM */
prompt: string;
/** Optional JSON schema to structure the response */
schema?: Record<string, unknown>;
response_schema?: Record<string, unknown>;
/** Optional model configuration */
model?: Record<string, unknown>;
}

View file

@ -0,0 +1,6 @@
// This file was auto-generated by Fern from our API Definition.
/** Optional request body for POST /v1/runs/{run_id}/retry_webhook. */
export interface RetryRunWebhookRequest {
    /** Optional webhook URL to send the payload to instead of the stored configuration */
    webhook_url?: string;
}

View file

@ -0,0 +1,8 @@
// This file was auto-generated by Fern from our API Definition.
export interface UploadFileResponse {
/** S3 URI where the file was uploaded */
s3_uri: string;
/** Presigned URL to access the uploaded file */
presigned_url: string;
}

View file

@ -96,6 +96,7 @@ export * from "./PdfParserBlockYaml.js";
export * from "./PromptAction.js";
export * from "./PromptBranchCriteria.js";
export * from "./ProxyLocation.js";
export * from "./RetryRunWebhookRequest.js";
export * from "./RunEngine.js";
export * from "./RunSdkActionRequestAction.js";
export * from "./RunSdkActionResponse.js";
@ -127,6 +128,7 @@ export * from "./ThoughtType.js";
export * from "./TotpCode.js";
export * from "./TotpType.js";
export * from "./UploadFileAction.js";
export * from "./UploadFileResponse.js";
export * from "./UploadToS3Block.js";
export * from "./UploadToS3BlockYaml.js";
export * from "./UrlBlock.js";

View file

@ -0,0 +1 @@
export * from "./file/exports.js";

View file

@ -0,0 +1 @@
export type { Uploadable } from "./types.js";

View file

@ -0,0 +1,217 @@
import type { Uploadable } from "./types.js";
/**
 * Converts an `Uploadable` into a binary upload request: the raw file-like body
 * plus any headers derivable from the file's metadata.
 *
 * Headers are only set when the corresponding metadata is known:
 * - `Content-Disposition` from `filename`
 * - `Content-Type` from `contentType`
 * - `Content-Length` from `contentLength`
 */
export async function toBinaryUploadRequest(
    file: Uploadable,
): Promise<{ body: Uploadable.FileLike; headers?: Record<string, string> }> {
    const { data, filename, contentLength, contentType } = await getFileWithMetadata(file);
    const request = {
        body: data,
        headers: {} as Record<string, string>,
    };
    if (filename) {
        // Bug fix: the template previously emitted the literal text `$(unknown)`
        // instead of interpolating the filename into the header value.
        request.headers["Content-Disposition"] = `attachment; filename="${filename}"`;
    }
    if (contentType) {
        request.headers["Content-Type"] = contentType;
    }
    if (contentLength != null) {
        request.headers["Content-Length"] = contentLength.toString();
    }
    return request;
}
/**
 * Converts an `Uploadable` into the pieces needed for a multipart form-data part:
 * the raw data plus optional filename and content type.
 *
 * File-size sniffing is disabled because multipart encoding does not require a
 * per-part content length.
 */
export async function toMultipartDataPart(
    file: Uploadable,
): Promise<{ data: Uploadable.FileLike; filename?: string; contentType?: string }> {
    const withMetadata = await getFileWithMetadata(file, { noSniffFileSize: true });
    return {
        data: withMetadata.data,
        filename: withMetadata.filename,
        contentType: withMetadata.contentType,
    };
}
/**
 * Normalizes any `Uploadable` variant into the `WithMetadata` shape:
 * `{ data, filename?, contentType?, contentLength? }`.
 *
 * - A bare file-like value is wrapped as `{ data }` and re-processed.
 * - A `{ path }` input is opened as an fs read stream; size and name are derived
 *   from the filesystem/path unless explicitly provided.
 * - A `{ data }` input has its missing metadata sniffed from the data itself.
 *
 * `noSniffFileSize` skips filesystem size lookups (used for multipart parts,
 * where a per-part content length is unnecessary).
 */
async function getFileWithMetadata(
    file: Uploadable,
    { noSniffFileSize }: { noSniffFileSize?: boolean } = {},
): Promise<Uploadable.WithMetadata> {
    if (isFileLike(file)) {
        // Bare stream/buffer/blob: wrap it and recurse so the `{ data }` branch applies.
        return getFileWithMetadata(
            {
                data: file,
            },
            { noSniffFileSize },
        );
    }
    if ("path" in file) {
        // Lazily import `fs` so this module stays loadable in non-Node environments.
        const fs = await import("fs");
        if (!fs || !fs.createReadStream) {
            throw new Error("File path uploads are not supported in this environment.");
        }
        const data = fs.createReadStream(file.path);
        const contentLength =
            file.contentLength ?? (noSniffFileSize === true ? undefined : await tryGetFileSizeFromPath(file.path));
        const filename = file.filename ?? getNameFromPath(file.path);
        return {
            data,
            filename,
            contentType: file.contentType,
            contentLength,
        };
    }
    if ("data" in file) {
        const data = file.data;
        // Explicit metadata wins; otherwise fall back to best-effort sniffing from the value.
        const contentLength =
            file.contentLength ??
            (await tryGetContentLengthFromFileLike(data, {
                noSniffFileSize,
            }));
        const filename = file.filename ?? tryGetNameFromFileLike(data);
        return {
            data,
            filename,
            contentType: file.contentType ?? tryGetContentTypeFromFileLike(data),
            contentLength,
        };
    }
    throw new Error(`Invalid FileUpload of type ${typeof file}: ${JSON.stringify(file)}`);
}
/** Returns true when `value` is one of the raw file-like inputs (buffer, typed array, array buffer, blob, file, or stream). */
function isFileLike(value: unknown): value is Uploadable.FileLike {
    const guards = [
        isBuffer,
        isArrayBufferView,
        isArrayBuffer,
        isUint8Array,
        isBlob,
        isFile,
        isStreamLike,
        isReadableStream,
    ];
    return guards.some((guard) => guard(value));
}
/**
 * Best-effort lookup of a file's size in bytes via `fs.promises.stat`.
 * Returns `undefined` when `fs` is unavailable or the stat call fails.
 */
async function tryGetFileSizeFromPath(path: string): Promise<number | undefined> {
    try {
        const fs = await import("fs");
        const stat = fs?.promises?.stat;
        if (!stat) {
            return undefined;
        }
        const fileStat = await stat(path);
        return fileStat.size;
    } catch (_fallbackError) {
        // Any filesystem error (missing file, permissions, unsupported runtime) degrades to "unknown size".
        return undefined;
    }
}
/** Derives a display filename from a file-like value's `name` or `path` property, if present. */
function tryGetNameFromFileLike(data: Uploadable.FileLike): string | undefined {
    if (isNamedValue(data)) {
        return data.name;
    }
    return isPathedValue(data) ? getNameFromPath(data.path.toString()) : undefined;
}
/**
 * Best-effort determination of a file-like value's byte length.
 *
 * Buffered types (Buffer, typed arrays, ArrayBuffer, Blob, File) report their
 * size directly. When `noSniffFileSize` is set, filesystem lookups are skipped
 * (streams then get no length). Otherwise a pathed stream (e.g. `fs.ReadStream`)
 * is stat'ed on disk. Returns `undefined` when the length cannot be determined.
 */
async function tryGetContentLengthFromFileLike(
    data: Uploadable.FileLike,
    { noSniffFileSize }: { noSniffFileSize?: boolean } = {},
): Promise<number | undefined> {
    if (isBuffer(data)) {
        return data.length;
    }
    if (isArrayBufferView(data)) {
        return data.byteLength;
    }
    if (isArrayBuffer(data)) {
        return data.byteLength;
    }
    if (isBlob(data)) {
        return data.size;
    }
    if (isFile(data)) {
        return data.size;
    }
    if (noSniffFileSize === true) {
        // Caller asked to avoid filesystem access (multipart parts don't need a length).
        return undefined;
    }
    if (isPathedValue(data)) {
        // e.g. fs.ReadStream exposes its source path; stat it for the size.
        return await tryGetFileSizeFromPath(data.path.toString());
    }
    return undefined;
}
/** Reads the MIME type from a `Blob`/`File` value; other inputs carry no type information. */
function tryGetContentTypeFromFileLike(data: Uploadable.FileLike): string | undefined {
    return isBlob(data) || isFile(data) ? data.type : undefined;
}
/** Returns the basename of a path, handling both "/" and "\\" separators. */
function getNameFromPath(path: string): string | undefined {
    const separatorIndex = Math.max(path.lastIndexOf("/"), path.lastIndexOf("\\"));
    if (separatorIndex < 0) {
        // No separator at all: the whole string is the name.
        return path;
    }
    return path.slice(separatorIndex + 1);
}
/** A value exposing a `name` property (e.g. `File`). */
type NamedValue = {
    name: string;
} & unknown;
/** A value exposing a `path` property (e.g. `fs.ReadStream`). */
type PathedValue = {
    path: string | { toString(): string };
} & unknown;
/** Node-style stream shape, detected structurally via `read`/`pipe`. */
type StreamLike = {
    read?: () => unknown;
    pipe?: (dest: unknown) => unknown;
} & unknown;
/** Shared precondition for the structural guards below: a non-null object. */
function isObjectLike(value: unknown): value is object {
    return value != null && typeof value === "object";
}
function isNamedValue(value: unknown): value is NamedValue {
    return isObjectLike(value) && "name" in value;
}
function isPathedValue(value: unknown): value is PathedValue {
    return isObjectLike(value) && "path" in value;
}
function isStreamLike(value: unknown): value is StreamLike {
    return isObjectLike(value) && ("read" in value || "pipe" in value);
}
function isReadableStream(value: unknown): value is ReadableStream {
    return isObjectLike(value) && "getReader" in value;
}
function isBuffer(value: unknown): value is Buffer {
    // Guard both the global and the method so this works in non-Node runtimes.
    return typeof Buffer !== "undefined" && Boolean(Buffer.isBuffer) && Buffer.isBuffer(value);
}
function isArrayBufferView(value: unknown): value is ArrayBufferView {
    return typeof ArrayBuffer !== "undefined" && ArrayBuffer.isView(value);
}
function isArrayBuffer(value: unknown): value is ArrayBuffer {
    return typeof ArrayBuffer !== "undefined" && value instanceof ArrayBuffer;
}
function isUint8Array(value: unknown): value is Uint8Array {
    return typeof Uint8Array !== "undefined" && value instanceof Uint8Array;
}
function isBlob(value: unknown): value is Blob {
    return typeof Blob !== "undefined" && value instanceof Blob;
}
function isFile(value: unknown): value is File {
    return typeof File !== "undefined" && value instanceof File;
}

View file

@ -0,0 +1,2 @@
export * from "./file.js";
export * from "./types.js";

View file

@ -0,0 +1,81 @@
/**
* A file that can be uploaded. Can be a file-like object (stream, buffer, blob, etc.),
* a path to a file, or an object with a file-like object and metadata.
*/
export type Uploadable = Uploadable.FileLike | Uploadable.FromPath | Uploadable.WithMetadata;
export namespace Uploadable {
/**
* Various file-like objects that can be used to upload a file.
*/
export type FileLike =
| ArrayBuffer
| ArrayBufferLike
| ArrayBufferView
| Uint8Array
| import("buffer").Buffer
| import("buffer").Blob
| import("buffer").File
| import("stream").Readable
| import("stream/web").ReadableStream
| globalThis.Blob
| globalThis.File
| ReadableStream;
/**
* A file path with optional metadata, used for uploading a file from the file system.
*/
export type FromPath = {
/** The path to the file to upload */
path: string;
/**
* Optional override for the file name (defaults to basename of path).
* This is used to set the `Content-Disposition` header in upload requests.
*/
filename?: string;
/**
* Optional MIME type of the file (e.g., 'image/jpeg', 'text/plain').
* This is used to set the `Content-Type` header in upload requests.
*/
contentType?: string;
/**
* Optional file size in bytes.
* If not provided, the file size will be determined from the file system.
* The content length is used to set the `Content-Length` header in upload requests.
*/
contentLength?: number;
};
/**
* A file-like object with metadata, used for uploading files.
*/
export type WithMetadata = {
/** The file data */
data: FileLike;
/**
* Optional override for the file name (defaults to basename of path).
* This is used to set the `Content-Disposition` header in upload requests.
*/
filename?: string;
/**
* Optional MIME type of the file (e.g., 'image/jpeg', 'text/plain').
* This is used to set the `Content-Type` header in upload requests.
*
* If not provided, the content type may be determined from the data itself.
* * If the data is a `File`, `Blob`, or similar, the content type will be determined from the file itself, if the type is set.
* * Any other data type will not have a content type set, and the upload request will use `Content-Type: application/octet-stream` instead.
*/
contentType?: string;
/**
* Optional file size in bytes.
* The content length is used to set the `Content-Length` header in upload requests.
* If the content length is not provided and cannot be determined, the upload request will not include the `Content-Length` header, but will use `Transfer-Encoding: chunked` instead.
*
* If not provided, the file size will be determined depending on the data type.
* * If the data is of type `fs.ReadStream` (`createReadStream`), the size will be determined from the file system.
* * If the data is a `Buffer`, `ArrayBuffer`, `Uint8Array`, `Blob`, `File`, or similar, the size will be determined from the data itself.
* * If the data is a `Readable` or `ReadableStream`, the size will not be determined.
*/
contentLength?: number;
};
}

View file

@ -0,0 +1,140 @@
import { toMultipartDataPart, type Uploadable } from "../../core/file/index.js";
import { toJson } from "../../core/json.js";
import { RUNTIME } from "../runtime/index.js";
interface FormDataRequest<Body> {
body: Body;
headers: Record<string, string>;
duplex?: "half";
}
/**
 * Factory for a fresh `FormDataWrapper`.
 * Declared async so call sites can uniformly `await core.newFormData()`,
 * even though construction here is synchronous.
 */
export async function newFormData(): Promise<FormDataWrapper> {
    return new FormDataWrapper();
}
/**
 * Thin wrapper around the platform `FormData`, adding file-aware appends.
 * `getRequest()` returns the form body together with (empty) headers and the
 * `duplex` option used for streaming request bodies with `fetch`.
 */
export class FormDataWrapper {
    private fd: FormData = new FormData();
    // noop — presumably kept for parity with wrappers that need async initialization; TODO confirm.
    public async setup(): Promise<void> {
        // noop
    }
    /** Appends a plain form field; the value is stringified. */
    public append(key: string, value: unknown): void {
        this.fd.append(key, String(value));
    }
    /**
     * Appends an `Uploadable` as a file part. The value is converted to a Blob
     * first, which fully buffers streams in memory (see `convertToBlob`).
     */
    public async appendFile(key: string, value: Uploadable): Promise<void> {
        const { data, filename, contentType } = await toMultipartDataPart(value);
        const blob = await convertToBlob(data, contentType);
        if (filename) {
            this.fd.append(key, blob, filename);
        } else {
            this.fd.append(key, blob);
        }
    }
    /** Builds the request pieces; headers stay empty so fetch sets the multipart boundary itself. */
    public getRequest(): FormDataRequest<FormData> {
        return {
            body: this.fd,
            headers: {},
            duplex: "half" as const,
        };
    }
}
/** Node-style stream shape, detected structurally via `read`/`pipe`. */
type StreamLike = {
    read?: () => unknown;
    pipe?: (dest: unknown) => unknown;
} & unknown;
function isStreamLike(value: unknown): value is StreamLike {
    if (value == null || typeof value !== "object") {
        return false;
    }
    return "read" in value || "pipe" in value;
}
function isReadableStream(value: unknown): value is ReadableStream {
    if (value == null || typeof value !== "object") {
        return false;
    }
    return "getReader" in value;
}
function isBuffer(value: unknown): value is Buffer {
    if (typeof Buffer === "undefined" || !Buffer.isBuffer) {
        // Non-Node runtime: there is no Buffer to match against.
        return false;
    }
    return Buffer.isBuffer(value);
}
function isArrayBufferView(value: unknown): value is ArrayBufferView {
    return ArrayBuffer.isView(value);
}
/**
 * Fully reads a stream into a Buffer.
 *
 * Supports Node `Readable` streams (when running on Node) and Web
 * `ReadableStream`s; throws for anything else. Note: the entire stream is
 * buffered in memory.
 */
async function streamToBuffer(stream: unknown): Promise<Buffer> {
    if (RUNTIME.type === "node") {
        // Lazily import "stream" so this module stays loadable outside Node.
        const { Readable } = await import("stream");
        if (stream instanceof Readable) {
            const chunks: Buffer[] = [];
            for await (const chunk of stream) {
                chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
            }
            return Buffer.concat(chunks);
        }
    }
    if (isReadableStream(stream)) {
        const reader = stream.getReader();
        const chunks: Uint8Array[] = [];
        try {
            while (true) {
                const { done, value } = await reader.read();
                if (done) break;
                chunks.push(value);
            }
        } finally {
            // Always release the reader lock, even if read() throws.
            reader.releaseLock();
        }
        // Concatenate the collected chunks into one contiguous buffer.
        const totalLength = chunks.reduce((sum, chunk) => sum + chunk.length, 0);
        const result = new Uint8Array(totalLength);
        let offset = 0;
        for (const chunk of chunks) {
            result.set(chunk, offset);
            offset += chunk.length;
        }
        return Buffer.from(result);
    }
    throw new Error(
        `Unsupported stream type: ${typeof stream}. Expected Node.js Readable stream or Web ReadableStream.`,
    );
}
/**
 * Normalizes any supported upload value into a `Blob` for FormData.
 *
 * Streams are fully buffered first; blobs pass through unchanged; binary and
 * string values are wrapped directly; any other non-null object is
 * JSON-serialized (defaulting its content type to "application/json").
 */
async function convertToBlob(value: unknown, contentType?: string): Promise<Blob> {
    if (isStreamLike(value) || isReadableStream(value)) {
        const buffer = await streamToBuffer(value);
        return new Blob([buffer], { type: contentType });
    }
    if (value instanceof Blob) {
        // Already a Blob — note an explicit contentType override is ignored here.
        return value;
    }
    if (isBuffer(value)) {
        return new Blob([value], { type: contentType });
    }
    if (value instanceof ArrayBuffer) {
        return new Blob([value], { type: contentType });
    }
    if (isArrayBufferView(value)) {
        return new Blob([value], { type: contentType });
    }
    if (typeof value === "string") {
        return new Blob([value], { type: contentType });
    }
    if (typeof value === "object" && value !== null) {
        // Fallback: serialize arbitrary objects as JSON.
        return new Blob([toJson(value)], { type: contentType ?? "application/json" });
    }
    return new Blob([String(value)], { type: contentType });
}

View file

@ -0,0 +1,12 @@
import { toQueryString } from "../url/qs.js";
/**
 * Encodes a value as `application/x-www-form-urlencoded`-style key/value pairs.
 *
 * The value is serialized with `toQueryString` (unencoded), then split back into
 * a flat record. Fixes over the previous version:
 * - values containing "=" are no longer truncated (split at the first "=" only)
 * - an empty serialization yields an empty record instead of `{"": undefined}`
 */
export function encodeAsFormParameter(value: unknown): Record<string, string> {
    const stringified = toQueryString(value, { encode: false });
    if (stringified === "") {
        return {};
    }
    const keyValuePairs = stringified.split("&").map((pair) => {
        const separatorIndex = pair.indexOf("=");
        if (separatorIndex < 0) {
            // Bare key with no "=": preserve the previous undefined-value behavior.
            return [pair, undefined] as const;
        }
        return [pair.slice(0, separatorIndex), pair.slice(separatorIndex + 1)] as const;
    });
    return Object.fromEntries(keyValuePairs);
}

View file

@ -0,0 +1,2 @@
export { encodeAsFormParameter } from "./encodeAsFormParameter.js";
export * from "./FormDataWrapper.js";

View file

@ -1,3 +1,5 @@
export * from "./fetcher/index.js";
export * as file from "./file/index.js";
export * from "./form-data-utils/index.js";
export * from "./runtime/index.js";
export * as url from "./url/index.js";

View file

@ -0,0 +1 @@
export * from "./core/exports.js";

View file

@ -3,5 +3,6 @@ export type { BaseClientOptions, BaseRequestOptions } from "./BaseClient.js";
export { SkyvernClient } from "./Client.js";
export { SkyvernEnvironment } from "./environments.js";
export { SkyvernError, SkyvernTimeoutError } from "./errors/index.js";
export * from "./exports.js";
export { Skyvern, SkyvernBrowser, SkyvernBrowserPageAgent, SkyvernBrowserPageAi } from "./library/index.js";
export type { SkyvernOptions, SkyvernBrowserPage } from "./library/index.js";

View file

@ -1 +1 @@
export const SDK_VERSION = "1.0.3";
export const SDK_VERSION = "1.0.6";

View file

@ -0,0 +1,498 @@
import fs from "fs";
import { join } from "path";
import { Readable } from "stream";
import { toBinaryUploadRequest, type Uploadable } from "../../../src/core/file/index";
// Unit tests for toBinaryUploadRequest, which normalizes every supported
// Uploadable input (Buffer, ArrayBuffer, typed arrays, Blob/File, web and
// Node.js streams, and file paths) into a binary upload request: the raw
// body plus derived Content-Disposition / Content-Type / Content-Length headers.
describe("toBinaryUploadRequest", () => {
// Shared on-disk fixture; the FromPath tests below assume it is 21 bytes long
// (see the sibling test-file.txt fixture).
const TEST_FILE_PATH = join(__dirname, "..", "test-file.txt");
beforeEach(() => {
vi.clearAllMocks();
});
describe("Buffer input", () => {
it("should handle Buffer with all metadata", async () => {
const buffer = Buffer.from("test data");
const input: Uploadable.WithMetadata = {
data: buffer,
filename: "test.txt",
contentType: "text/plain",
contentLength: 42,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(buffer);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="test.txt"',
"Content-Type": "text/plain",
"Content-Length": "42",
});
});
it("should handle Buffer without metadata", async () => {
const buffer = Buffer.from("test data");
const input: Uploadable.WithMetadata = {
data: buffer,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(buffer);
expect(result.headers).toEqual({
"Content-Length": "9", // buffer.length
});
});
it("should handle Buffer passed directly", async () => {
const buffer = Buffer.from("test data");
const result = await toBinaryUploadRequest(buffer);
expect(result.body).toBe(buffer);
expect(result.headers).toEqual({
"Content-Length": "9", // buffer.length
});
});
});
describe("ArrayBuffer input", () => {
it("should handle ArrayBuffer with metadata", async () => {
const arrayBuffer = new ArrayBuffer(10);
const input: Uploadable.WithMetadata = {
data: arrayBuffer,
filename: "data.bin",
contentType: "application/octet-stream",
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(arrayBuffer);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="data.bin"',
"Content-Type": "application/octet-stream",
"Content-Length": "10", // arrayBuffer.byteLength
});
});
it("should handle ArrayBuffer passed directly", async () => {
const arrayBuffer = new ArrayBuffer(10);
const result = await toBinaryUploadRequest(arrayBuffer);
expect(result.body).toBe(arrayBuffer);
expect(result.headers).toEqual({
"Content-Length": "10", // arrayBuffer.byteLength
});
});
});
describe("Uint8Array input", () => {
it("should handle Uint8Array with metadata", async () => {
const uint8Array = new Uint8Array([1, 2, 3, 4, 5]);
const input: Uploadable.WithMetadata = {
data: uint8Array,
filename: "bytes.bin",
contentType: "application/octet-stream",
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(uint8Array);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="bytes.bin"',
"Content-Type": "application/octet-stream",
"Content-Length": "5", // uint8Array.byteLength
});
});
it("should handle Uint8Array passed directly", async () => {
const uint8Array = new Uint8Array([1, 2, 3, 4, 5]);
const result = await toBinaryUploadRequest(uint8Array);
expect(result.body).toBe(uint8Array);
expect(result.headers).toEqual({
"Content-Length": "5", // uint8Array.byteLength
});
});
});
// Blob/File carry intrinsic size/type (and name, for File); explicit metadata
// on the wrapper is expected to win over the intrinsic values.
describe("Blob input", () => {
it("should handle Blob with metadata", async () => {
const blob = new Blob(["test content"], { type: "text/plain" });
const input: Uploadable.WithMetadata = {
data: blob,
filename: "override.txt",
contentType: "text/html", // Override blob's type
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(blob);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="override.txt"',
"Content-Type": "text/html", // Should use provided contentType
"Content-Length": "12", // blob.size
});
});
it("should handle Blob with intrinsic type", async () => {
const blob = new Blob(["test content"], { type: "application/json" });
const input: Uploadable.WithMetadata = {
data: blob,
filename: "data.json",
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(blob);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="data.json"',
"Content-Type": "application/json", // Should use blob's type
"Content-Length": "12", // blob.size
});
});
it("should handle Blob passed directly", async () => {
const blob = new Blob(["test content"], { type: "text/plain" });
const result = await toBinaryUploadRequest(blob);
expect(result.body).toBe(blob);
expect(result.headers).toEqual({
"Content-Type": "text/plain", // Should use blob's type
"Content-Length": "12", // blob.size
});
});
});
describe("File input", () => {
it("should handle File with metadata", async () => {
const file = new File(["file content"], "original.txt", { type: "text/plain" });
const input: Uploadable.WithMetadata = {
data: file,
filename: "renamed.txt",
contentType: "text/html", // Override file's type
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(file);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="renamed.txt"',
"Content-Type": "text/html", // Should use provided contentType
"Content-Length": "12", // file.size
});
});
it("should handle File with intrinsic properties", async () => {
const file = new File(["file content"], "test.json", { type: "application/json" });
const input: Uploadable.WithMetadata = {
data: file,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(file);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="test.json"', // Should use file's name
"Content-Type": "application/json", // Should use file's type
"Content-Length": "12", // file.size
});
});
it("should handle File passed directly", async () => {
const file = new File(["file content"], "direct.txt", { type: "text/plain" });
const result = await toBinaryUploadRequest(file);
expect(result.body).toBe(file);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="direct.txt"',
"Content-Type": "text/plain",
"Content-Length": "12", // file.size
});
});
});
// Streams have no inherent length, so Content-Length must either come from
// the caller-provided contentLength or be omitted entirely.
describe("ReadableStream input", () => {
it("should handle ReadableStream with metadata", async () => {
const stream = new ReadableStream({
start(controller) {
controller.enqueue(new TextEncoder().encode("stream data"));
controller.close();
},
});
const input: Uploadable.WithMetadata = {
data: stream,
filename: "stream.txt",
contentType: "text/plain",
contentLength: 100,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(stream);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="stream.txt"',
"Content-Type": "text/plain",
"Content-Length": "100", // Should use provided contentLength
});
});
it("should handle ReadableStream without size", async () => {
const stream = new ReadableStream({
start(controller) {
controller.enqueue(new TextEncoder().encode("stream data"));
controller.close();
},
});
const input: Uploadable.WithMetadata = {
data: stream,
filename: "stream.txt",
contentType: "text/plain",
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(stream);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="stream.txt"',
"Content-Type": "text/plain",
// No Content-Length header since it cannot be determined from ReadableStream
});
});
it("should handle ReadableStream passed directly", async () => {
const stream = new ReadableStream({
start(controller) {
controller.enqueue(new TextEncoder().encode("stream data"));
controller.close();
},
});
const result = await toBinaryUploadRequest(stream);
expect(result.body).toBe(stream);
expect(result.headers).toEqual({
// No headers since no metadata provided and cannot be determined
});
});
});
describe("Node.js Readable stream input", () => {
it("should handle Readable stream with metadata", async () => {
const readable = new Readable({
read() {
this.push("readable data");
this.push(null);
},
});
const input: Uploadable.WithMetadata = {
data: readable,
filename: "readable.txt",
contentType: "text/plain",
contentLength: 50,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(readable);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="readable.txt"',
"Content-Type": "text/plain",
"Content-Length": "50", // Should use provided contentLength
});
});
it("should handle Readable stream without size", async () => {
const readable = new Readable({
read() {
this.push("readable data");
this.push(null);
},
});
const input: Uploadable.WithMetadata = {
data: readable,
filename: "readable.txt",
contentType: "text/plain",
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(readable);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="readable.txt"',
"Content-Type": "text/plain",
// No Content-Length header since it cannot be determined from Readable
});
});
it("should handle Readable stream passed directly", async () => {
const readable = new Readable({
read() {
this.push("readable data");
this.push(null);
},
});
const result = await toBinaryUploadRequest(readable);
expect(result.body).toBe(readable);
expect(result.headers).toEqual({
// No headers since no metadata provided and cannot be determined
});
});
});
// FromPath inputs are opened as fs.ReadStream; missing filename/length fall
// back to the path basename and the file system's reported size.
describe("File path input (FromPath type)", () => {
it("should handle file path with all metadata", async () => {
const input: Uploadable.FromPath = {
path: TEST_FILE_PATH,
filename: "custom.txt",
contentType: "text/html",
contentLength: 42,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBeInstanceOf(fs.ReadStream);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="custom.txt"',
"Content-Type": "text/html",
"Content-Length": "42", // Should use provided contentLength
});
});
it("should handle file path with minimal metadata", async () => {
const input: Uploadable.FromPath = {
path: TEST_FILE_PATH,
contentType: "text/plain",
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBeInstanceOf(fs.ReadStream);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="test-file.txt"', // Should extract from path
"Content-Type": "text/plain",
"Content-Length": "21", // Should determine from file system (test file is 21 bytes)
});
});
it("should handle file path with no metadata", async () => {
const input: Uploadable.FromPath = {
path: TEST_FILE_PATH,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBeInstanceOf(fs.ReadStream);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="test-file.txt"', // Should extract from path
"Content-Length": "21", // Should determine from file system (test file is 21 bytes)
});
});
});
describe("ArrayBufferView input", () => {
it("should handle ArrayBufferView with metadata", async () => {
const arrayBuffer = new ArrayBuffer(10);
const arrayBufferView = new Int8Array(arrayBuffer);
const input: Uploadable.WithMetadata = {
data: arrayBufferView,
filename: "view.bin",
contentType: "application/octet-stream",
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(arrayBufferView);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="view.bin"',
"Content-Type": "application/octet-stream",
"Content-Length": "10", // arrayBufferView.byteLength
});
});
it("should handle ArrayBufferView passed directly", async () => {
const arrayBuffer = new ArrayBuffer(10);
const arrayBufferView = new Int8Array(arrayBuffer);
const result = await toBinaryUploadRequest(arrayBufferView);
expect(result.body).toBe(arrayBufferView);
expect(result.headers).toEqual({
"Content-Length": "10", // arrayBufferView.byteLength
});
});
});
// Degenerate metadata: zero-length bodies, explicit contentLength of 0, and
// undefined filename/contentType must not emit bogus headers.
describe("Edge cases", () => {
it("should handle empty headers when no metadata is available", async () => {
const buffer = Buffer.from("");
const input: Uploadable.WithMetadata = {
data: buffer,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(buffer);
expect(result.headers).toEqual({
"Content-Length": "0",
});
});
it("should handle zero contentLength", async () => {
const buffer = Buffer.from("test");
const input: Uploadable.WithMetadata = {
data: buffer,
contentLength: 0,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(buffer);
expect(result.headers).toEqual({
"Content-Length": "0", // Should use provided 0
});
});
it("should handle null filename", async () => {
const buffer = Buffer.from("test");
const input: Uploadable.WithMetadata = {
data: buffer,
filename: undefined,
contentType: "text/plain",
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(buffer);
expect(result.headers).toEqual({
"Content-Type": "text/plain",
"Content-Length": "4",
// No Content-Disposition since filename is undefined
});
});
it("should handle null contentType", async () => {
const buffer = Buffer.from("test");
const input: Uploadable.WithMetadata = {
data: buffer,
filename: "test.txt",
contentType: undefined,
};
const result = await toBinaryUploadRequest(input);
expect(result.body).toBe(buffer);
expect(result.headers).toEqual({
"Content-Disposition": 'attachment; filename="test.txt"',
"Content-Length": "4",
// No Content-Type since contentType is undefined
});
});
});
});

View file

@ -0,0 +1,344 @@
import { encodeAsFormParameter } from "../../../src/core/form-data-utils/encodeAsFormParameter";
// Unit tests for encodeAsFormParameter, which flattens a nested object into
// form-style string key/value pairs using bracket notation (e.g.
// "user[profile][name]", "items[0]"). Non-object inputs yield an empty map,
// null becomes "", and undefined entries are dropped.
describe("encodeAsFormParameter", () => {
describe("Basic functionality", () => {
it("should return empty object for null/undefined", () => {
expect(encodeAsFormParameter(null)).toEqual({});
expect(encodeAsFormParameter(undefined)).toEqual({});
});
it("should return empty object for primitive values", () => {
expect(encodeAsFormParameter("hello")).toEqual({});
expect(encodeAsFormParameter(42)).toEqual({});
expect(encodeAsFormParameter(true)).toEqual({});
});
it("should handle simple key-value pairs", () => {
const obj = { name: "John", age: 30 };
expect(encodeAsFormParameter(obj)).toEqual({
name: "John",
age: "30",
});
});
it("should handle empty objects", () => {
expect(encodeAsFormParameter({})).toEqual({});
});
});
describe("Array handling", () => {
it("should handle arrays with indices format (default)", () => {
const obj = { items: ["a", "b", "c"] };
expect(encodeAsFormParameter(obj)).toEqual({
"items[0]": "a",
"items[1]": "b",
"items[2]": "c",
});
});
it("should handle empty arrays", () => {
const obj = { items: [] };
expect(encodeAsFormParameter(obj)).toEqual({});
});
it("should handle arrays with mixed types", () => {
const obj = { mixed: ["string", 42, true, false] };
expect(encodeAsFormParameter(obj)).toEqual({
"mixed[0]": "string",
"mixed[1]": "42",
"mixed[2]": "true",
"mixed[3]": "false",
});
});
it("should handle arrays with objects", () => {
const obj = { users: [{ name: "John" }, { name: "Jane" }] };
expect(encodeAsFormParameter(obj)).toEqual({
"users[0][name]": "John",
"users[1][name]": "Jane",
});
});
it("should handle arrays with null/undefined values", () => {
const obj = { items: ["a", null, "c", undefined, "e"] };
expect(encodeAsFormParameter(obj)).toEqual({
"items[0]": "a",
"items[1]": "",
"items[2]": "c",
"items[4]": "e",
});
});
});
describe("Nested objects", () => {
it("should handle nested objects", () => {
const obj = { user: { name: "John", age: 30 } };
expect(encodeAsFormParameter(obj)).toEqual({
"user[name]": "John",
"user[age]": "30",
});
});
it("should handle deeply nested objects", () => {
const obj = { user: { profile: { name: "John", settings: { theme: "dark" } } } };
expect(encodeAsFormParameter(obj)).toEqual({
"user[profile][name]": "John",
"user[profile][settings][theme]": "dark",
});
});
it("should handle empty nested objects", () => {
const obj = { user: {} };
expect(encodeAsFormParameter(obj)).toEqual({});
});
});
// Keys and values are emitted verbatim — no percent-encoding is applied.
describe("Special characters and encoding", () => {
it("should not encode values (encode: false is used)", () => {
const obj = { name: "John Doe", email: "john@example.com" };
expect(encodeAsFormParameter(obj)).toEqual({
name: "John Doe",
email: "john@example.com",
});
});
it("should not encode special characters in keys", () => {
const obj = { "user name": "John", "email[primary]": "john@example.com" };
expect(encodeAsFormParameter(obj)).toEqual({
"user name": "John",
"email[primary]": "john@example.com",
});
});
it("should handle values that contain special characters", () => {
const obj = {
query: "search term with spaces",
filter: "category:electronics",
};
expect(encodeAsFormParameter(obj)).toEqual({
query: "search term with spaces",
filter: "category:electronics",
});
});
it("should handle ampersand and equals characters (edge case)", () => {
// Note: Values containing & and = may be problematic because
// encodeAsFormParameter splits on these characters when parsing the stringified result
const obj = {
message: "Hello & welcome",
equation: "x = y + z",
};
// This demonstrates the limitation - ampersands and equals signs in values
// will cause the parameter to be split incorrectly
const result = encodeAsFormParameter(obj);
// We expect this to be parsed incorrectly due to the implementation
expect(result.message).toBe("Hello ");
expect(result[" welcome"]).toBeUndefined();
expect(result.equation).toBe("x ");
expect(result[" y + z"]).toBeUndefined();
});
});
describe("Form data specific scenarios", () => {
it("should handle file upload metadata", () => {
const metadata = {
file: {
name: "document.pdf",
size: 1024,
type: "application/pdf",
},
options: {
compress: true,
quality: 0.8,
},
};
expect(encodeAsFormParameter(metadata)).toEqual({
"file[name]": "document.pdf",
"file[size]": "1024",
"file[type]": "application/pdf",
"options[compress]": "true",
"options[quality]": "0.8",
});
});
it("should handle form validation data", () => {
const formData = {
fields: ["name", "email", "phone"],
validation: {
required: ["name", "email"],
patterns: {
email: "^[^@]+@[^@]+\\.[^@]+$",
phone: "^\\+?[1-9]\\d{1,14}$",
},
},
};
expect(encodeAsFormParameter(formData)).toEqual({
"fields[0]": "name",
"fields[1]": "email",
"fields[2]": "phone",
"validation[required][0]": "name",
"validation[required][1]": "email",
"validation[patterns][email]": "^[^@]+@[^@]+\\.[^@]+$",
"validation[patterns][phone]": "^\\+?[1-9]\\d{1,14}$",
});
});
it("should handle search/filter parameters", () => {
const searchParams = {
filters: {
status: ["active", "pending"],
category: {
type: "electronics",
subcategories: ["phones", "laptops"],
},
},
sort: { field: "name", direction: "asc" },
pagination: { page: 1, limit: 20 },
};
expect(encodeAsFormParameter(searchParams)).toEqual({
"filters[status][0]": "active",
"filters[status][1]": "pending",
"filters[category][type]": "electronics",
"filters[category][subcategories][0]": "phones",
"filters[category][subcategories][1]": "laptops",
"sort[field]": "name",
"sort[direction]": "asc",
"pagination[page]": "1",
"pagination[limit]": "20",
});
});
});
describe("Edge cases", () => {
it("should handle boolean values", () => {
const obj = { enabled: true, disabled: false };
expect(encodeAsFormParameter(obj)).toEqual({
enabled: "true",
disabled: "false",
});
});
it("should handle empty strings", () => {
const obj = { name: "", description: "test" };
expect(encodeAsFormParameter(obj)).toEqual({
name: "",
description: "test",
});
});
it("should handle zero values", () => {
const obj = { count: 0, price: 0.0 };
expect(encodeAsFormParameter(obj)).toEqual({
count: "0",
price: "0",
});
});
it("should handle numeric keys", () => {
const obj = { "0": "zero", "1": "one" };
expect(encodeAsFormParameter(obj)).toEqual({
"0": "zero",
"1": "one",
});
});
it("should handle objects with null/undefined values", () => {
const obj = { name: "John", age: null, email: undefined, active: true };
expect(encodeAsFormParameter(obj)).toEqual({
name: "John",
age: "",
active: "true",
});
});
});
describe("Integration with form submission", () => {
it("should produce form-compatible key-value pairs", () => {
const formObject = {
username: "john_doe",
preferences: {
theme: "dark",
notifications: ["email", "push"],
settings: {
autoSave: true,
timeout: 300,
},
},
};
const result = encodeAsFormParameter(formObject);
// Verify all values are strings (as required for form data)
Object.values(result).forEach((value) => {
expect(typeof value).toBe("string");
});
// Verify the structure can be reconstructed
expect(result).toEqual({
username: "john_doe",
"preferences[theme]": "dark",
"preferences[notifications][0]": "email",
"preferences[notifications][1]": "push",
"preferences[settings][autoSave]": "true",
"preferences[settings][timeout]": "300",
});
});
it("should handle complex nested arrays for API parameters", () => {
const apiParams = {
query: {
filters: [
{ field: "status", operator: "eq", value: "active" },
{ field: "created", operator: "gte", value: "2023-01-01" },
],
sort: [
{ field: "name", direction: "asc" },
{ field: "created", direction: "desc" },
],
},
};
const result = encodeAsFormParameter(apiParams);
expect(result).toEqual({
"query[filters][0][field]": "status",
"query[filters][0][operator]": "eq",
"query[filters][0][value]": "active",
"query[filters][1][field]": "created",
"query[filters][1][operator]": "gte",
"query[filters][1][value]": "2023-01-01",
"query[sort][0][field]": "name",
"query[sort][0][direction]": "asc",
"query[sort][1][field]": "created",
"query[sort][1][direction]": "desc",
});
});
});
// Failure modes: circular references overflow the stack by design; deep (but
// acyclic) nesting must still succeed.
describe("Error cases and malformed input", () => {
it("should handle circular references gracefully", () => {
const obj: any = { name: "test" };
obj.self = obj;
// This will throw a RangeError due to stack overflow - this is expected behavior
expect(() => encodeAsFormParameter(obj)).toThrow("Maximum call stack size exceeded");
});
it("should handle very deeply nested objects", () => {
let deepObj: any = { value: "deep" };
for (let i = 0; i < 100; i++) {
deepObj = { level: deepObj };
}
expect(() => encodeAsFormParameter(deepObj)).not.toThrow();
const result = encodeAsFormParameter(deepObj);
expect(Object.keys(result).length).toBeGreaterThan(0);
});
it("should handle empty string splitting edge case", () => {
// Test what happens when qs returns an empty string
const result = encodeAsFormParameter({});
expect(result).toEqual({});
});
});
});

View file

@ -0,0 +1,346 @@
import { Blob, File } from "buffer";
import { join } from "path";
/* eslint-disable @typescript-eslint/ban-ts-comment */
import { Readable } from "stream";
import { FormDataWrapper, newFormData } from "../../../src/core/form-data-utils/FormDataWrapper";
// Helper function to serialize FormData to string for inspection
/**
 * Serializes a FormData instance into its multipart/form-data wire format
 * by round-tripping it through a fetch Request body, so tests can inspect
 * boundaries, part headers, and field contents as plain text.
 *
 * @param formData the form to encode
 * @returns the UTF-8 decoded multipart body
 */
async function serializeFormData(formData: FormData): Promise<string> {
    const encoded = await new Request("http://localhost", {
        method: "POST",
        body: formData,
    }).arrayBuffer();
    return new TextDecoder().decode(encoded);
}
// Tests for FormDataWrapper / newFormData, exercising how each supported
// payload type (file paths, Node and web streams, Blob/File, raw binary
// buffers, and primitives) is appended and then serialized into a
// multipart/form-data request body.
describe("FormDataWrapper", () => {
let formData: FormDataWrapper;
beforeEach(async () => {
// Fresh wrapper per test; setup() must run before append/appendFile.
formData = new FormDataWrapper();
await formData.setup();
});
it("Upload file by path", async () => {
await formData.appendFile("file", {
path: join(__dirname, "..", "test-file.txt"),
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('Content-Disposition: form-data; name="file"');
expect(serialized).toContain('filename="test-file.txt"');
expect(serialized).toContain("This is a test file!");
});
it("Upload file by path with filename", async () => {
await formData.appendFile("file", {
path: join(__dirname, "..", "test-file.txt"),
filename: "custom-file.txt",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('Content-Disposition: form-data; name="file"');
expect(serialized).toContain('filename="custom-file.txt"');
expect(serialized).toContain("This is a test file!");
});
// Streams: filename comes from explicit metadata or, for Node streams, is
// auto-detected from the stream's `path` property (POSIX and Windows forms).
describe("Stream handling", () => {
it("serializes Node.js Readable stream with filename", async () => {
const stream = Readable.from(["file content"]);
await formData.appendFile("file", {
data: stream,
filename: "testfile.txt",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('Content-Disposition: form-data; name="file"');
expect(serialized).toContain('filename="testfile.txt"');
expect(serialized).toContain("file content");
});
it("auto-detects filename from stream path property", async () => {
const stream = Readable.from(["file content"]);
(stream as { path?: string }).path = "/test/path/testfile.txt";
await formData.appendFile("file", stream);
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="testfile.txt"');
});
it("handles Windows-style paths", async () => {
const stream = Readable.from(["file content"]);
(stream as { path?: string }).path = "C:\\test\\path\\testfile.txt";
await formData.appendFile("file", stream);
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="testfile.txt"');
});
it("handles empty streams", async () => {
const stream = Readable.from([]);
await formData.appendFile("file", {
data: stream,
filename: "empty.txt",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="empty.txt"');
expect(serialized).toMatch(/------formdata-undici-\w+|------WebKitFormBoundary\w+/);
});
it("serializes Web ReadableStream with filename", async () => {
const stream = new ReadableStream({
start(controller) {
controller.enqueue(new TextEncoder().encode("web stream content"));
controller.close();
},
});
await formData.appendFile("file", {
data: stream,
filename: "webstream.txt",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="webstream.txt"');
expect(serialized).toContain("web stream content");
});
it("handles empty Web ReadableStream", async () => {
const stream = new ReadableStream({
start(controller) {
controller.close();
},
});
await formData.appendFile("file", {
data: stream,
filename: "empty.txt",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="empty.txt"');
expect(serialized).toMatch(/------formdata-undici-\w+|------WebKitFormBoundary\w+/);
});
});
describe("Blob and File types", () => {
it("serializes Blob with specified filename", async () => {
const blob = new Blob(["file content"], { type: "text/plain" });
await formData.appendFile("file", {
data: blob,
filename: "testfile.txt",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="testfile.txt"');
expect(serialized).toContain("Content-Type: text/plain");
expect(serialized).toContain("file content");
});
it("uses default filename for Blob without explicit filename", async () => {
const blob = new Blob(["file content"], { type: "text/plain" });
await formData.appendFile("file", blob);
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="blob"');
});
it("preserves File object filename", async () => {
// Guarded because the global File constructor is unavailable in some runtimes.
if (typeof File !== "undefined") {
const file = new File(["file content"], "original.txt", { type: "text/plain" });
await formData.appendFile("file", file);
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="original.txt"');
expect(serialized).toContain("file content");
}
});
it("allows filename override for File objects", async () => {
if (typeof File !== "undefined") {
const file = new File(["file content"], "original.txt", { type: "text/plain" });
await formData.appendFile("file", {
data: file,
filename: "override.txt",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="override.txt"');
expect(serialized).not.toContain('filename="original.txt"');
}
});
});
describe("Binary data types", () => {
it("serializes ArrayBuffer with filename", async () => {
const arrayBuffer = new ArrayBuffer(8);
new Uint8Array(arrayBuffer).set([1, 2, 3, 4, 5, 6, 7, 8]);
await formData.appendFile("file", {
data: arrayBuffer,
filename: "binary.bin",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="binary.bin"');
expect(serialized).toMatch(/------formdata-undici-\w+|------WebKitFormBoundary\w+/);
});
it("serializes Uint8Array with filename", async () => {
const uint8Array = new Uint8Array([72, 101, 108, 108, 111]); // "Hello"
await formData.appendFile("file", {
data: uint8Array,
filename: "binary.bin",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="binary.bin"');
expect(serialized).toContain("Hello");
});
it("serializes other typed arrays", async () => {
const int16Array = new Int16Array([1000, 2000, 3000]);
await formData.appendFile("file", {
data: int16Array,
filename: "numbers.bin",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="numbers.bin"');
});
it("serializes Buffer data with filename", async () => {
if (typeof Buffer !== "undefined" && typeof Buffer.isBuffer === "function") {
const buffer = Buffer.from("test content");
await formData.appendFile("file", {
data: buffer,
filename: "test.txt",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="test.txt"');
expect(serialized).toContain("test content");
}
});
});
describe("Text and primitive types", () => {
it("serializes string as regular form field", async () => {
formData.append("text", "test string");
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('name="text"');
expect(serialized).not.toContain("filename=");
expect(serialized).toContain("test string");
});
it("serializes numbers and booleans as strings", async () => {
formData.append("number", 12345);
formData.append("flag", true);
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain("12345");
expect(serialized).toContain("true");
});
});
describe("Edge cases and error handling", () => {
it("handles empty filename gracefully", async () => {
await formData.appendFile("file", {
data: new Blob(["content"], { type: "text/plain" }),
filename: "",
});
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="blob"'); // Default fallback
});
it("handles multiple files in single form", async () => {
await formData.appendFile("file1", {
data: new Blob(["content1"], { type: "text/plain" }),
filename: "file1.txt",
});
await formData.appendFile("file2", {
data: new Blob(["content2"], { type: "text/plain" }),
filename: "file2.txt",
});
formData.append("text", "regular field");
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toContain('filename="file1.txt"');
expect(serialized).toContain('filename="file2.txt"');
expect(serialized).toContain('name="text"');
expect(serialized).not.toContain('filename="text"');
});
});
// getRequest() is expected to yield { body: FormData, headers: {}, duplex: "half" };
// "half" duplex is what fetch requires for streaming request bodies.
describe("Request structure", () => {
it("returns correct request structure", async () => {
await formData.appendFile("file", {
data: new Blob(["content"], { type: "text/plain" }),
filename: "test.txt",
});
const request = formData.getRequest();
expect(request).toHaveProperty("body");
expect(request).toHaveProperty("headers");
expect(request).toHaveProperty("duplex");
expect(request.body).toBeInstanceOf(FormData);
expect(request.headers).toEqual({});
expect(request.duplex).toBe("half");
});
it("generates proper multipart boundary structure", async () => {
await formData.appendFile("file", {
data: new Blob(["test content"], { type: "text/plain" }),
filename: "test.txt",
});
formData.append("field", "value");
const serialized = await serializeFormData(formData.getRequest().body);
expect(serialized).toMatch(/------formdata-undici-\w+|------WebKitFormBoundary\w+/);
expect(serialized).toContain("Content-Disposition: form-data;");
expect(serialized).toMatch(/------formdata-undici-\w+--|------WebKitFormBoundary\w+--/);
});
});
describe("Factory function", () => {
it("returns FormDataWrapper instance", async () => {
const formData = await newFormData();
expect(formData).toBeInstanceOf(FormDataWrapper);
});
it("creates independent instances", async () => {
const formData1 = await newFormData();
const formData2 = await newFormData();
await formData1.setup();
await formData2.setup();
formData1.append("test1", "value1");
formData2.append("test2", "value2");
const request1 = formData1.getRequest() as { body: FormData };
const request2 = formData2.getRequest() as { body: FormData };
const entries1 = Array.from(request1.body.entries());
const entries2 = Array.from(request2.body.entries());
expect(entries1).toHaveLength(1);
expect(entries2).toHaveLength(1);
expect(entries1[0][0]).toBe("test1");
expect(entries2[0][0]).toBe("test2");
});
});
});

View file

@ -0,0 +1 @@
This is a test file!

View file

@ -1207,7 +1207,7 @@ describe("SkyvernClient", () => {
.build();
await expect(async () => {
return await client.retryRunWebhook("run_id");
return await client.retryRunWebhook("run_id", undefined);
}).rejects.toThrow(Skyvern.UnprocessableEntityError);
});

View file

@ -1,48 +0,0 @@
// This file was auto-generated by Fern from our API Definition.
import * as Skyvern from "../../src/api/index";
import { SkyvernClient } from "../../src/Client";
import { mockServerPool } from "../mock-server/MockServerPool";
// Tests for the workflows.setWorkflowTemplateStatus endpoint against a
// mock server: one success path (200) and one validation-failure path (422).
describe("Workflows", () => {
    // Builds a mock server + client pair with the template endpoint stubbed
    // to respond with the given status code and JSON body.
    const stubTemplateEndpoint = (statusCode: number, body: unknown): SkyvernClient => {
        const server = mockServerPool.createServer();
        const client = new SkyvernClient({ apiKey: "test", environment: server.baseUrl });
        server
            .mockEndpoint()
            .put("/v1/workflows/workflow_permanent_id/template")
            .respondWith()
            .statusCode(statusCode)
            .jsonBody(body)
            .build();
        return client;
    };

    test("setWorkflowTemplateStatus (1)", async () => {
        const client = stubTemplateEndpoint(200, { key: "value" });
        const response = await client.workflows.setWorkflowTemplateStatus("workflow_permanent_id", {
            is_template: true,
        });
        expect(response).toEqual({
            key: "value",
        });
    });

    test("setWorkflowTemplateStatus (2)", async () => {
        const client = stubTemplateEndpoint(422, { key: "value" });
        await expect(async () => {
            return await client.workflows.setWorkflowTemplateStatus("workflow_permanent_id", {
                is_template: true,
            });
        }).rejects.toThrow(Skyvern.UnprocessableEntityError);
    });
});

View file

@ -283,6 +283,7 @@ if typing.TYPE_CHECKING:
PromptAction,
PromptBranchCriteria,
ProxyLocation,
RetryRunWebhookRequest,
RunEngine,
RunSdkActionRequestAction,
RunSdkActionRequestAction_AiAct,
@ -352,6 +353,7 @@ if typing.TYPE_CHECKING:
TotpType,
UploadFileAction,
UploadFileActionData,
UploadFileResponse,
UploadToS3Block,
UploadToS3BlockYaml,
UrlBlock,
@ -496,7 +498,7 @@ if typing.TYPE_CHECKING:
WorkflowStatus,
)
from .errors import BadRequestError, ConflictError, ForbiddenError, NotFoundError, UnprocessableEntityError
from . import scripts, workflows
from . import scripts
from .client import AsyncSkyvern, Skyvern
from .environment import SkyvernEnvironment
from .version import __version__
@ -782,6 +784,7 @@ _dynamic_imports: typing.Dict[str, str] = {
"PromptAction": ".types",
"PromptBranchCriteria": ".types",
"ProxyLocation": ".types",
"RetryRunWebhookRequest": ".types",
"RunEngine": ".types",
"RunSdkActionRequestAction": ".types",
"RunSdkActionRequestAction_AiAct": ".types",
@ -854,6 +857,7 @@ _dynamic_imports: typing.Dict[str, str] = {
"UnprocessableEntityError": ".errors",
"UploadFileAction": ".types",
"UploadFileActionData": ".types",
"UploadFileResponse": ".types",
"UploadToS3Block": ".types",
"UploadToS3BlockYaml": ".types",
"UrlBlock": ".types",
@ -998,7 +1002,6 @@ _dynamic_imports: typing.Dict[str, str] = {
"WorkflowStatus": ".types",
"__version__": ".version",
"scripts": ".scripts",
"workflows": ".workflows",
}
@ -1305,6 +1308,7 @@ __all__ = [
"PromptAction",
"PromptBranchCriteria",
"ProxyLocation",
"RetryRunWebhookRequest",
"RunEngine",
"RunSdkActionRequestAction",
"RunSdkActionRequestAction_AiAct",
@ -1377,6 +1381,7 @@ __all__ = [
"UnprocessableEntityError",
"UploadFileAction",
"UploadFileActionData",
"UploadFileResponse",
"UploadToS3Block",
"UploadToS3BlockYaml",
"UrlBlock",
@ -1521,5 +1526,4 @@ __all__ = [
"WorkflowStatus",
"__version__",
"scripts",
"workflows",
]

View file

@ -6,6 +6,7 @@ import datetime as dt
import typing
import httpx
from . import core
from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from .core.request_options import RequestOptions
from .environment import SkyvernEnvironment
@ -19,6 +20,7 @@ from .types.create_script_response import CreateScriptResponse
from .types.credential_response import CredentialResponse
from .types.get_run_response import GetRunResponse
from .types.proxy_location import ProxyLocation
from .types.retry_run_webhook_request import RetryRunWebhookRequest
from .types.run_engine import RunEngine
from .types.run_sdk_action_request_action import RunSdkActionRequestAction
from .types.run_sdk_action_response import RunSdkActionResponse
@ -30,6 +32,7 @@ from .types.task_run_request_data_extraction_schema import TaskRunRequestDataExt
from .types.task_run_request_proxy_location import TaskRunRequestProxyLocation
from .types.task_run_response import TaskRunResponse
from .types.totp_code import TotpCode
from .types.upload_file_response import UploadFileResponse
from .types.workflow import Workflow
from .types.workflow_create_yaml_request import WorkflowCreateYamlRequest
from .types.workflow_run_request_proxy_location import WorkflowRunRequestProxyLocation
@ -39,7 +42,6 @@ from .types.workflow_status import WorkflowStatus
if typing.TYPE_CHECKING:
from .scripts.client import AsyncScriptsClient, ScriptsClient
from .workflows.client import AsyncWorkflowsClient, WorkflowsClient
# this is used as the default value for optional parameters
OMIT = typing.cast(typing.Any, ...)
@ -110,7 +112,6 @@ class Skyvern:
timeout=_defaulted_timeout,
)
self._raw_client = RawSkyvern(client_wrapper=self._client_wrapper)
self._workflows: typing.Optional[WorkflowsClient] = None
self._scripts: typing.Optional[ScriptsClient] = None
@property
@ -793,7 +794,7 @@ class Skyvern:
self,
run_id: str,
*,
webhook_url: typing.Optional[str] = None,
request: typing.Optional[RetryRunWebhookRequest] = None,
request_options: typing.Optional[RequestOptions] = None,
) -> typing.Optional[typing.Any]:
"""
@ -804,6 +805,8 @@ class Skyvern:
run_id : str
The id of the task run or the workflow run.
request : typing.Optional[RetryRunWebhookRequest]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@ -814,16 +817,17 @@ class Skyvern:
Examples
--------
from skyvern import Skyvern
from skyvern import RetryRunWebhookRequest, Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.retry_run_webhook(
run_id="tsk_123",
request=RetryRunWebhookRequest(),
)
"""
_response = self._raw_client.retry_run_webhook(run_id, webhook_url=webhook_url, request_options=request_options)
_response = self._raw_client.retry_run_webhook(run_id, request=request, request_options=request_options)
return _response.data
def get_run_timeline(
@ -859,6 +863,35 @@ class Skyvern:
_response = self._raw_client.get_run_timeline(run_id, request_options=request_options)
return _response.data
def upload_file(
self, *, file: core.File, request_options: typing.Optional[RequestOptions] = None
) -> UploadFileResponse:
"""
Parameters
----------
file : core.File
See core.File for more documentation
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
UploadFileResponse
Successful Response
Examples
--------
from skyvern import Skyvern
client = Skyvern(
api_key="YOUR_API_KEY",
)
client.upload_file()
"""
_response = self._raw_client.upload_file(file=file, request_options=request_options)
return _response.data
def list_browser_profiles(
self, *, include_deleted: typing.Optional[bool] = None, request_options: typing.Optional[RequestOptions] = None
) -> typing.List[BrowserProfile]:
@ -1860,14 +1893,6 @@ class Skyvern:
)
return _response.data
@property
def workflows(self):
if self._workflows is None:
from .workflows.client import WorkflowsClient # noqa: E402
self._workflows = WorkflowsClient(client_wrapper=self._client_wrapper)
return self._workflows
@property
def scripts(self):
if self._scripts is None:
@ -1943,7 +1968,6 @@ class AsyncSkyvern:
timeout=_defaulted_timeout,
)
self._raw_client = AsyncRawSkyvern(client_wrapper=self._client_wrapper)
self._workflows: typing.Optional[AsyncWorkflowsClient] = None
self._scripts: typing.Optional[AsyncScriptsClient] = None
@property
@ -2708,7 +2732,7 @@ class AsyncSkyvern:
self,
run_id: str,
*,
webhook_url: typing.Optional[str] = None,
request: typing.Optional[RetryRunWebhookRequest] = None,
request_options: typing.Optional[RequestOptions] = None,
) -> typing.Optional[typing.Any]:
"""
@ -2719,6 +2743,8 @@ class AsyncSkyvern:
run_id : str
The id of the task run or the workflow run.
request : typing.Optional[RetryRunWebhookRequest]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@ -2731,7 +2757,7 @@ class AsyncSkyvern:
--------
import asyncio
from skyvern import AsyncSkyvern
from skyvern import AsyncSkyvern, RetryRunWebhookRequest
client = AsyncSkyvern(
api_key="YOUR_API_KEY",
@ -2741,14 +2767,13 @@ class AsyncSkyvern:
async def main() -> None:
await client.retry_run_webhook(
run_id="tsk_123",
request=RetryRunWebhookRequest(),
)
asyncio.run(main())
"""
_response = await self._raw_client.retry_run_webhook(
run_id, webhook_url=webhook_url, request_options=request_options
)
_response = await self._raw_client.retry_run_webhook(run_id, request=request, request_options=request_options)
return _response.data
async def get_run_timeline(
@ -2792,6 +2817,43 @@ class AsyncSkyvern:
_response = await self._raw_client.get_run_timeline(run_id, request_options=request_options)
return _response.data
async def upload_file(
self, *, file: core.File, request_options: typing.Optional[RequestOptions] = None
) -> UploadFileResponse:
"""
Parameters
----------
file : core.File
See core.File for more documentation
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
UploadFileResponse
Successful Response
Examples
--------
import asyncio
from skyvern import AsyncSkyvern
client = AsyncSkyvern(
api_key="YOUR_API_KEY",
)
async def main() -> None:
await client.upload_file()
asyncio.run(main())
"""
_response = await self._raw_client.upload_file(file=file, request_options=request_options)
return _response.data
async def list_browser_profiles(
self, *, include_deleted: typing.Optional[bool] = None, request_options: typing.Optional[RequestOptions] = None
) -> typing.List[BrowserProfile]:
@ -3957,14 +4019,6 @@ class AsyncSkyvern:
)
return _response.data
@property
def workflows(self):
if self._workflows is None:
from .workflows.client import AsyncWorkflowsClient # noqa: E402
self._workflows = AsyncWorkflowsClient(client_wrapper=self._client_wrapper)
return self._workflows
@property
def scripts(self):
if self._scripts is None:

View file

@ -22,10 +22,10 @@ class BaseClientWrapper:
def get_headers(self) -> typing.Dict[str, str]:
headers: typing.Dict[str, str] = {
"User-Agent": "skyvern/1.0.3",
"User-Agent": "skyvern/1.0.6",
"X-Fern-Language": "Python",
"X-Fern-SDK-Name": "skyvern",
"X-Fern-SDK-Version": "1.0.3",
"X-Fern-SDK-Version": "1.0.6",
**(self.get_custom_headers() or {}),
}
if self._api_key is not None:

View file

@ -4,6 +4,7 @@ import datetime as dt
import typing
from json.decoder import JSONDecodeError
from . import core
from .core.api_error import ApiError
from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from .core.http_response import AsyncHttpResponse, HttpResponse
@ -25,6 +26,7 @@ from .types.create_script_response import CreateScriptResponse
from .types.credential_response import CredentialResponse
from .types.get_run_response import GetRunResponse
from .types.proxy_location import ProxyLocation
from .types.retry_run_webhook_request import RetryRunWebhookRequest
from .types.run_engine import RunEngine
from .types.run_sdk_action_request_action import RunSdkActionRequestAction
from .types.run_sdk_action_response import RunSdkActionResponse
@ -36,6 +38,7 @@ from .types.task_run_request_data_extraction_schema import TaskRunRequestDataExt
from .types.task_run_request_proxy_location import TaskRunRequestProxyLocation
from .types.task_run_response import TaskRunResponse
from .types.totp_code import TotpCode
from .types.upload_file_response import UploadFileResponse
from .types.workflow import Workflow
from .types.workflow_create_yaml_request import WorkflowCreateYamlRequest
from .types.workflow_run_request_proxy_location import WorkflowRunRequestProxyLocation
@ -960,7 +963,7 @@ class RawSkyvern:
self,
run_id: str,
*,
webhook_url: typing.Optional[str] = None,
request: typing.Optional[RetryRunWebhookRequest] = None,
request_options: typing.Optional[RequestOptions] = None,
) -> HttpResponse[typing.Optional[typing.Any]]:
"""
@ -971,6 +974,8 @@ class RawSkyvern:
run_id : str
The id of the task run or the workflow run.
request : typing.Optional[RetryRunWebhookRequest]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@ -979,18 +984,17 @@ class RawSkyvern:
HttpResponse[typing.Optional[typing.Any]]
Successful Response
"""
request_kwargs: dict[str, typing.Any] = {}
if webhook_url is not None:
request_kwargs = {
"json": {"webhook_url": webhook_url},
"headers": {"content-type": "application/json"},
"omit": OMIT,
}
_response = self._client_wrapper.httpx_client.request(
f"v1/runs/{jsonable_encoder(run_id)}/retry_webhook",
method="POST",
json=convert_and_respect_annotation_metadata(
object_=request, annotation=RetryRunWebhookRequest, direction="write"
),
headers={
"content-type": "application/json",
},
request_options=request_options,
**request_kwargs,
omit=OMIT,
)
try:
if _response is None or not _response.text.strip():
@ -1092,6 +1096,60 @@ class RawSkyvern:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
def upload_file(
self, *, file: core.File, request_options: typing.Optional[RequestOptions] = None
) -> HttpResponse[UploadFileResponse]:
"""
Parameters
----------
file : core.File
See core.File for more documentation
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
HttpResponse[UploadFileResponse]
Successful Response
"""
_response = self._client_wrapper.httpx_client.request(
"v1/upload_file",
method="POST",
data={},
files={
"file": file,
},
request_options=request_options,
omit=OMIT,
force_multipart=True,
)
try:
if 200 <= _response.status_code < 300:
_data = typing.cast(
UploadFileResponse,
parse_obj_as(
type_=UploadFileResponse, # type: ignore
object_=_response.json(),
),
)
return HttpResponse(response=_response, data=_data)
if _response.status_code == 422:
raise UnprocessableEntityError(
headers=dict(_response.headers),
body=typing.cast(
typing.Optional[typing.Any],
parse_obj_as(
type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
)
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
def list_browser_profiles(
self, *, include_deleted: typing.Optional[bool] = None, request_options: typing.Optional[RequestOptions] = None
) -> HttpResponse[typing.List[BrowserProfile]]:
@ -3492,7 +3550,7 @@ class AsyncRawSkyvern:
self,
run_id: str,
*,
webhook_url: typing.Optional[str] = None,
request: typing.Optional[RetryRunWebhookRequest] = None,
request_options: typing.Optional[RequestOptions] = None,
) -> AsyncHttpResponse[typing.Optional[typing.Any]]:
"""
@ -3503,6 +3561,8 @@ class AsyncRawSkyvern:
run_id : str
The id of the task run or the workflow run.
request : typing.Optional[RetryRunWebhookRequest]
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
@ -3511,18 +3571,17 @@ class AsyncRawSkyvern:
AsyncHttpResponse[typing.Optional[typing.Any]]
Successful Response
"""
request_kwargs: dict[str, typing.Any] = {}
if webhook_url is not None:
request_kwargs = {
"json": {"webhook_url": webhook_url},
"headers": {"content-type": "application/json"},
"omit": OMIT,
}
_response = await self._client_wrapper.httpx_client.request(
f"v1/runs/{jsonable_encoder(run_id)}/retry_webhook",
method="POST",
json=convert_and_respect_annotation_metadata(
object_=request, annotation=RetryRunWebhookRequest, direction="write"
),
headers={
"content-type": "application/json",
},
request_options=request_options,
**request_kwargs,
omit=OMIT,
)
try:
if _response is None or not _response.text.strip():
@ -3624,6 +3683,60 @@ class AsyncRawSkyvern:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
async def upload_file(
self, *, file: core.File, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[UploadFileResponse]:
"""
Parameters
----------
file : core.File
See core.File for more documentation
request_options : typing.Optional[RequestOptions]
Request-specific configuration.
Returns
-------
AsyncHttpResponse[UploadFileResponse]
Successful Response
"""
_response = await self._client_wrapper.httpx_client.request(
"v1/upload_file",
method="POST",
data={},
files={
"file": file,
},
request_options=request_options,
omit=OMIT,
force_multipart=True,
)
try:
if 200 <= _response.status_code < 300:
_data = typing.cast(
UploadFileResponse,
parse_obj_as(
type_=UploadFileResponse, # type: ignore
object_=_response.json(),
),
)
return AsyncHttpResponse(response=_response, data=_data)
if _response.status_code == 422:
raise UnprocessableEntityError(
headers=dict(_response.headers),
body=typing.cast(
typing.Optional[typing.Any],
parse_obj_as(
type_=typing.Optional[typing.Any], # type: ignore
object_=_response.json(),
),
),
)
_response_json = _response.json()
except JSONDecodeError:
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
async def list_browser_profiles(
self, *, include_deleted: typing.Optional[bool] = None, request_options: typing.Optional[RequestOptions] = None
) -> AsyncHttpResponse[typing.List[BrowserProfile]]:

View file

@ -310,6 +310,7 @@ if typing.TYPE_CHECKING:
from .prompt_action import PromptAction
from .prompt_branch_criteria import PromptBranchCriteria
from .proxy_location import ProxyLocation
from .retry_run_webhook_request import RetryRunWebhookRequest
from .run_engine import RunEngine
from .run_sdk_action_request_action import (
RunSdkActionRequestAction,
@ -385,6 +386,7 @@ if typing.TYPE_CHECKING:
from .totp_type import TotpType
from .upload_file_action import UploadFileAction
from .upload_file_action_data import UploadFileActionData
from .upload_file_response import UploadFileResponse
from .upload_to_s3block import UploadToS3Block
from .upload_to_s3block_yaml import UploadToS3BlockYaml
from .url_block import UrlBlock
@ -818,6 +820,7 @@ _dynamic_imports: typing.Dict[str, str] = {
"PromptAction": ".prompt_action",
"PromptBranchCriteria": ".prompt_branch_criteria",
"ProxyLocation": ".proxy_location",
"RetryRunWebhookRequest": ".retry_run_webhook_request",
"RunEngine": ".run_engine",
"RunSdkActionRequestAction": ".run_sdk_action_request_action",
"RunSdkActionRequestAction_AiAct": ".run_sdk_action_request_action",
@ -887,6 +890,7 @@ _dynamic_imports: typing.Dict[str, str] = {
"TotpType": ".totp_type",
"UploadFileAction": ".upload_file_action",
"UploadFileActionData": ".upload_file_action_data",
"UploadFileResponse": ".upload_file_response",
"UploadToS3Block": ".upload_to_s3block",
"UploadToS3BlockYaml": ".upload_to_s3block_yaml",
"UrlBlock": ".url_block",
@ -1330,6 +1334,7 @@ __all__ = [
"PromptAction",
"PromptBranchCriteria",
"ProxyLocation",
"RetryRunWebhookRequest",
"RunEngine",
"RunSdkActionRequestAction",
"RunSdkActionRequestAction_AiAct",
@ -1399,6 +1404,7 @@ __all__ = [
"TotpType",
"UploadFileAction",
"UploadFileActionData",
"UploadFileResponse",
"UploadToS3Block",
"UploadToS3BlockYaml",
"UrlBlock",

View file

@ -3,9 +3,7 @@
import typing
import pydantic
import typing_extensions
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
from ..core.serialization import FieldMetadata
class PromptAction(UniversalBaseModel):
@ -18,9 +16,7 @@ class PromptAction(UniversalBaseModel):
The prompt to send to the LLM
"""
schema_: typing_extensions.Annotated[
typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="schema")
] = pydantic.Field(default=None)
response_schema: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
"""
Optional JSON schema to structure the response
"""

View file

@ -0,0 +1,22 @@
# This file was auto-generated by Fern from our API Definition.
import typing
import pydantic
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
class RetryRunWebhookRequest(UniversalBaseModel):
    """Request body for POST /v1/runs/{run_id}/retry_webhook.

    All fields are optional; an empty request retries the webhook delivery
    using the run's stored webhook configuration.
    """

    webhook_url: typing.Optional[str] = pydantic.Field(default=None)
    """
    Optional webhook URL to send the payload to instead of the stored configuration
    """

    # Branch on the installed pydantic major version: v2 uses model_config,
    # v1 uses a nested Config class. Both allow unknown extra fields and
    # freeze instances after construction.
    if IS_PYDANTIC_V2:
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
    else:

        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow

View file

@ -5,9 +5,7 @@ from __future__ import annotations
import typing
import pydantic
import typing_extensions
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
from ..core.serialization import FieldMetadata
from .act_action_data import ActActionData
from .click_action_data import ClickActionData
from .extract_action_data import ExtractActionData
@ -172,9 +170,7 @@ class RunSdkActionRequestAction_Prompt(UniversalBaseModel):
type: typing.Literal["prompt"] = "prompt"
prompt: str
schema_: typing_extensions.Annotated[
typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]], FieldMetadata(alias="schema")
] = None
response_schema: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
model: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
if IS_PYDANTIC_V2:

View file

@ -0,0 +1,29 @@
# This file was auto-generated by Fern from our API Definition.
import typing
import pydantic
import typing_extensions
from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
from ..core.serialization import FieldMetadata
class UploadFileResponse(UniversalBaseModel):
    """Response model for the file-upload endpoint (POST /v1/upload_file)."""

    # Serialized as "s3_uri" on the wire; "s3uri" is the Python attribute name.
    s3uri: typing_extensions.Annotated[str, FieldMetadata(alias="s3_uri")] = pydantic.Field()
    """
    S3 URI where the file was uploaded
    """

    presigned_url: str = pydantic.Field()
    """
    Presigned URL to access the uploaded file
    """

    # Pydantic v1/v2 compatibility: allow unknown extra fields and freeze
    # instances after construction in both versions.
    if IS_PYDANTIC_V2:
        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
    else:

        class Config:
            frozen = True
            smart_union = True
            extra = pydantic.Extra.allow

View file

@ -1,4 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
# isort: skip_file

View file

@ -1,127 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.request_options import RequestOptions
from .raw_client import AsyncRawWorkflowsClient, RawWorkflowsClient
class WorkflowsClient:
    """Synchronous client for workflow-level operations.

    Thin wrapper over :class:`RawWorkflowsClient`: each method delegates to
    the raw client and unwraps the parsed ``.data`` payload from its
    ``HttpResponse``.
    """

    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._raw_client = RawWorkflowsClient(client_wrapper=client_wrapper)

    @property
    def with_raw_response(self) -> RawWorkflowsClient:
        """
        Retrieves a raw implementation of this client that returns raw responses.

        Returns
        -------
        RawWorkflowsClient
        """
        return self._raw_client

    def set_workflow_template_status(
        self, workflow_permanent_id: str, *, is_template: bool, request_options: typing.Optional[RequestOptions] = None
    ) -> typing.Dict[str, typing.Optional[typing.Any]]:
        """
        Set or unset a workflow as a template.

        Template status is stored at the workflow_permanent_id level (not per-version),
        meaning all versions of a workflow share the same template status.

        Parameters
        ----------
        workflow_permanent_id : str
            Permanent id of the workflow whose template flag is being changed.

        is_template : bool
            ``True`` to mark the workflow as a template, ``False`` to unmark it.

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.Dict[str, typing.Optional[typing.Any]]
            Successful Response
        """
        raw_response = self._raw_client.set_workflow_template_status(
            workflow_permanent_id, is_template=is_template, request_options=request_options
        )
        return raw_response.data
class AsyncWorkflowsClient:
    """Asynchronous client for workflow-level operations.

    Thin wrapper over :class:`AsyncRawWorkflowsClient`: each coroutine
    delegates to the raw client and unwraps the parsed ``.data`` payload
    from its ``AsyncHttpResponse``.
    """

    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._raw_client = AsyncRawWorkflowsClient(client_wrapper=client_wrapper)

    @property
    def with_raw_response(self) -> AsyncRawWorkflowsClient:
        """
        Retrieves a raw implementation of this client that returns raw responses.

        Returns
        -------
        AsyncRawWorkflowsClient
        """
        return self._raw_client

    async def set_workflow_template_status(
        self, workflow_permanent_id: str, *, is_template: bool, request_options: typing.Optional[RequestOptions] = None
    ) -> typing.Dict[str, typing.Optional[typing.Any]]:
        """
        Set or unset a workflow as a template.

        Template status is stored at the workflow_permanent_id level (not per-version),
        meaning all versions of a workflow share the same template status.

        Parameters
        ----------
        workflow_permanent_id : str
            Permanent id of the workflow whose template flag is being changed.

        is_template : bool
            ``True`` to mark the workflow as a template, ``False`` to unmark it.

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        typing.Dict[str, typing.Optional[typing.Any]]
            Successful Response
        """
        raw_response = await self._raw_client.set_workflow_template_status(
            workflow_permanent_id, is_template=is_template, request_options=request_options
        )
        return raw_response.data

View file

@ -1,136 +0,0 @@
# This file was auto-generated by Fern from our API Definition.
import typing
from json.decoder import JSONDecodeError
from ..core.api_error import ApiError
from ..core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
from ..core.http_response import AsyncHttpResponse, HttpResponse
from ..core.jsonable_encoder import jsonable_encoder
from ..core.pydantic_utilities import parse_obj_as
from ..core.request_options import RequestOptions
from ..errors.unprocessable_entity_error import UnprocessableEntityError
class RawWorkflowsClient:
    """Low-level synchronous client for workflow endpoints.

    Returns :class:`HttpResponse` wrappers so callers can access both the
    parsed payload and the underlying httpx response.
    """

    def __init__(self, *, client_wrapper: SyncClientWrapper):
        self._client_wrapper = client_wrapper

    def set_workflow_template_status(
        self, workflow_permanent_id: str, *, is_template: bool, request_options: typing.Optional[RequestOptions] = None
    ) -> HttpResponse[typing.Dict[str, typing.Optional[typing.Any]]]:
        """
        Set or unset a workflow as a template.

        Template status is stored at the workflow_permanent_id level (not per-version),
        meaning all versions of a workflow share the same template status.

        Parameters
        ----------
        workflow_permanent_id : str

        is_template : bool

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        HttpResponse[typing.Dict[str, typing.Optional[typing.Any]]]
            Successful Response
        """
        # is_template is sent as a query parameter, not a JSON body.
        _response = self._client_wrapper.httpx_client.request(
            f"v1/workflows/{jsonable_encoder(workflow_permanent_id)}/template",
            method="PUT",
            params={
                "is_template": is_template,
            },
            request_options=request_options,
        )
        try:
            # 2xx: parse the JSON body into a plain dict and wrap it.
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    typing.Dict[str, typing.Optional[typing.Any]],
                    parse_obj_as(
                        type_=typing.Dict[str, typing.Optional[typing.Any]],  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return HttpResponse(response=_response, data=_data)
            # 422: API validation failure — surface as a typed error.
            if _response.status_code == 422:
                raise UnprocessableEntityError(
                    headers=dict(_response.headers),
                    body=typing.cast(
                        typing.Optional[typing.Any],
                        parse_obj_as(
                            type_=typing.Optional[typing.Any],  # type: ignore
                            object_=_response.json(),
                        ),
                    ),
                )
            _response_json = _response.json()
        except JSONDecodeError:
            # Non-JSON error body: raise with the raw response text instead.
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        # Any other status code with a JSON body becomes a generic ApiError.
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)
class AsyncRawWorkflowsClient:
    """Low-level asynchronous client for workflow endpoints.

    Returns :class:`AsyncHttpResponse` wrappers so callers can access both
    the parsed payload and the underlying httpx response.
    """

    def __init__(self, *, client_wrapper: AsyncClientWrapper):
        self._client_wrapper = client_wrapper

    async def set_workflow_template_status(
        self, workflow_permanent_id: str, *, is_template: bool, request_options: typing.Optional[RequestOptions] = None
    ) -> AsyncHttpResponse[typing.Dict[str, typing.Optional[typing.Any]]]:
        """
        Set or unset a workflow as a template.

        Template status is stored at the workflow_permanent_id level (not per-version),
        meaning all versions of a workflow share the same template status.

        Parameters
        ----------
        workflow_permanent_id : str

        is_template : bool

        request_options : typing.Optional[RequestOptions]
            Request-specific configuration.

        Returns
        -------
        AsyncHttpResponse[typing.Dict[str, typing.Optional[typing.Any]]]
            Successful Response
        """
        # is_template is sent as a query parameter, not a JSON body.
        _response = await self._client_wrapper.httpx_client.request(
            f"v1/workflows/{jsonable_encoder(workflow_permanent_id)}/template",
            method="PUT",
            params={
                "is_template": is_template,
            },
            request_options=request_options,
        )
        try:
            # 2xx: parse the JSON body into a plain dict and wrap it.
            if 200 <= _response.status_code < 300:
                _data = typing.cast(
                    typing.Dict[str, typing.Optional[typing.Any]],
                    parse_obj_as(
                        type_=typing.Dict[str, typing.Optional[typing.Any]],  # type: ignore
                        object_=_response.json(),
                    ),
                )
                return AsyncHttpResponse(response=_response, data=_data)
            # 422: API validation failure — surface as a typed error.
            if _response.status_code == 422:
                raise UnprocessableEntityError(
                    headers=dict(_response.headers),
                    body=typing.cast(
                        typing.Optional[typing.Any],
                        parse_obj_as(
                            type_=typing.Optional[typing.Any],  # type: ignore
                            object_=_response.json(),
                        ),
                    ),
                )
            _response_json = _response.json()
        except JSONDecodeError:
            # Non-JSON error body: raise with the raw response text instead.
            raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text)
        # Any other status code with a JSON body becomes a generic ApiError.
        raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json)

2
uv.lock generated
View file

@ -5082,7 +5082,7 @@ wheels = [
[[package]]
name = "skyvern"
version = "1.0.3"
version = "1.0.6"
source = { editable = "." }
dependencies = [
{ name = "aioboto3" },