diff --git a/src/GEN_VERSION b/src/GEN_VERSION index c6a93cf..766f9e7 100644 --- a/src/GEN_VERSION +++ b/src/GEN_VERSION @@ -1 +1 @@ -20240617.20240727 \ No newline at end of file +20240617.20240806 \ No newline at end of file diff --git a/src/Nodes.ts b/src/Nodes.ts index 9f1dd39..37b6902 100644 --- a/src/Nodes.ts +++ b/src/Nodes.ts @@ -1,7 +1,7 @@ /** * 𐃏 Substrate * @generated file - * 20240617.20240727 + * 20240617.20240806 */ import * as OpenAPI from "substrate/OpenAPI"; @@ -294,6 +294,42 @@ export class StableDiffusionXLControlNetOutOutputs extends FutureArray { return super._result() as Promise; } } +/** Generated frames. */ +export class StableVideoDiffusionOutFrameUris extends FutureArray { + /** Returns `FutureString` at given index. */ + override at(index: number) { + return new FutureString(this._directive.next(index)); + } + /** Returns the result for `StableVideoDiffusionOutFrameUris` once it's node has been run. */ + protected override async _result(): Promise { + return super._result() as Promise; + } +} +export class StableVideoDiffusionOutFrameUrisItem extends FutureString {} +/** Frames. */ +export class InterpolateFramesInFrameUris extends FutureArray { + /** Returns `FutureString` at given index. */ + override at(index: number) { + return new FutureString(this._directive.next(index)); + } + /** Returns the result for `InterpolateFramesInFrameUris` once it's node has been run. */ + protected override async _result(): Promise { + return super._result() as Promise; + } +} +export class InterpolateFramesInFrameUrisItem extends FutureString {} +/** Output frames. */ +export class InterpolateFramesOutFrameUris extends FutureArray { + /** Returns `FutureString` at given index. */ + override at(index: number) { + return new FutureString(this._directive.next(index)); + } + /** Returns the result for `InterpolateFramesOutFrameUris` once it's node has been run. */ + protected override async _result(): Promise { + return super._result() as Promise; + } +} +export class InterpolateFramesOutFrameUrisItem extends FutureString {} /** Generated images. */ export class MultiInpaintImageOutOutputs extends FutureArray { /** Returns `InpaintImageOut` at given index. */ @@ -1688,7 +1724,7 @@ export class StableVideoDiffusionIn extends FutureObject { get seed() { return new FutureNumber(this._directive.next("seed")); } - /** (Optional) Frames per second of the generated video. */ + /** (Optional) Frames per second of the generated video. Ignored if output format is `frames`. */ get fps() { return new FutureNumber(this._directive.next("fps")); } @@ -1711,11 +1747,63 @@ export class StableVideoDiffusionOut extends FutureObject { get video_uri() { return new FutureString(this._directive.next("video_uri")); } + + /** Generated frames. */ + get frame_uris() { + return new StableVideoDiffusionOutFrameUris( + this._directive.next("frame_uris"), + ); + } /** returns the result for `StableVideoDiffusionOut` once it's node has been run. */ protected override async _result(): Promise { return super._result() as Promise; } } +/** InterpolateFramesIn */ +export class InterpolateFramesIn extends FutureObject { + /** Frames. */ + get frame_uris() { + return new InterpolateFramesInFrameUris(this._directive.next("frame_uris")); + } + /** (Optional) Use "hosted" to return a video URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the video data will be returned as a base64-encoded string. 
*/ + get store() { + return new FutureString(this._directive.next("store")); + } + /** (Optional) Output video format. */ + get output_format() { + return new FutureString(this._directive.next("output_format")); + } + /** (Optional) Frames per second of the generated video. Ignored if output format is `frames`. */ + get fps() { + return new FutureNumber(this._directive.next("fps")); + } + /** (Optional) Number of interpolation steps. Each step adds an interpolated frame between adjacent frames. For example, 2 steps over 2 frames produces 5 frames. */ + get num_steps() { + return new FutureNumber(this._directive.next("num_steps")); + } + /** returns the result for `InterpolateFramesIn` once it's node has been run. */ + protected override async _result(): Promise { + return super._result() as Promise; + } +} +/** InterpolateFramesOut */ +export class InterpolateFramesOut extends FutureObject { + /** Generated video. */ + get video_uri() { + return new FutureString(this._directive.next("video_uri")); + } + + /** Output frames. */ + get frame_uris() { + return new InterpolateFramesOutFrameUris( + this._directive.next("frame_uris"), + ); + } + /** returns the result for `InterpolateFramesOut` once it's node has been run. */ + protected override async _result(): Promise { + return super._result() as Promise; + } +} /** InpaintImageIn */ export class InpaintImageIn extends FutureObject { /** Original image. */ @@ -4408,7 +4496,7 @@ export class StableVideoDiffusion extends Node { /** * Input arguments: `image_uri`, `store` (optional), `output_format` (optional), `seed` (optional), `fps` (optional), `motion_bucket_id` (optional), `noise` (optional) * - * Output fields: `video_uri` + * Output fields: `video_uri` (optional), `frame_uris` (optional) * * https://www.substrate.run/nodes#StableVideoDiffusion */ @@ -4425,7 +4513,7 @@ export class StableVideoDiffusion extends Node { /** * Retrieve this node's output from a response. * - * Output fields: `video_uri` + * Output fields: `video_uri` (optional), `frame_uris` (optional) * * https://www.substrate.run/nodes#StableVideoDiffusion */ @@ -4442,7 +4530,7 @@ export class StableVideoDiffusion extends Node { /** * Future reference to this node's output. * - * Output fields: `video_uri` + * Output fields: `video_uri` (optional), `frame_uris` (optional) * * https://www.substrate.run/nodes#StableVideoDiffusion */ @@ -4454,6 +4542,75 @@ export class StableVideoDiffusion extends Node { return super.output() as OpenAPI.components["schemas"]["StableVideoDiffusionOut"]; } } +export namespace InterpolateFrames { + /** + * InterpolateFrames Input + * https://www.substrate.run/nodes#InterpolateFrames + */ + export type Input = FutureExpandAny< + OpenAPI.components["schemas"]["InterpolateFramesIn"] + >; + + /** + * InterpolateFrames Output + * https://www.substrate.run/nodes#InterpolateFrames + */ + export type Output = OpenAPI.components["schemas"]["InterpolateFramesOut"]; +} + +/** + * Generates a interpolation frames between each adjacent frames. 
+ * + * https://www.substrate.run/nodes#InterpolateFrames + */ +export class InterpolateFrames extends Node { + /** + * Input arguments: `frame_uris`, `store` (optional), `output_format` (optional), `fps` (optional), `num_steps` (optional) + * + * Output fields: `video_uri` (optional), `frame_uris` (optional) + * + * https://www.substrate.run/nodes#InterpolateFrames + */ + constructor( + args: FutureExpandAny, + options?: Options, + ) { + super(args, options); + this.node = "InterpolateFrames"; + } + + /** + * Retrieve this node's output from a response. + * + * Output fields: `video_uri` (optional), `frame_uris` (optional) + * + * https://www.substrate.run/nodes#InterpolateFrames + */ + protected override async result(): Promise< + OpenAPI.components["schemas"]["InterpolateFramesOut"] | undefined + > { + return Promise.resolve( + this._response ? this._response.get(this) : undefined, + ) as Promise< + OpenAPI.components["schemas"]["InterpolateFramesOut"] | undefined + >; + } + + /** + * Future reference to this node's output. + * + * Output fields: `video_uri` (optional), `frame_uris` (optional) + * + * https://www.substrate.run/nodes#InterpolateFrames + */ + override get future(): InterpolateFramesOut { + return new InterpolateFramesOut(new Trace([], this)); + } + + protected override output(): OpenAPI.components["schemas"]["InterpolateFramesOut"] { + return super.output() as OpenAPI.components["schemas"]["InterpolateFramesOut"]; + } +} export namespace TranscribeSpeech { /** * TranscribeSpeech Input @@ -5907,6 +6064,7 @@ export type AnyNode = | StableDiffusionXLInpaint | StableDiffusionXLControlNet | StableVideoDiffusion + | InterpolateFrames | TranscribeSpeech | GenerateSpeech | RemoveBackground @@ -5973,46 +6131,48 @@ export type NodeOutput = T extends Experimental ? OpenAPI.components["schemas"]["StableDiffusionXLControlNetOut"] : T extends StableVideoDiffusion ? OpenAPI.components["schemas"]["StableVideoDiffusionOut"] - : T extends TranscribeSpeech - ? OpenAPI.components["schemas"]["TranscribeSpeechOut"] - : T extends GenerateSpeech - ? OpenAPI.components["schemas"]["GenerateSpeechOut"] - : T extends RemoveBackground - ? OpenAPI.components["schemas"]["RemoveBackgroundOut"] - : T extends EraseImage - ? OpenAPI.components["schemas"]["EraseImageOut"] - : T extends UpscaleImage - ? OpenAPI.components["schemas"]["UpscaleImageOut"] - : T extends SegmentUnderPoint - ? OpenAPI.components["schemas"]["SegmentUnderPointOut"] - : T extends SegmentAnything - ? OpenAPI.components["schemas"]["SegmentAnythingOut"] - : T extends SplitDocument - ? OpenAPI.components["schemas"]["SplitDocumentOut"] - : T extends EmbedText - ? OpenAPI.components["schemas"]["EmbedTextOut"] - : T extends MultiEmbedText - ? OpenAPI.components["schemas"]["MultiEmbedTextOut"] - : T extends EmbedImage - ? OpenAPI.components["schemas"]["EmbedImageOut"] - : T extends MultiEmbedImage - ? OpenAPI.components["schemas"]["MultiEmbedImageOut"] - : T extends JinaV2 - ? OpenAPI.components["schemas"]["JinaV2Out"] - : T extends CLIP - ? OpenAPI.components["schemas"]["CLIPOut"] - : T extends FindOrCreateVectorStore - ? OpenAPI.components["schemas"]["FindOrCreateVectorStoreOut"] - : T extends ListVectorStores - ? OpenAPI.components["schemas"]["ListVectorStoresOut"] - : T extends DeleteVectorStore - ? OpenAPI.components["schemas"]["DeleteVectorStoreOut"] - : T extends QueryVectorStore - ? OpenAPI.components["schemas"]["QueryVectorStoreOut"] - : T extends FetchVectors - ? 
OpenAPI.components["schemas"]["FetchVectorsOut"] - : T extends UpdateVectors - ? OpenAPI.components["schemas"]["UpdateVectorsOut"] - : T extends DeleteVectors - ? OpenAPI.components["schemas"]["DeleteVectorsOut"] - : never; + : T extends InterpolateFrames + ? OpenAPI.components["schemas"]["InterpolateFramesOut"] + : T extends TranscribeSpeech + ? OpenAPI.components["schemas"]["TranscribeSpeechOut"] + : T extends GenerateSpeech + ? OpenAPI.components["schemas"]["GenerateSpeechOut"] + : T extends RemoveBackground + ? OpenAPI.components["schemas"]["RemoveBackgroundOut"] + : T extends EraseImage + ? OpenAPI.components["schemas"]["EraseImageOut"] + : T extends UpscaleImage + ? OpenAPI.components["schemas"]["UpscaleImageOut"] + : T extends SegmentUnderPoint + ? OpenAPI.components["schemas"]["SegmentUnderPointOut"] + : T extends SegmentAnything + ? OpenAPI.components["schemas"]["SegmentAnythingOut"] + : T extends SplitDocument + ? OpenAPI.components["schemas"]["SplitDocumentOut"] + : T extends EmbedText + ? OpenAPI.components["schemas"]["EmbedTextOut"] + : T extends MultiEmbedText + ? OpenAPI.components["schemas"]["MultiEmbedTextOut"] + : T extends EmbedImage + ? OpenAPI.components["schemas"]["EmbedImageOut"] + : T extends MultiEmbedImage + ? OpenAPI.components["schemas"]["MultiEmbedImageOut"] + : T extends JinaV2 + ? OpenAPI.components["schemas"]["JinaV2Out"] + : T extends CLIP + ? OpenAPI.components["schemas"]["CLIPOut"] + : T extends FindOrCreateVectorStore + ? OpenAPI.components["schemas"]["FindOrCreateVectorStoreOut"] + : T extends ListVectorStores + ? OpenAPI.components["schemas"]["ListVectorStoresOut"] + : T extends DeleteVectorStore + ? OpenAPI.components["schemas"]["DeleteVectorStoreOut"] + : T extends QueryVectorStore + ? OpenAPI.components["schemas"]["QueryVectorStoreOut"] + : T extends FetchVectors + ? OpenAPI.components["schemas"]["FetchVectorsOut"] + : T extends UpdateVectors + ? OpenAPI.components["schemas"]["UpdateVectorsOut"] + : T extends DeleteVectors + ? OpenAPI.components["schemas"]["DeleteVectorsOut"] + : never; diff --git a/src/OpenAPI.ts b/src/OpenAPI.ts index ab5d3a2..5d6c523 100644 --- a/src/OpenAPI.ts +++ b/src/OpenAPI.ts @@ -5,317 +5,907 @@ export interface paths { "/Experimental": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * Experimental * @description Experimental node. */ post: operations["Experimental"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/Box": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * Box * @description Combine multiple values into a single output. */ post: operations["Box"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/If": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * If * @description Return one of two options based on a condition. */ post: operations["If"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/RunPython": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * RunPython * @description Run code using a Python interpreter. 
*/ post: operations["RunPython"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/ComputeText": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * ComputeText * @description Compute text using a language model. */ post: operations["ComputeText"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/MultiComputeText": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * MultiComputeText * @description Generate multiple text choices using a language model. */ post: operations["MultiComputeText"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/BatchComputeText": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * BatchComputeText * @description Compute text for multiple prompts in batch using a language model. */ post: operations["BatchComputeText"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/BatchComputeJSON": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * BatchComputeJSON * @description Compute JSON for multiple prompts in batch using a language model. */ post: operations["BatchComputeJSON"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/ComputeJSON": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * ComputeJSON * @description Compute JSON using a language model. */ post: operations["ComputeJSON"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/MultiComputeJSON": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * MultiComputeJSON * @description Compute multiple JSON choices using a language model. */ post: operations["MultiComputeJSON"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/Mistral7BInstruct": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * Mistral7BInstruct * @description Compute text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b). */ post: operations["Mistral7BInstruct"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/Mixtral8x7BInstruct": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * Mixtral8x7BInstruct * @description Compute text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/). */ post: operations["Mixtral8x7BInstruct"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/Llama3Instruct8B": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * Llama3Instruct8B * @description Compute text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/). 
*/ post: operations["Llama3Instruct8B"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/Llama3Instruct70B": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * Llama3Instruct70B * @description Compute text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/). */ post: operations["Llama3Instruct70B"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/Firellava13B": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * Firellava13B * @description Compute text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model). */ post: operations["Firellava13B"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/GenerateImage": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * GenerateImage * @description Generate an image. */ post: operations["GenerateImage"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/MultiGenerateImage": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * MultiGenerateImage * @description Generate multiple images. */ post: operations["MultiGenerateImage"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/InpaintImage": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * InpaintImage * @description Edit an image using image generation inside part of the image or the full image. */ post: operations["InpaintImage"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/MultiInpaintImage": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * MultiInpaintImage * @description Edit multiple images using image generation. */ post: operations["MultiInpaintImage"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/StableDiffusionXLLightning": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * StableDiffusionXLLightning * @description Generate an image using [Stable Diffusion XL Lightning](https://arxiv.org/abs/2402.13929). */ post: operations["StableDiffusionXLLightning"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/StableDiffusionXLInpaint": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * StableDiffusionXLInpaint * @description Edit an image using [Stable Diffusion XL](https://arxiv.org/abs/2307.01952). Supports inpainting (edit part of the image with a mask) and image-to-image (edit the full image). 
*/ post: operations["StableDiffusionXLInpaint"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/StableDiffusionXLControlNet": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * StableDiffusionXLControlNet * @description Generate an image with generation structured by an input image, using Stable Diffusion XL with [ControlNet](https://arxiv.org/abs/2302.05543). */ post: operations["StableDiffusionXLControlNet"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/StableVideoDiffusion": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * StableVideoDiffusion * @description Generates a video using a still image as conditioning frame. */ post: operations["StableVideoDiffusion"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/InterpolateFrames": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; + /** + * InterpolateFrames + * @description Generates a interpolation frames between each adjacent frames. + */ + post: operations["InterpolateFrames"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/TranscribeSpeech": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * TranscribeSpeech * @description Transcribe speech in an audio or video file. */ post: operations["TranscribeSpeech"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/GenerateSpeech": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * GenerateSpeech * @description Generate speech from text. */ post: operations["GenerateSpeech"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/RemoveBackground": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * RemoveBackground * @description Remove the background from an image and return the foreground segment as a cut-out or a mask. */ post: operations["RemoveBackground"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/EraseImage": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * EraseImage * @description Erase the masked part of an image, e.g. to remove an object by inpainting. */ post: operations["EraseImage"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/UpscaleImage": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * UpscaleImage * @description Upscale an image using image generation. */ post: operations["UpscaleImage"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/SegmentUnderPoint": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * SegmentUnderPoint * @description Segment an image under a point and return the segment. 
*/ post: operations["SegmentUnderPoint"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/SegmentAnything": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * SegmentAnything * @description Segment an image using [SegmentAnything](https://github.com/facebookresearch/segment-anything). */ post: operations["SegmentAnything"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/SplitDocument": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * SplitDocument * @description Split document into text segments. */ post: operations["SplitDocument"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/EmbedText": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * EmbedText * @description Generate embedding for a text document. */ post: operations["EmbedText"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/MultiEmbedText": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * MultiEmbedText * @description Generate embeddings for multiple text documents. */ post: operations["MultiEmbedText"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/EmbedImage": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * EmbedImage * @description Generate embedding for an image. */ post: operations["EmbedImage"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/MultiEmbedImage": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * MultiEmbedImage * @description Generate embeddings for multiple images. */ post: operations["MultiEmbedImage"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/JinaV2": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * JinaV2 * @description Generate embeddings for multiple text documents using [Jina Embeddings 2](https://arxiv.org/abs/2310.19923). */ post: operations["JinaV2"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/CLIP": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * CLIP * @description Generate embeddings for text or images using [CLIP](https://openai.com/research/clip). */ post: operations["CLIP"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/FindOrCreateVectorStore": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * FindOrCreateVectorStore * @description Find a vector store matching the given collection name, or create a new vector store. 
*/ post: operations["FindOrCreateVectorStore"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/ListVectorStores": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * ListVectorStores * @description List all vector stores. */ post: operations["ListVectorStores"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/DeleteVectorStore": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * DeleteVectorStore * @description Delete a vector store. */ post: operations["DeleteVectorStore"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/QueryVectorStore": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * QueryVectorStore * @description Query a vector store for similar vectors. */ post: operations["QueryVectorStore"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/FetchVectors": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * FetchVectors * @description Fetch vectors from a vector store. */ post: operations["FetchVectors"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/UpdateVectors": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * UpdateVectors * @description Update vectors in a vector store. */ post: operations["UpdateVectors"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; "/DeleteVectors": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + get?: never; + put?: never; /** * DeleteVectors * @description Delete vectors in a vector store. */ post: operations["DeleteVectors"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; }; } - export type webhooks = Record; - export interface components { schemas: { /** ErrorOut */ @@ -331,7 +921,7 @@ export interface components { * @description The HTTP status code for the error. * @default 500 */ - status_code?: number; + status_code: number; }; /** ExperimentalIn */ ExperimentalIn: { @@ -345,7 +935,7 @@ export interface components { * @description Timeout in seconds. * @default 60 */ - timeout?: number; + timeout: number; }; /** ExperimentalOut */ ExperimentalOut: { @@ -413,7 +1003,7 @@ export interface components { * @description Sampling temperature to use. Higher values make the output more random, lower values make the output more deterministic. * @default 0.4 */ - temperature?: number; + temperature: number; /** @description Maximum number of tokens to generate. */ max_tokens?: number; /** @@ -421,7 +1011,7 @@ export interface components { * @default Llama3Instruct8B * @enum {string} */ - model?: + model: | "Mistral7BInstruct" | "Mixtral8x7BInstruct" | "Llama3Instruct8B" @@ -450,7 +1040,7 @@ export interface components { * @description Sampling temperature to use. Higher values make the output more random, lower values make the output more deterministic. * @default 0.4 */ - temperature?: number; + temperature: number; /** @description Maximum number of tokens to generate. 
*/ max_tokens?: number; /** @@ -458,7 +1048,7 @@ export interface components { * @default Llama3Instruct8B * @enum {string} */ - model?: "Mistral7BInstruct" | "Mixtral8x7BInstruct" | "Llama3Instruct8B"; + model: "Mistral7BInstruct" | "Mixtral8x7BInstruct" | "Llama3Instruct8B"; }; /** ComputeJSONOut */ ComputeJSONOut: { @@ -483,7 +1073,7 @@ export interface components { * @description Sampling temperature to use. Higher values make the output more random, lower values make the output more deterministic. * @default 0.4 */ - temperature?: number; + temperature: number; /** @description Maximum number of tokens to generate. */ max_tokens?: number; /** @@ -491,7 +1081,7 @@ export interface components { * @default Llama3Instruct8B * @enum {string} */ - model?: + model: | "Mistral7BInstruct" | "Mixtral8x7BInstruct" | "Llama3Instruct8B" @@ -514,7 +1104,7 @@ export interface components { * @description Sampling temperature to use. Higher values make the output more random, lower values make the output more deterministic. * @default 0.4 */ - temperature?: number; + temperature: number; /** @description Maximum number of tokens to generate. */ max_tokens?: number; /** @@ -522,7 +1112,7 @@ export interface components { * @default Llama3Instruct8B * @enum {string} */ - model?: "Mistral7BInstruct" | "Llama3Instruct8B"; + model: "Mistral7BInstruct" | "Llama3Instruct8B"; }; /** BatchComputeTextOut */ BatchComputeTextOut: { @@ -550,7 +1140,7 @@ export interface components { * @description Sampling temperature to use. Higher values make the output more random, lower values make the output more deterministic. * @default 0.4 */ - temperature?: number; + temperature: number; /** @description Maximum number of tokens to generate. */ max_tokens?: number; /** @@ -558,7 +1148,7 @@ export interface components { * @default Llama3Instruct8B * @enum {string} */ - model?: "Mistral7BInstruct" | "Mixtral8x7BInstruct" | "Llama3Instruct8B"; + model: "Mistral7BInstruct" | "Mixtral8x7BInstruct" | "Llama3Instruct8B"; }; /** MultiComputeJSONOut */ MultiComputeJSONOut: { @@ -585,7 +1175,7 @@ export interface components { * @description Sampling temperature to use. Higher values make the output more random, lower values make the output more deterministic. * @default 0.4 */ - temperature?: number; + temperature: number; /** @description Maximum number of tokens to generate. */ max_tokens?: number; /** @@ -593,7 +1183,7 @@ export interface components { * @default Llama3Instruct8B * @enum {string} */ - model?: "Mistral7BInstruct" | "Llama3Instruct8B"; + model: "Mistral7BInstruct" | "Llama3Instruct8B"; }; /** BatchComputeJSONOut */ BatchComputeJSONOut: { @@ -617,7 +1207,7 @@ export interface components { * @description Number of choices to generate. * @default 1 */ - num_choices?: number; + num_choices: number; /** @description JSON schema to guide response. */ json_schema?: { [key: string]: unknown; @@ -632,25 +1222,25 @@ export interface components { * @description Higher values decrease the likelihood of repeating previous tokens. * @default 0 */ - frequency_penalty?: number; + frequency_penalty: number; /** * Format: float * @description Higher values decrease the likelihood of repeated sequences. * @default 1 */ - repetition_penalty?: number; + repetition_penalty: number; /** * Format: float * @description Higher values increase the likelihood of new topics appearing. 
* @default 1.1 */ - presence_penalty?: number; + presence_penalty: number; /** * Format: float * @description Probability below which less likely tokens are filtered out. * @default 0.95 */ - top_p?: number; + top_p: number; /** @description Maximum number of tokens to generate. */ max_tokens?: number; }; @@ -685,7 +1275,7 @@ export interface components { * @description Number of choices to generate. * @default 1 */ - num_choices?: number; + num_choices: number; /** @description JSON schema to guide response. */ json_schema?: { [key: string]: unknown; @@ -700,25 +1290,25 @@ export interface components { * @description Higher values decrease the likelihood of repeating previous tokens. * @default 0 */ - frequency_penalty?: number; + frequency_penalty: number; /** * Format: float * @description Higher values decrease the likelihood of repeated sequences. * @default 1 */ - repetition_penalty?: number; + repetition_penalty: number; /** * Format: float * @description Higher values increase the likelihood of new topics appearing. * @default 1.1 */ - presence_penalty?: number; + presence_penalty: number; /** * Format: float * @description Probability below which less likely tokens are filtered out. * @default 0.95 */ - top_p?: number; + top_p: number; /** @description Maximum number of tokens to generate. */ max_tokens?: number; }; @@ -753,7 +1343,7 @@ export interface components { * @description Number of choices to generate. * @default 1 */ - num_choices?: number; + num_choices: number; /** * Format: float * @description Higher values make the output more random, lower values make the output more deterministic. @@ -764,25 +1354,25 @@ export interface components { * @description Higher values decrease the likelihood of repeating previous tokens. * @default 0 */ - frequency_penalty?: number; + frequency_penalty: number; /** * Format: float * @description Higher values decrease the likelihood of repeated sequences. * @default 1 */ - repetition_penalty?: number; + repetition_penalty: number; /** * Format: float * @description Higher values increase the likelihood of new topics appearing. * @default 1.1 */ - presence_penalty?: number; + presence_penalty: number; /** * Format: float * @description Probability below which less likely tokens are filtered out. * @default 0.95 */ - top_p?: number; + top_p: number; /** @description Maximum number of tokens to generate. */ max_tokens?: number; /** @description JSON schema to guide response. */ @@ -821,7 +1411,7 @@ export interface components { * @description Number of choices to generate. * @default 1 */ - num_choices?: number; + num_choices: number; /** * Format: float * @description Higher values make the output more random, lower values make the output more deterministic. @@ -832,25 +1422,25 @@ export interface components { * @description Higher values decrease the likelihood of repeating previous tokens. * @default 0 */ - frequency_penalty?: number; + frequency_penalty: number; /** * Format: float * @description Higher values decrease the likelihood of repeated sequences. * @default 1 */ - repetition_penalty?: number; + repetition_penalty: number; /** * Format: float * @description Higher values increase the likelihood of new topics appearing. * @default 1.1 */ - presence_penalty?: number; + presence_penalty: number; /** * Format: float * @description Probability below which less likely tokens are filtered out. * @default 0.95 */ - top_p?: number; + top_p: number; /** @description Maximum number of tokens to generate. 
*/ max_tokens?: number; }; @@ -923,7 +1513,7 @@ export interface components { * @description Number of diffusion steps. * @default 30 */ - steps?: number; + steps: number; /** * @description Number of images to generate. * @default 1 @@ -935,12 +1525,12 @@ export interface components { * @description Height of output image, in pixels. * @default 1024 */ - height?: number; + height: number; /** * @description Width of output image, in pixels. * @default 1024 */ - width?: number; + width: number; /** @description Seeds for deterministic generation. Default is a random seed. */ seeds?: number[]; /** @@ -948,7 +1538,7 @@ export interface components { * @description Higher values adhere to the text prompt more strongly, typically at the expense of image quality. * @default 7 */ - guidance_scale?: number; + guidance_scale: number; }; /** StableDiffusionImage */ StableDiffusionImage: { @@ -977,19 +1567,19 @@ export interface components { * @description Number of images to generate. * @default 1 */ - num_images?: number; + num_images: number; /** @description Use "hosted" to return an image URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the image data will be returned as a base64-encoded string. */ store?: string; /** * @description Height of output image, in pixels. * @default 1024 */ - height?: number; + height: number; /** * @description Width of output image, in pixels. * @default 1024 */ - width?: number; + width: number; /** @description Seeds for deterministic generation. Default is a random seed. */ seeds?: number[]; }; @@ -1019,7 +1609,7 @@ export interface components { * @description Controls the influence of the image prompt on the generated output. * @default 0.5 */ - ip_adapter_scale?: number; + ip_adapter_scale: number; /** @description Negative input prompt. */ negative_prompt?: string; /** @description Use "hosted" to return an image URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the image data will be returned as a base64-encoded string. */ @@ -1028,12 +1618,12 @@ export interface components { * @description Width of output image, in pixels. * @default 1024 */ - width?: number; + width: number; /** * @description Height of output image, in pixels. * @default 1024 */ - height?: number; + height: number; /** @description Random noise seeds. Default is random seeds for each generation. */ seeds?: number[]; }; @@ -1067,7 +1657,7 @@ export interface components { * @description Resolution of the output image, in pixels. * @default 1024 */ - output_resolution?: number; + output_resolution: number; /** @description Negative input prompt. */ negative_prompt?: string; /** @description Use "hosted" to return an image URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the image data will be returned as a base64-encoded string. */ @@ -1077,13 +1667,13 @@ export interface components { * @description Controls the influence of the input image on the generated output. * @default 0.5 */ - conditioning_scale?: number; + conditioning_scale: number; /** * Format: float * @description Controls how much to transform the input image. * @default 0.5 */ - strength?: number; + strength: number; /** @description Random noise seeds. Default is random seeds for each generation. 
*/ seeds?: number[]; }; @@ -1108,30 +1698,62 @@ export interface components { * @default gif * @enum {string} */ - output_format?: "gif" | "mp4"; + output_format: "gif" | "webp" | "mp4" | "frames"; /** @description Seed for deterministic generation. Default is a random seed. */ seed?: number; /** - * @description Frames per second of the generated video. + * @description Frames per second of the generated video. Ignored if output format is `frames`. * @default 7 */ - fps?: number; + fps: number; /** * @description The motion bucket id to use for the generated video. This can be used to control the motion of the generated video. Increasing the motion bucket id increases the motion of the generated video. * @default 180 */ - motion_bucket_id?: number; + motion_bucket_id: number; /** * Format: float * @description The amount of noise added to the conditioning image. The higher the values the less the video resembles the conditioning image. Increasing this value also increases the motion of the generated video. * @default 0.1 */ - noise?: number; + noise: number; }; /** StableVideoDiffusionOut */ StableVideoDiffusionOut: { /** @description Generated video. */ - video_uri: string; + video_uri?: string; + /** @description Generated frames. */ + frame_uris?: string[]; + }; + /** InterpolateFramesIn */ + InterpolateFramesIn: { + /** @description Frames. */ + frame_uris: string[]; + /** @description Use "hosted" to return a video URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the video data will be returned as a base64-encoded string. */ + store?: string; + /** + * @description Output video format. + * @default gif + * @enum {string} + */ + output_format: "gif" | "webp" | "mp4" | "frames"; + /** + * @description Frames per second of the generated video. Ignored if output format is `frames`. + * @default 7 + */ + fps: number; + /** + * @description Number of interpolation steps. Each step adds an interpolated frame between adjacent frames. For example, 2 steps over 2 frames produces 5 frames. + * @default 2 + */ + num_steps: number; + }; + /** InterpolateFramesOut */ + InterpolateFramesOut: { + /** @description Generated video. */ + video_uri?: string; + /** @description Output frames. */ + frame_uris?: string[]; }; /** InpaintImageIn */ InpaintImageIn: { @@ -1190,7 +1812,7 @@ export interface components { * @description Resolution of the output image, in pixels. * @default 1024 */ - output_resolution?: number; + output_resolution: number; /** @description Negative input prompt. */ negative_prompt?: string; /** @description Use "hosted" to return an image URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the image data will be returned as a base64-encoded string. */ @@ -1200,7 +1822,7 @@ export interface components { * @description Controls the strength of the generation process. * @default 0.8 */ - strength?: number; + strength: number; /** @description Random noise seeds. Default is random seeds for each generation. */ seeds?: number[]; }; @@ -1280,12 +1902,12 @@ export interface components { * @description Return a mask image instead of the original content. * @default false */ - return_mask?: boolean; + return_mask: boolean; /** * @description Invert the mask image. Only takes effect if `return_mask` is true. 
* @default false */ - invert_mask?: boolean; + invert_mask: boolean; /** @description Hex value background color. Transparent if unset. */ background_color?: string; /** @description Use "hosted" to return an image URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the image data will be returned as a base64-encoded string. */ @@ -1318,7 +1940,7 @@ export interface components { * @description Resolution of the output image, in pixels. * @default 1024 */ - output_resolution?: number; + output_resolution: number; /** @description Use "hosted" to return an image URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the image data will be returned as a base64-encoded string. */ store?: string; }; @@ -1398,27 +2020,27 @@ export interface components { * @description Language of input audio in [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639_language_codes) format. * @default en */ - language?: string; + language: string; /** * @description Segment the text into sentences with approximate timestamps. * @default false */ - segment?: boolean; + segment: boolean; /** * @description Align transcription to produce more accurate sentence-level timestamps and word-level timestamps. An array of word segments will be included in each sentence segment. * @default false */ - align?: boolean; + align: boolean; /** * @description Identify speakers for each segment. Speaker IDs will be included in each segment. * @default false */ - diarize?: boolean; + diarize: boolean; /** * @description Suggest automatic chapter markers. * @default false */ - suggest_chapters?: boolean; + suggest_chapters: boolean; }; /** TranscribedWord */ TranscribedWord: { @@ -1552,7 +2174,7 @@ export interface components { * @description Language of input text. Supported languages: `en, de, fr, es, it, pt, pl, zh, ar, cs, ru, nl, tr, hu, ko`. * @default en */ - language?: string; + language: string; /** @description Use "hosted" to return an audio URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the audio data will be returned as a base64-encoded string. */ store?: string; }; @@ -1591,7 +2213,7 @@ export interface components { * @default jina-v2 * @enum {string} */ - model?: "jina-v2" | "clip"; + model: "jina-v2" | "clip"; }; /** EmbedTextOut */ EmbedTextOut: { @@ -1640,7 +2262,7 @@ export interface components { * @default jina-v2 * @enum {string} */ - model?: "jina-v2" | "clip"; + model: "jina-v2" | "clip"; }; /** MultiEmbedTextOut */ MultiEmbedTextOut: { @@ -1701,7 +2323,7 @@ export interface components { * @default clip * @enum {string} */ - model?: "clip"; + model: "clip"; }; /** EmbedImageOut */ EmbedImageOut: { @@ -1753,7 +2375,7 @@ export interface components { * @default clip * @enum {string} */ - model?: "clip"; + model: "clip"; }; /** MultiEmbedImageOut */ MultiEmbedImageOut: { @@ -1977,27 +2599,27 @@ export interface components { * @description Number of results to return. * @default 10 */ - top_k?: number; + top_k: number; /** * @description The size of the dynamic candidate list for searching the index graph. * @default 40 */ - ef_search?: number; + ef_search: number; /** * @description The number of leaves in the index tree to search. 
* @default 40 */ - num_leaves_to_search?: number; + num_leaves_to_search: number; /** * @description Include the values of the vectors in the response. * @default false */ - include_values?: boolean; + include_values: boolean; /** * @description Include the metadata of the vectors in the response. * @default false */ - include_metadata?: boolean; + include_metadata: boolean; /** @description Filter metadata by key-value pairs. */ filters?: { [key: string]: unknown; @@ -2081,27 +2703,23 @@ export interface components { headers: never; pathItems: never; } - export type $defs = Record; - -export type external = Record; - export interface operations { - /** - * Experimental - * @description Experimental node. - */ Experimental: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "name": "some_name", - * "args": { - * "foo": "bar" - * } - * } - */ + /** @example { + * "name": "some_name", + * "args": { + * "foo": "bar" + * } + * } */ "application/json": { /** @description Identifier. */ name: string; @@ -2120,6 +2738,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Response. */ @@ -2131,27 +2752,27 @@ export interface operations { }; }; }; - /** - * Box - * @description Combine multiple values into a single output. - */ Box: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "value": { - * "a": "b", - * "c": { - * "d": [ - * 1, - * 2, - * 3 - * ] - * } - * } - * } - */ + /** @example { + * "value": { + * "a": "b", + * "c": { + * "d": [ + * 1, + * 2, + * 3 + * ] + * } + * } + * } */ "application/json": { /** @description Values to box. */ value: unknown; @@ -2161,6 +2782,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description The evaluated result. */ @@ -2170,20 +2794,20 @@ export interface operations { }; }; }; - /** - * If - * @description Return one of two options based on a condition. - */ If: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "condition": true, - * "value_if_true": "yes", - * "value_if_false": "no" - * } - */ + /** @example { + * "condition": true, + * "value_if_true": "yes", + * "value_if_false": "no" + * } */ "application/json": { /** @description Condition. */ condition: boolean; @@ -2197,6 +2821,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Result. Null if `value_if_false` is not provided and `condition` is false. */ @@ -2206,22 +2833,22 @@ export interface operations { }; }; }; - /** - * RunPython - * @description Run code using a Python interpreter. 
- */ RunPython: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "pkl_function": "g2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5SMCF9fbWFpbl9flIwHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5ROjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu", - * "kwargs": {}, - * "pip_install": [ - * "numpy" - * ] - * } - */ + /** @example { + * "pkl_function": "g2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5SMCF9fbWFpbl9flIwHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5ROjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu", + * "kwargs": {}, + * "pip_install": [ + * "numpy" + * ] + * } */ "application/json": { /** @description Pickled function. */ pkl_function?: string; @@ -2239,6 +2866,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Return value of your function. */ @@ -2254,20 +2884,20 @@ export interface operations { }; }; }; - /** - * ComputeText - * @description Compute text using a language model. - */ ComputeText: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "Who is Don Quixote?", - * "temperature": 0.4, - * "max_tokens": 800 - * } - */ + /** @example { + * "prompt": "Who is Don Quixote?", + * "temperature": 0.4, + * "max_tokens": 800 + * } */ "application/json": { /** @description Input prompt. */ prompt: string; @@ -2302,6 +2932,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Text response. */ @@ -2311,20 +2944,20 @@ export interface operations { }; }; }; - /** - * MultiComputeText - * @description Generate multiple text choices using a language model. - */ MultiComputeText: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "Who is Don Quixote?", - * "num_choices": 2, - * "max_tokens": 800 - * } - */ + /** @example { + * "prompt": "Who is Don Quixote?", + * "num_choices": 2, + * "max_tokens": 800 + * } */ "application/json": { /** @description Input prompt. */ prompt: string; @@ -2357,6 +2990,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Response choices. */ @@ -2369,22 +3005,22 @@ export interface operations { }; }; }; - /** - * BatchComputeText - * @description Compute text for multiple prompts in batch using a language model. - */ BatchComputeText: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompts": [ - * "Who is Don Quixote?", - * "Who is Sancho Panza?" - * ], - * "max_tokens": 800 - * } - */ + /** @example { + * "prompts": [ + * "Who is Don Quixote?", + * "Who is Sancho Panza?" + * ], + * "max_tokens": 800 + * } */ "application/json": { /** @description Batch input prompts. */ prompts: string[]; @@ -2408,6 +3044,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Batch outputs. 
*/ @@ -2420,35 +3059,35 @@ export interface operations { }; }; }; - /** - * BatchComputeJSON - * @description Compute JSON for multiple prompts in batch using a language model. - */ BatchComputeJSON: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompts": [ - * "Who is Don Quixote?", - * "Who is Sancho Panza?" - * ], - * "max_tokens": 800, - * "json_schema": { - * "type": "object", - * "properties": { - * "name": { - * "type": "string", - * "description": "The name of the character." - * }, - * "bio": { - * "type": "string", - * "description": "Concise biography of the character." + /** @example { + * "prompts": [ + * "Who is Don Quixote?", + * "Who is Sancho Panza?" + * ], + * "max_tokens": 800, + * "json_schema": { + * "type": "object", + * "properties": { + * "name": { + * "type": "string", + * "description": "The name of the character." + * }, + * "bio": { + * "type": "string", + * "description": "Concise biography of the character." + * } + * } * } - * } - * } - * } - */ + * } */ "application/json": { /** @description Batch input prompts. */ prompts: string[]; @@ -2476,6 +3115,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Batch outputs. */ @@ -2492,33 +3134,33 @@ export interface operations { }; }; }; - /** - * ComputeJSON - * @description Compute JSON using a language model. - */ ComputeJSON: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "Who wrote Don Quixote?", - * "json_schema": { - * "type": "object", - * "properties": { - * "name": { - * "type": "string", - * "description": "The name of the author." + /** @example { + * "prompt": "Who wrote Don Quixote?", + * "json_schema": { + * "type": "object", + * "properties": { + * "name": { + * "type": "string", + * "description": "The name of the author." + * }, + * "bio": { + * "type": "string", + * "description": "Concise biography of the author." + * } + * } * }, - * "bio": { - * "type": "string", - * "description": "Concise biography of the author." - * } - * } - * }, - * "temperature": 0.4, - * "max_tokens": 800 - * } - */ + * "temperature": 0.4, + * "max_tokens": 800 + * } */ "application/json": { /** @description Input prompt. */ prompt: string; @@ -2549,6 +3191,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description JSON response. */ @@ -2562,34 +3207,34 @@ export interface operations { }; }; }; - /** - * MultiComputeJSON - * @description Compute multiple JSON choices using a language model. - */ MultiComputeJSON: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "Who wrote Don Quixote?", - * "json_schema": { - * "type": "object", - * "properties": { - * "name": { - * "type": "string", - * "description": "The name of the author." + /** @example { + * "prompt": "Who wrote Don Quixote?", + * "json_schema": { + * "type": "object", + * "properties": { + * "name": { + * "type": "string", + * "description": "The name of the author." + * }, + * "bio": { + * "type": "string", + * "description": "Concise biography of the author." 
+ * } + * } * }, - * "bio": { - * "type": "string", - * "description": "Concise biography of the author." - * } - * } - * }, - * "num_choices": 2, - * "temperature": 0.4, - * "max_tokens": 800 - * } - */ + * "num_choices": 2, + * "temperature": 0.4, + * "max_tokens": 800 + * } */ "application/json": { /** @description Input prompt. */ prompt: string; @@ -2625,6 +3270,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Response choices. */ @@ -2641,21 +3289,21 @@ export interface operations { }; }; }; - /** - * Mistral7BInstruct - * @description Compute text using [Mistral 7B Instruct](https://mistral.ai/news/announcing-mistral-7b). - */ Mistral7BInstruct: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "Who is Don Quixote?", - * "num_choices": 2, - * "temperature": 0.4, - * "max_tokens": 800 - * } - */ + /** @example { + * "prompt": "Who is Don Quixote?", + * "num_choices": 2, + * "temperature": 0.4, + * "max_tokens": 800 + * } */ "application/json": { /** @description Input prompt. */ prompt: string; @@ -2707,6 +3355,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Response choices. */ @@ -2723,21 +3374,21 @@ export interface operations { }; }; }; - /** - * Mixtral8x7BInstruct - * @description Compute text using instruct-tuned [Mixtral 8x7B](https://mistral.ai/news/mixtral-of-experts/). - */ Mixtral8x7BInstruct: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "Who is Don Quixote?", - * "num_choices": 2, - * "temperature": 0.4, - * "max_tokens": 800 - * } - */ + /** @example { + * "prompt": "Who is Don Quixote?", + * "num_choices": 2, + * "temperature": 0.4, + * "max_tokens": 800 + * } */ "application/json": { /** @description Input prompt. */ prompt: string; @@ -2789,6 +3440,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Response choices. */ @@ -2805,21 +3459,21 @@ export interface operations { }; }; }; - /** - * Llama3Instruct8B - * @description Compute text using instruct-tuned [Llama 3 8B](https://llama.meta.com/llama3/). - */ Llama3Instruct8B: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "Who is Don Quixote?", - * "num_choices": 2, - * "temperature": 0.4, - * "max_tokens": 800 - * } - */ + /** @example { + * "prompt": "Who is Don Quixote?", + * "num_choices": 2, + * "temperature": 0.4, + * "max_tokens": 800 + * } */ "application/json": { /** @description Input prompt. */ prompt: string; @@ -2871,6 +3525,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Response choices. */ @@ -2887,21 +3544,21 @@ export interface operations { }; }; }; - /** - * Llama3Instruct70B - * @description Compute text using instruct-tuned [Llama 3 70B](https://llama.meta.com/llama3/). 
- */ Llama3Instruct70B: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "Who is Don Quixote?", - * "num_choices": 2, - * "temperature": 0.4, - * "max_tokens": 800 - * } - */ + /** @example { + * "prompt": "Who is Don Quixote?", + * "num_choices": 2, + * "temperature": 0.4, + * "max_tokens": 800 + * } */ "application/json": { /** @description Input prompt. */ prompt: string; @@ -2949,6 +3606,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Response choices. */ @@ -2961,22 +3621,22 @@ export interface operations { }; }; }; - /** - * Firellava13B - * @description Compute text with image input using [FireLLaVA 13B](https://fireworks.ai/blog/firellava-the-first-commercially-permissive-oss-llava-model). - */ Firellava13B: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "what are these paintings of and who made them?", - * "image_uris": [ - * "https://media.substrate.run/docs-fuji-red.jpg", - * "https://media.substrate.run/docs-fuji-blue.jpg" - * ] - * } - */ + /** @example { + * "prompt": "what are these paintings of and who made them?", + * "image_uris": [ + * "https://media.substrate.run/docs-fuji-red.jpg", + * "https://media.substrate.run/docs-fuji-blue.jpg" + * ] + * } */ "application/json": { /** @description Text prompt. */ prompt: string; @@ -2990,6 +3650,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Text response. */ @@ -2999,19 +3662,19 @@ export interface operations { }; }; }; - /** - * GenerateImage - * @description Generate an image. - */ GenerateImage: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", - * "store": "hosted" - * } - */ + /** @example { + * "prompt": "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", + * "store": "hosted" + * } */ "application/json": { /** @description Text prompt. */ prompt: string; @@ -3023,6 +3686,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Base 64-encoded JPEG image bytes, or a hosted image url if `store` is provided. */ @@ -3032,20 +3698,20 @@ export interface operations { }; }; }; - /** - * MultiGenerateImage - * @description Generate multiple images. - */ MultiGenerateImage: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", - * "num_images": 2, - * "store": "hosted" - * } - */ + /** @example { + * "prompt": "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", + * "num_images": 2, + * "store": "hosted" + * } */ "application/json": { /** @description Text prompt. 
*/ prompt: string; @@ -3062,6 +3728,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated images. */ @@ -3074,21 +3743,21 @@ export interface operations { }; }; }; - /** - * InpaintImage - * @description Edit an image using image generation inside part of the image or the full image. - */ InpaintImage: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/docs-klimt-park.jpg", - * "mask_image_uri": "https://media.substrate.run/spiral-logo.jpeg", - * "prompt": "large tropical colorful bright anime birds in a dark jungle full of vines, high resolution", - * "store": "hosted" - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/docs-klimt-park.jpg", + * "mask_image_uri": "https://media.substrate.run/spiral-logo.jpeg", + * "prompt": "large tropical colorful bright anime birds in a dark jungle full of vines, high resolution", + * "store": "hosted" + * } */ "application/json": { /** @description Original image. */ image_uri: string; @@ -3104,6 +3773,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Base 64-encoded JPEG image bytes, or a hosted image url if `store` is provided. */ @@ -3113,22 +3785,22 @@ export interface operations { }; }; }; - /** - * MultiInpaintImage - * @description Edit multiple images using image generation. - */ MultiInpaintImage: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/docs-klimt-park.jpg", - * "mask_image_uri": "https://media.substrate.run/spiral-logo.jpeg", - * "prompt": "large tropical colorful bright anime birds in a dark jungle full of vines, high resolution", - * "num_images": 2, - * "store": "hosted" - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/docs-klimt-park.jpg", + * "mask_image_uri": "https://media.substrate.run/spiral-logo.jpeg", + * "prompt": "large tropical colorful bright anime birds in a dark jungle full of vines, high resolution", + * "num_images": 2, + * "store": "hosted" + * } */ "application/json": { /** @description Original image. */ image_uri: string; @@ -3149,6 +3821,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated images. */ @@ -3161,25 +3836,25 @@ export interface operations { }; }; }; - /** - * StableDiffusionXLLightning - * @description Generate an image using [Stable Diffusion XL Lightning](https://arxiv.org/abs/2402.13929). 
- */ StableDiffusionXLLightning: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", - * "negative_prompt": "night, moon", - * "num_images": 2, - * "seeds": [ - * 330699, - * 136464 - * ], - * "store": "hosted" - * } - */ + /** @example { + * "prompt": "hokusai futuristic supercell spiral cloud with glowing core over turbulent ocean", + * "negative_prompt": "night, moon", + * "num_images": 2, + * "seeds": [ + * 330699, + * 136464 + * ], + * "store": "hosted" + * } */ "application/json": { /** @description Text prompt. */ prompt: string; @@ -3210,6 +3885,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated images. */ @@ -3224,28 +3902,28 @@ export interface operations { }; }; }; - /** - * StableDiffusionXLInpaint - * @description Edit an image using [Stable Diffusion XL](https://arxiv.org/abs/2307.01952). Supports inpainting (edit part of the image with a mask) and image-to-image (edit the full image). - */ StableDiffusionXLInpaint: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/docs-klimt-park.jpg", - * "mask_image_uri": "https://media.substrate.run/spiral-logo.jpeg", - * "prompt": "large tropical colorful bright birds in a jungle, high resolution oil painting", - * "negative_prompt": "dark, cartoon, anime", - * "strength": 0.8, - * "num_images": 2, - * "store": "hosted", - * "seeds": [ - * 1607280, - * 1720395 - * ] - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/docs-klimt-park.jpg", + * "mask_image_uri": "https://media.substrate.run/spiral-logo.jpeg", + * "prompt": "large tropical colorful bright birds in a jungle, high resolution oil painting", + * "negative_prompt": "dark, cartoon, anime", + * "strength": 0.8, + * "num_images": 2, + * "store": "hosted", + * "seeds": [ + * 1607280, + * 1720395 + * ] + * } */ "application/json": { /** @description Original image. */ image_uri: string; @@ -3281,6 +3959,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated images. */ @@ -3295,28 +3976,28 @@ export interface operations { }; }; }; - /** - * StableDiffusionXLControlNet - * @description Generate an image with generation structured by an input image, using Stable Diffusion XL with [ControlNet](https://arxiv.org/abs/2302.05543). 
- */ StableDiffusionXLControlNet: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/spiral-logo.jpeg", - * "prompt": "the futuristic solarpunk city of atlantis at sunset, cinematic bokeh HD", - * "control_method": "illusion", - * "conditioning_scale": 1, - * "strength": 1, - * "store": "hosted", - * "num_images": 2, - * "seeds": [ - * 1607226, - * 1720395 - * ] - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/spiral-logo.jpeg", + * "prompt": "the futuristic solarpunk city of atlantis at sunset, cinematic bokeh HD", + * "control_method": "illusion", + * "conditioning_scale": 1, + * "strength": 1, + * "store": "hosted", + * "num_images": 2, + * "seeds": [ + * 1607226, + * 1720395 + * ] + * } */ "application/json": { /** @description Input image. */ image_uri: string; @@ -3361,6 +4042,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated images. */ @@ -3375,19 +4059,19 @@ export interface operations { }; }; }; - /** - * StableVideoDiffusion - * @description Generates a video using a still image as conditioning frame. - */ StableVideoDiffusion: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/apple-forest.jpeg", - * "store": "hosted" - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/apple-forest.jpeg", + * "store": "hosted" + * } */ "application/json": { /** @description Original image. */ image_uri: string; @@ -3398,11 +4082,11 @@ export interface operations { * @default gif * @enum {string} */ - output_format?: "gif" | "mp4"; + output_format?: "gif" | "webp" | "mp4" | "frames"; /** @description Seed for deterministic generation. Default is a random seed. */ seed?: number; /** - * @description Frames per second of the generated video. + * @description Frames per second of the generated video. Ignored if output format is `frames`. * @default 7 */ fps?: number; @@ -3423,32 +4107,94 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated video. */ - video_uri: string; + video_uri?: string; + /** @description Generated frames. */ + frame_uris?: string[]; + }; + }; + }; + }; + }; + InterpolateFrames: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + requestBody?: { + content: { + /** @example { + * "frame_uris": [ + * "https://media.substrate.run/apple-forest2.jpeg", + * "https://media.substrate.run/apple-forest3.jpeg" + * ], + * "store": "hosted" + * } */ + "application/json": { + /** @description Frames. */ + frame_uris: string[]; + /** @description Use "hosted" to return a video URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the video data will be returned as a base64-encoded string. */ + store?: string; + /** + * @description Output video format. + * @default gif + * @enum {string} + */ + output_format?: "gif" | "webp" | "mp4" | "frames"; + /** + * @description Frames per second of the generated video. Ignored if output format is `frames`. 
+ * @default 7 + */ + fps?: number; + /** + * @description Number of interpolation steps. Each step adds an interpolated frame between adjacent frames. For example, 2 steps over 2 frames produces 5 frames. + * @default 2 + */ + num_steps?: number; + }; + }; + }; + responses: { + /** @description OK */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": { + /** @description Generated video. */ + video_uri?: string; + /** @description Output frames. */ + frame_uris?: string[]; }; }; }; }; }; - /** - * TranscribeSpeech - * @description Transcribe speech in an audio or video file. - */ TranscribeSpeech: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "audio_uri": "https://media.substrate.run/dfw-clip.m4a", - * "prompt": "David Foster Wallace interviewed about US culture, and Infinite Jest", - * "segment": true, - * "align": true, - * "diarize": true, - * "suggest_chapters": true - * } - */ + /** @example { + * "audio_uri": "https://media.substrate.run/dfw-clip.m4a", + * "prompt": "David Foster Wallace interviewed about US culture, and Infinite Jest", + * "segment": true, + * "align": true, + * "diarize": true, + * "suggest_chapters": true + * } */ "application/json": { /** @description Input audio. */ audio_uri: string; @@ -3485,6 +4231,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Transcribed text. */ @@ -3538,19 +4287,19 @@ export interface operations { }; }; }; - /** - * GenerateSpeech - * @description Generate speech from text. - */ GenerateSpeech: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "text": "Substrate: an underlying substance or layer.", - * "store": "hosted" - * } - */ + /** @example { + * "text": "Substrate: an underlying substance or layer.", + * "store": "hosted" + * } */ "application/json": { /** @description Input text. */ text: string; @@ -3562,6 +4311,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Base 64-encoded WAV audio bytes, or a hosted audio url if `store` is provided. */ @@ -3571,19 +4323,19 @@ export interface operations { }; }; }; - /** - * RemoveBackground - * @description Remove the background from an image and return the foreground segment as a cut-out or a mask. - */ RemoveBackground: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/apple-forest.jpeg", - * "store": "hosted" - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/apple-forest.jpeg", + * "store": "hosted" + * } */ "application/json": { /** @description Input image. */ image_uri: string; @@ -3607,6 +4359,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Base 64-encoded JPEG image bytes, or a hosted image url if `store` is provided. */ @@ -3616,20 +4371,20 @@ export interface operations { }; }; }; - /** - * EraseImage - * @description Erase the masked part of an image, e.g. to remove an object by inpainting. 
- */ EraseImage: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/apple-forest.jpeg", - * "mask_image_uri": "https://media.substrate.run/apple-forest-mask.jpeg", - * "store": "hosted" - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/apple-forest.jpeg", + * "mask_image_uri": "https://media.substrate.run/apple-forest-mask.jpeg", + * "store": "hosted" + * } */ "application/json": { /** @description Input image. */ image_uri: string; @@ -3643,6 +4398,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Base 64-encoded JPEG image bytes, or a hosted image url if `store` is provided. */ @@ -3652,20 +4410,20 @@ export interface operations { }; }; }; - /** - * UpscaleImage - * @description Upscale an image using image generation. - */ UpscaleImage: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "prompt": "high resolution detailed spiral shell", - * "image_uri": "https://media.substrate.run/docs-shell-emoji.jpg", - * "store": "hosted" - * } - */ + /** @example { + * "prompt": "high resolution detailed spiral shell", + * "image_uri": "https://media.substrate.run/docs-shell-emoji.jpg", + * "store": "hosted" + * } */ "application/json": { /** @description Prompt to guide model on the content of image to upscale. */ prompt?: string; @@ -3684,6 +4442,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Base 64-encoded JPEG image bytes, or a hosted image url if `store` is provided. */ @@ -3693,23 +4454,23 @@ export interface operations { }; }; }; - /** - * SegmentUnderPoint - * @description Segment an image under a point and return the segment. - */ SegmentUnderPoint: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/docs-vg-bedroom.jpg", - * "point": { - * "x": 189, - * "y": 537 - * }, - * "store": "hosted" - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/docs-vg-bedroom.jpg", + * "point": { + * "x": 189, + * "y": 537 + * }, + * "store": "hosted" + * } */ "application/json": { /** @description Input image. */ image_uri: string; @@ -3728,6 +4489,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Detected segments in 'mask image' format. Base 64-encoded JPEG image bytes, or a hosted image url if `store` is provided. */ @@ -3737,25 +4501,25 @@ export interface operations { }; }; }; - /** - * SegmentAnything - * @description Segment an image using [SegmentAnything](https://github.com/facebookresearch/segment-anything). 
- */ SegmentAnything: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/docs-vg-bedroom.jpg", - * "point_prompts": [ - * { - * "x": 189, - * "y": 537 - * } - * ], - * "store": "hosted" - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/docs-vg-bedroom.jpg", + * "point_prompts": [ + * { + * "x": 189, + * "y": 537 + * } + * ], + * "store": "hosted" + * } */ "application/json": { /** @description Input image. */ image_uri: string; @@ -3797,6 +4561,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Detected segments in 'mask image' format. Base 64-encoded JPEG image bytes, or a hosted image url if `store` is provided. */ @@ -3806,22 +4573,22 @@ export interface operations { }; }; }; - /** - * SplitDocument - * @description Split document into text segments. - */ SplitDocument: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "doc_id": "example_pdf", - * "uri": "https://arxiv.org/pdf/2405.07945", - * "metadata": { - * "title": "GRASS II: Simulations of Potential Granulation Noise Mitigation Methods" - * } - * } - */ + /** @example { + * "doc_id": "example_pdf", + * "uri": "https://arxiv.org/pdf/2405.07945", + * "metadata": { + * "title": "GRASS II: Simulations of Potential Granulation Noise Mitigation Methods" + * } + * } */ "application/json": { /** @description URI of the document. */ uri: string; @@ -3841,6 +4608,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Document chunks */ @@ -3859,26 +4629,26 @@ export interface operations { }; }; }; - /** - * EmbedText - * @description Generate embedding for a text document. - */ EmbedText: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "text": "Argon is the third most abundant gas in Earth's atmosphere, at 0.934% (9340 ppmv). It is more than twice as abundant as water vapor.", - * "model": "jina-v2", - * "collection_name": "smoke_tests", - * "metadata": { - * "group": "18" - * }, - * "embedded_metadata_keys": [ - * "group" - * ] - * } - */ + /** @example { + * "text": "Argon is the third most abundant gas in Earth's atmosphere, at 0.934% (9340 ppmv). It is more than twice as abundant as water vapor.", + * "model": "jina-v2", + * "collection_name": "smoke_tests", + * "metadata": { + * "group": "18" + * }, + * "embedded_metadata_keys": [ + * "group" + * ] + * } */ "application/json": { /** @description Text to embed. */ text: string; @@ -3904,6 +4674,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** Embedding */ @@ -3922,36 +4695,36 @@ export interface operations { }; }; }; - /** - * MultiEmbedText - * @description Generate embeddings for multiple text documents. - */ MultiEmbedText: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "model": "jina-v2", - * "items": [ - * { - * "text": "Osmium is the densest naturally occurring element. 
When experimentally measured using X-ray crystallography, it has a density of 22.59 g/cm3. Manufacturers use its alloys with platinum, iridium, and other platinum-group metals to make fountain pen nib tipping, electrical contacts, and in other applications that require extreme durability and hardness.", - * "metadata": { - * "group": "8" - * } - * }, - * { - * "text": "Despite its abundant presence in the universe and Solar System—ranking fifth in cosmic abundance following hydrogen, helium, oxygen, and carbon—neon is comparatively scarce on Earth.", - * "metadata": { - * "group": "18" - * } - * } - * ], - * "collection_name": "smoke_tests", - * "embedded_metadata_keys": [ - * "group" - * ] - * } - */ + /** @example { + * "model": "jina-v2", + * "items": [ + * { + * "text": "Osmium is the densest naturally occurring element. When experimentally measured using X-ray crystallography, it has a density of 22.59 g/cm3. Manufacturers use its alloys with platinum, iridium, and other platinum-group metals to make fountain pen nib tipping, electrical contacts, and in other applications that require extreme durability and hardness.", + * "metadata": { + * "group": "8" + * } + * }, + * { + * "text": "Despite its abundant presence in the universe and Solar System—ranking fifth in cosmic abundance following hydrogen, helium, oxygen, and carbon—neon is comparatively scarce on Earth.", + * "metadata": { + * "group": "18" + * } + * } + * ], + * "collection_name": "smoke_tests", + * "embedded_metadata_keys": [ + * "group" + * ] + * } */ "application/json": { /** @description Items to embed. */ items: { @@ -3980,6 +4753,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated embeddings. */ @@ -3998,19 +4774,19 @@ export interface operations { }; }; }; - /** - * EmbedImage - * @description Generate embedding for an image. - */ EmbedImage: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "image_uri": "https://media.substrate.run/docs-fuji-red.jpg", - * "collection_name": "smoke_tests" - * } - */ + /** @example { + * "image_uri": "https://media.substrate.run/docs-fuji-red.jpg", + * "collection_name": "smoke_tests" + * } */ "application/json": { /** @description Image to embed. */ image_uri: string; @@ -4030,6 +4806,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** Embedding */ @@ -4048,26 +4827,26 @@ export interface operations { }; }; }; - /** - * MultiEmbedImage - * @description Generate embeddings for multiple images. - */ MultiEmbedImage: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "items": [ - * { - * "image_uri": "https://media.substrate.run/docs-fuji-red.jpg" - * }, - * { - * "image_uri": "https://media.substrate.run/docs-fuji-blue.jpg" - * } - * ], - * "collection_name": "smoke_tests" - * } - */ + /** @example { + * "items": [ + * { + * "image_uri": "https://media.substrate.run/docs-fuji-red.jpg" + * }, + * { + * "image_uri": "https://media.substrate.run/docs-fuji-blue.jpg" + * } + * ], + * "collection_name": "smoke_tests" + * } */ "application/json": { /** @description Items to embed. 
*/ items: { @@ -4090,6 +4869,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated embeddings. */ @@ -4108,35 +4890,35 @@ export interface operations { }; }; }; - /** - * JinaV2 - * @description Generate embeddings for multiple text documents using [Jina Embeddings 2](https://arxiv.org/abs/2310.19923). - */ JinaV2: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "items": [ - * { - * "text": "Hassium is a superheavy element; it has been produced in a laboratory only in very small quantities by fusing heavy nuclei with lighter ones. Natural occurrences of the element have been hypothesised but never found.", - * "metadata": { - * "group": "8" - * } - * }, - * { - * "text": "Xenon is also used to search for hypothetical weakly interacting massive particles and as a propellant for ion thrusters in spacecraft.", - * "metadata": { - * "group": "18" - * } - * } - * ], - * "collection_name": "smoke_tests", - * "embedded_metadata_keys": [ - * "group" - * ] - * } - */ + /** @example { + * "items": [ + * { + * "text": "Hassium is a superheavy element; it has been produced in a laboratory only in very small quantities by fusing heavy nuclei with lighter ones. Natural occurrences of the element have been hypothesised but never found.", + * "metadata": { + * "group": "8" + * } + * }, + * { + * "text": "Xenon is also used to search for hypothetical weakly interacting massive particles and as a propellant for ion thrusters in spacecraft.", + * "metadata": { + * "group": "18" + * } + * } + * ], + * "collection_name": "smoke_tests", + * "embedded_metadata_keys": [ + * "group" + * ] + * } */ "application/json": { /** @description Items to embed. */ items: { @@ -4159,6 +4941,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated embeddings. */ @@ -4177,26 +4962,26 @@ export interface operations { }; }; }; - /** - * CLIP - * @description Generate embeddings for text or images using [CLIP](https://openai.com/research/clip). - */ CLIP: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "items": [ - * { - * "image_uri": "https://media.substrate.run/docs-fuji-red.jpg" - * }, - * { - * "image_uri": "https://media.substrate.run/docs-fuji-blue.jpg" - * } - * ], - * "collection_name": "smoke_tests" - * } - */ + /** @example { + * "items": [ + * { + * "image_uri": "https://media.substrate.run/docs-fuji-red.jpg" + * }, + * { + * "image_uri": "https://media.substrate.run/docs-fuji-blue.jpg" + * } + * ], + * "collection_name": "smoke_tests" + * } */ "application/json": { /** @description Items to embed. */ items: { @@ -4221,6 +5006,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Generated embeddings. */ @@ -4239,19 +5027,19 @@ export interface operations { }; }; }; - /** - * FindOrCreateVectorStore - * @description Find a vector store matching the given collection name, or create a new vector store. 
- */ FindOrCreateVectorStore: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "collection_name": "smoke_tests", - * "model": "jina-v2" - * } - */ + /** @example { + * "collection_name": "smoke_tests", + * "model": "jina-v2" + * } */ "application/json": { /** @description Vector store name. */ collection_name: string; @@ -4266,6 +5054,9 @@ export interface operations { responses: { /** @description Vector store created. */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Vector store name. */ @@ -4282,11 +5073,13 @@ export interface operations { }; }; }; - /** - * ListVectorStores - * @description List all vector stores. - */ ListVectorStores: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { /** @example {} */ @@ -4296,6 +5089,9 @@ export interface operations { responses: { /** @description List of vector stores. */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description List of vector stores. */ @@ -4315,19 +5111,19 @@ export interface operations { }; }; }; - /** - * DeleteVectorStore - * @description Delete a vector store. - */ DeleteVectorStore: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "collection_name": "fake_store", - * "model": "jina-v2" - * } - */ + /** @example { + * "collection_name": "fake_store", + * "model": "jina-v2" + * } */ "application/json": { /** @description Vector store name. */ collection_name: string; @@ -4342,6 +5138,9 @@ export interface operations { responses: { /** @description OK */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Vector store name. */ @@ -4356,25 +5155,25 @@ export interface operations { }; }; }; - /** - * QueryVectorStore - * @description Query a vector store for similar vectors. - */ QueryVectorStore: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "collection_name": "smoke_tests", - * "model": "jina-v2", - * "query_strings": [ - * "gas", - * "metal" - * ], - * "top_k": 1, - * "include_metadata": true - * } - */ + /** @example { + * "collection_name": "smoke_tests", + * "model": "jina-v2", + * "query_strings": [ + * "gas", + * "metal" + * ], + * "top_k": 1, + * "include_metadata": true + * } */ "application/json": { /** @description Vector store to query against. */ collection_name: string; @@ -4426,6 +5225,9 @@ export interface operations { responses: { /** @description Query results. */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Query results. */ @@ -4456,22 +5258,22 @@ export interface operations { }; }; }; - /** - * FetchVectors - * @description Fetch vectors from a vector store. - */ FetchVectors: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "collection_name": "smoke_tests", - * "model": "jina-v2", - * "ids": [ - * "dd8f3774e05d42caa53cfbaa7389c08f" - * ] - * } - */ + /** @example { + * "collection_name": "smoke_tests", + * "model": "jina-v2", + * "ids": [ + * "dd8f3774e05d42caa53cfbaa7389c08f" + * ] + * } */ "application/json": { /** @description Vector store name. 
*/ collection_name: string; @@ -4488,6 +5290,9 @@ export interface operations { responses: { /** @description Vector data. */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Retrieved vectors. */ @@ -4506,27 +5311,27 @@ export interface operations { }; }; }; - /** - * UpdateVectors - * @description Update vectors in a vector store. - */ UpdateVectors: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "collection_name": "smoke_tests", - * "model": "jina-v2", - * "vectors": [ - * { - * "id": "dd8f3774e05d42caa53cfbaa7389c08f", - * "metadata": { - * "appearance": "silvery, blue cast" - * } - * } - * ] - * } - */ + /** @example { + * "collection_name": "smoke_tests", + * "model": "jina-v2", + * "vectors": [ + * { + * "id": "dd8f3774e05d42caa53cfbaa7389c08f", + * "metadata": { + * "appearance": "silvery, blue cast" + * } + * } + * ] + * } */ "application/json": { /** @description Vector store name. */ collection_name: string; @@ -4552,6 +5357,9 @@ export interface operations { responses: { /** @description Count of updated vectors. */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Number of vectors modified. */ @@ -4561,23 +5369,23 @@ export interface operations { }; }; }; - /** - * DeleteVectors - * @description Delete vectors in a vector store. - */ DeleteVectors: { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; requestBody?: { content: { - /** - * @example { - * "collection_name": "smoke_tests", - * "model": "jina-v2", - * "ids": [ - * "ac32b9a133dd4e3689004f6e8f0fd6cd", - * "629df177c7644062a68bceeff223cefa" - * ] - * } - */ + /** @example { + * "collection_name": "smoke_tests", + * "model": "jina-v2", + * "ids": [ + * "ac32b9a133dd4e3689004f6e8f0fd6cd", + * "629df177c7644062a68bceeff223cefa" + * ] + * } */ "application/json": { /** @description Vector store name. */ collection_name: string; @@ -4594,6 +5402,9 @@ export interface operations { responses: { /** @description Count of deleted vectors. */ 200: { + headers: { + [name: string]: unknown; + }; content: { "application/json": { /** @description Number of vectors modified. */ diff --git a/src/index.ts b/src/index.ts index bf57663..8b6830e 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,7 +1,7 @@ /** * 𐃏 Substrate TypeScript SDK * @generated file - * 20240617.20240727 + * 20240617.20240806 */ export { SubstrateError } from "substrate/Error"; @@ -28,6 +28,7 @@ export { StableDiffusionXLInpaint, StableDiffusionXLControlNet, StableVideoDiffusion, + InterpolateFrames, TranscribeSpeech, GenerateSpeech, RemoveBackground, diff --git a/src/openapi.json b/src/openapi.json index f5e4eab..c294550 100644 --- a/src/openapi.json +++ b/src/openapi.json @@ -1370,7 +1370,7 @@ }, "output_format": { "type": "string", - "enum": ["gif", "mp4"], + "enum": ["gif", "webp", "mp4", "frames"], "description": "Output video format.", "default": "gif", "x-loggable": true @@ -1382,9 +1382,10 @@ }, "fps": { "type": "integer", - "description": "Frames per second of the generated video.", + "description": "Frames per second of the generated video. Ignored if output format is `frames`.", "default": 7, - "x-loggable": true + "x-loggable": true, + "minimum": 1 }, "motion_bucket_id": { "type": "integer", @@ -1409,9 +1410,73 @@ "video_uri": { "type": "string", "description": "Generated video." 
+ }, + "frame_uris": { + "type": "array", + "description": "Generated frames.", + "items": { + "type": "string" + } + } + } + }, + "InterpolateFramesIn": { + "title": "InterpolateFramesIn", + "type": "object", + "properties": { + "frame_uris": { + "type": "array", + "description": "Frames.", + "items": { + "type": "string" + }, + "minItems": 2 + }, + "store": { + "type": "string", + "description": "Use \"hosted\" to return a video URL hosted on Substrate. You can also provide a URL to a registered [file store](https://docs.substrate.run/reference/external-files). If unset, the video data will be returned as a base64-encoded string.", + "x-loggable": true + }, + "output_format": { + "type": "string", + "enum": ["gif", "webp", "mp4", "frames"], + "description": "Output video format.", + "default": "gif", + "x-loggable": true + }, + "fps": { + "type": "integer", + "description": "Frames per second of the generated video. Ignored if output format is `frames`.", + "default": 7, + "minimum": 1, + "x-loggable": true + }, + "num_steps": { + "type": "integer", + "description": "Number of interpolation steps. Each step adds an interpolated frame between adjacent frames. For example, 2 steps over 2 frames produces 5 frames.", + "default": 2, + "minimum": 1, + "x-loggable": true } }, - "required": ["video_uri"] + "required": ["frame_uris"] + }, + "InterpolateFramesOut": { + "title": "InterpolateFramesOut", + "type": "object", + "properties": { + "video_uri": { + "type": "string", + "description": "Generated video." + }, + "frame_uris": { + "type": "array", + "description": "Output frames.", + "items": { + "type": "string" + } + } + } }, "InpaintImageIn": { "title": "InpaintImageIn", @@ -3853,6 +3918,45 @@ } } }, + "/InterpolateFrames": { + "post": { + "summary": "InterpolateFrames", + "operationId": "InterpolateFrames", + "tags": ["category:image", "type:low-level"], + "description": "Generates a interpolation frames between each adjacent frames.", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/InterpolateFramesIn" + }, + "example": { + "frame_uris": [ + "https://media.substrate.run/apple-forest2.jpeg", + "https://media.substrate.run/apple-forest3.jpeg" + ], + "store": "hosted" + } + } + } + }, + "responses": { + "200": { + "description": "OK", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/InterpolateFramesOut" + }, + "example": { + "video_uri": "https://assets.substrate.run/84848484.mp4" + } + } + } + } + } + } + }, "/TranscribeSpeech": { "post": { "summary": "TranscribeSpeech",