From 37b5751095424859add25470ff38cc09eb2bf32d Mon Sep 17 00:00:00 2001
From: Vlad Ryzhkov
Date: Mon, 30 Jun 2025 14:53:43 -0600
Subject: [PATCH] feat(inference): Add BagelNet inference provider support

- Add BagelNet conversational task provider
- Extends BaseConversationalTask with OpenAI-compatible API
- Base URL: https://api.bagel.net
- Supports chat completions with standard OpenAI format
- Update provider documentation and type definitions
---
 packages/inference/README.md                    |  2 ++
 packages/inference/src/lib/getProviderHelper.ts |  4 ++++
 packages/inference/src/providers/bagelnet.ts    | 14 ++++++++++++++
 packages/inference/src/providers/consts.ts      |  1 +
 packages/inference/src/types.ts                 |  1 +
 5 files changed, 22 insertions(+)
 create mode 100644 packages/inference/src/providers/bagelnet.ts

diff --git a/packages/inference/README.md b/packages/inference/README.md
index 0ea60b2be7..bd9c358451 100644
--- a/packages/inference/README.md
+++ b/packages/inference/README.md
@@ -47,6 +47,7 @@ Your access token should be kept private. If you need to protect it in front-end
 
 You can send inference requests to third-party providers with the inference client. Currently, we support the following providers:
 
+- [BagelNet](https://bagel.net)
 - [Fal.ai](https://fal.ai)
 - [Featherless AI](https://featherless.ai)
 - [Fireworks AI](https://fireworks.ai)
@@ -82,6 +83,7 @@ When authenticated with a third-party provider key, the request is made directly
 
 You can check the list of supported models per pipeline tasks here:
 
+- [BagelNet supported models](https://huggingface.co/api/partners/bagelnet/models)
 - [Fal.ai supported models](https://huggingface.co/api/partners/fal-ai/models)
 - [Featherless AI supported models](https://huggingface.co/api/partners/featherless-ai/models)
 - [Fireworks AI supported models](https://huggingface.co/api/partners/fireworks-ai/models)
diff --git a/packages/inference/src/lib/getProviderHelper.ts b/packages/inference/src/lib/getProviderHelper.ts
index d7cc87fb59..8f17af33e5 100644
--- a/packages/inference/src/lib/getProviderHelper.ts
+++ b/packages/inference/src/lib/getProviderHelper.ts
@@ -1,3 +1,4 @@
+import * as BagelNet from "../providers/bagelnet.js";
 import * as BlackForestLabs from "../providers/black-forest-labs.js";
 import * as Cerebras from "../providers/cerebras.js";
 import * as Cohere from "../providers/cohere.js";
@@ -51,6 +52,9 @@ import type { InferenceProvider, InferenceProviderOrPolicy, InferenceTask } from
 import { InferenceClientInputError } from "../errors.js";
 
 export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask, TaskProviderHelper>>> = {
+	bagelnet: {
+		conversational: new BagelNet.BagelNetConversational(),
+	},
 	"black-forest-labs": {
 		"text-to-image": new BlackForestLabs.BlackForestLabsTextToImageTask(),
 	},
diff --git a/packages/inference/src/providers/bagelnet.ts b/packages/inference/src/providers/bagelnet.ts
new file mode 100644
index 0000000000..b54dd74983
--- /dev/null
+++ b/packages/inference/src/providers/bagelnet.ts
@@ -0,0 +1,14 @@
+import { BaseConversationalTask } from "./providerHelper.js";
+
+/**
+ * BagelNet conversational (chat-completion) provider.
+ *
+ * BagelNet exposes an OpenAI-compatible chat-completions API, so the shared
+ * BaseConversationalTask implementation (route, payload shape and response
+ * handling) is inherited as-is; only the provider id and base URL differ.
+ */
+export class BagelNetConversational extends BaseConversationalTask {
+	constructor() {
+		super("bagelnet", "https://api.bagel.net");
+	}
+}
diff --git a/packages/inference/src/providers/consts.ts b/packages/inference/src/providers/consts.ts
index 995161970a..1fb7ebebd8 100644
--- a/packages/inference/src/providers/consts.ts
+++ b/packages/inference/src/providers/consts.ts
@@ -18,6 +18,7 @@ export const HARDCODED_MODEL_INFERENCE_MAPPING: Record<
 	 * Example:
 	 * "Qwen/Qwen2.5-Coder-32B-Instruct": "Qwen2.5-Coder-32B-Instruct",
 	 */
+	bagelnet: {},
 	"black-forest-labs": {},
 	cerebras: {},
 	cohere: {},
diff --git a/packages/inference/src/types.ts b/packages/inference/src/types.ts
index f48e9a011c..5f347d198f 100644
--- a/packages/inference/src/types.ts
+++ b/packages/inference/src/types.ts
@@ -37,6 +37,7 @@ export interface Options {
 export type InferenceTask = Exclude<PipelineType, "other"> | "conversational";
 
 export const INFERENCE_PROVIDERS = [
+	"bagelnet",
 	"black-forest-labs",
 	"cerebras",
 	"cohere",