Skip to content

feat(inference): Add Hyperfusion Provider #3202

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/huggingface_hub/inference/_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ class InferenceClient:
Note: for better compatibility with OpenAI's client, `model` has been aliased as `base_url`. Those 2
arguments are mutually exclusive. If a URL is passed as `model` or `base_url` for chat completion, the `(/v1)/chat/completions` suffix path will be appended to the URL.
provider (`str`, *optional*):
Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `"replicate"`, "sambanova"` or `"together"`.
Name of the provider to use for inference. Can be `"black-forest-labs"`, `"cerebras"`, `"cohere"`, `"fal-ai"`, `"featherless-ai"`, `"fireworks-ai"`, `"groq"`, `"hf-inference"`, `"hyperbolic"`, `"hyperfusion"`, `"nebius"`, `"novita"`, `"nscale"`, `"openai"`, `"replicate"`, `"sambanova"` or `"together"`.
Defaults to "auto" i.e. the first of the providers available for the model, sorted by the user's order in https://hf.co/settings/inference-providers.
If model is a URL or `base_url` is passed, then `provider` is not used.
token (`str`, *optional*):
Expand Down
5 changes: 5 additions & 0 deletions src/huggingface_hub/inference/_providers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
HFInferenceTask,
)
from .hyperbolic import HyperbolicTextGenerationTask, HyperbolicTextToImageTask
from .hyperfusion import HyperfusionConversationalTask
from .nebius import (
NebiusConversationalTask,
NebiusFeatureExtractionTask,
Expand Down Expand Up @@ -53,6 +54,7 @@
"groq",
"hf-inference",
"hyperbolic",
"hyperfusion",
"nebius",
"novita",
"nscale",
Expand Down Expand Up @@ -124,6 +126,9 @@
"conversational": HyperbolicTextGenerationTask("conversational"),
"text-generation": HyperbolicTextGenerationTask("text-generation"),
},
"hyperfusion": {
"conversational": HyperfusionConversationalTask(),
},
"nebius": {
"text-to-image": NebiusTextToImageTask(),
"conversational": NebiusConversationalTask(),
Expand Down
27 changes: 27 additions & 0 deletions src/huggingface_hub/inference/_providers/hyperfusion.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
from typing import Optional

from huggingface_hub.hf_api import InferenceProviderMapping
from huggingface_hub.inference._providers._common import BaseConversationalTask


# Provider key under which this integration is registered (must match the
# entry added to PROVIDERS in `_providers/__init__.py`).
_PROVIDER = "hyperfusion"
# Root API endpoint; the task-specific path (e.g. `/v1/chat/completions`)
# is appended by the base task class when building the request URL.
_BASE_URL = "https://api.hyperfusion.io"


class HyperfusionConversationalTask(BaseConversationalTask):
    """Chat-completion (conversational) task handler for the Hyperfusion provider.

    Routes requests to the Hyperfusion API using the OpenAI-compatible
    chat-completions interface provided by `BaseConversationalTask`.
    """

    def __init__(self):
        super().__init__(provider=_PROVIDER, base_url=_BASE_URL)

    def _prepare_api_key(self, api_key: Optional[str]) -> str:
        """Return the user-supplied API key.

        Raises:
            ValueError: If no API key was provided. A Hyperfusion key is
                mandatory for this provider.
        """
        if api_key is None:
            raise ValueError(
                "You must provide an api_key to work with Hyperfusion API."
            )
        return api_key

    def _prepare_mapping_info(self, model: Optional[str]) -> InferenceProviderMapping:
        """Build a static mapping: the requested model id is used verbatim as the provider id.

        Raises:
            ValueError: If no model id was provided.
        """
        if model is None:
            # Fixed article: "a Hyperfusion", not "an Hyperfusion".
            raise ValueError("Please provide a Hyperfusion model ID, e.g. `llm-en`.")
        return InferenceProviderMapping(
            providerId=model,
            task="conversational",
            status="live",
            hf_model_id=model,
        )
5 changes: 5 additions & 0 deletions tests/test_inference_providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@
HFInferenceTask,
)
from huggingface_hub.inference._providers.hyperbolic import HyperbolicTextGenerationTask, HyperbolicTextToImageTask
from huggingface_hub.inference._providers.hyperfusion import HyperfusionConversationalTask
from huggingface_hub.inference._providers.nebius import NebiusFeatureExtractionTask, NebiusTextToImageTask
from huggingface_hub.inference._providers.novita import NovitaConversationalTask, NovitaTextGenerationTask
from huggingface_hub.inference._providers.nscale import NscaleConversationalTask, NscaleTextToImageTask
Expand Down Expand Up @@ -910,6 +911,10 @@ def test_text_to_image_get_response(self):
response = helper.get_response({"images": [{"image": base64.b64encode(dummy_image).decode()}]})
assert response == dummy_image

class TestHyperfusionProvider:
    """Tests for the Hyperfusion conversational provider helper."""

    def test_prepare_url(self):
        # The provider's base URL must resolve to the OpenAI-compatible chat route.
        task = HyperfusionConversationalTask()
        expected = "https://api.hyperfusion.io/v1/chat/completions"
        assert task._prepare_url("sk-XXXXXX", "llm-en") == expected

class TestNebiusProvider:
def test_prepare_route_text_to_image(self):
Expand Down