Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ Install `marvin`:
uv pip install marvin
```

Configure your LLM provider (Marvin uses OpenAI by default but natively supports [all Pydantic AI models](https://ai.pydantic.dev/models/)):
Configure your LLM provider (Marvin uses OpenAI by default, and natively supports [all Pydantic AI models](https://ai.pydantic.dev/models/) as well as OpenAI-compatible providers such as [AI/ML API](https://aimlapi.com/)):

```bash
export OPENAI_API_KEY=your-api-key
Expand Down
1 change: 1 addition & 0 deletions docs/guides/configure-llms.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ Marvin supports any model provider that is compatible with Pydantic AI. Common p
- Anthropic
- Azure OpenAI
- Google
- [AI/ML API](https://aimlapi.com) - 300+ models with an OpenAI-compatible interface

Each provider may require its own API key and configuration. Refer to the provider's [documentation](https://ai.pydantic.dev/models/) for specific setup instructions.

Expand Down
8 changes: 7 additions & 1 deletion docs/installation.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ icon: download
## Requirements

- Python 3.10 or higher
- An API key from an LLM provider (OpenAI by default)
- An API key from an LLM provider (OpenAI by default, or [AI/ML API](https://aimlapi.com/))

## Install `marvin`

Expand Down Expand Up @@ -40,6 +40,12 @@ By default, Marvin uses OpenAI's models. Set your API key as an environment vari
export OPENAI_API_KEY="your-api-key"
```

To use AI/ML API, set the `AIML_API_KEY` environment variable instead:

```bash
export AIML_API_KEY="your-api-key"
```

To use another provider, see the docs on [configuring LLMs](/guides/configure-llms).

## Development Installation
Expand Down
30 changes: 30 additions & 0 deletions examples/provider_specific/aimlapi/run_agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import asyncio
import os
from pathlib import Path

from pydantic_ai.models.openai import OpenAIModel

import marvin
from marvin.providers.aimlapi import AIMLAPIProvider


def write_file(path: str, content: str) -> None:
    """Save *content* to the file at *path*, overwriting any existing file."""
    with open(path, "w") as fh:
        fh.write(content)


# Agent that drafts developer-facing copy through the AI/ML API's
# OpenAI-compatible endpoint, with gpt-4o-mini as the underlying model.
# NOTE(review): assumes AIML_API_KEY is set in the environment —
# AIMLAPIProvider raises UserError at construction time otherwise.
writer = marvin.Agent(
    model=OpenAIModel(
        "gpt-4o-mini",
        provider=AIMLAPIProvider(api_key=os.getenv("AIML_API_KEY")),
    ),
    name="AI/ML Writer",
    instructions="Write concise, engaging content for developers",
    # Exposes write_file as a tool so the agent can save its output to disk.
    tools=[write_file],
)

async def main() -> None:
    """Run the writer agent on a sample prompt and print its result."""
    result = await marvin.run(
        "how to use pydantic? write haiku to docs.md", agents=[writer]
    )
    print(result)


if __name__ == "__main__":
    # Bug fix: the original called asyncio.run() without importing asyncio,
    # which raised NameError on launch. The import is now at the top of the file.
    asyncio.run(main())
3 changes: 3 additions & 0 deletions src/marvin/providers/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
"""Marvin model providers.

Currently exposes :class:`AIMLAPIProvider`, a Pydantic AI ``Provider`` for the
OpenAI-compatible AI/ML API.
"""

from .aimlapi import AIMLAPIProvider

__all__ = ["AIMLAPIProvider"]
66 changes: 66 additions & 0 deletions src/marvin/providers/aimlapi.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
from __future__ import annotations

import os
from typing import overload

from httpx import AsyncClient as AsyncHTTPClient
from openai import AsyncOpenAI

from pydantic_ai.exceptions import UserError
from pydantic_ai.models import cached_async_http_client
from pydantic_ai.profiles import ModelProfile
from pydantic_ai.profiles.openai import openai_model_profile
from pydantic_ai.providers import Provider


class AIMLAPIProvider(Provider[AsyncOpenAI]):
    """Pydantic AI provider backed by the AI/ML API's OpenAI-compatible endpoint.

    Credentials come from (in order of precedence) an explicit ``api_key``
    argument, the ``AIML_API_KEY`` environment variable, or a pre-built
    ``openai_client``.
    """

    @property
    def name(self) -> str:  # pragma: no cover - simple property
        """Short identifier used by Pydantic AI to refer to this provider."""
        return "aimlapi"

    @property
    def base_url(self) -> str:  # pragma: no cover - simple property
        """Root URL of the AI/ML API's OpenAI-compatible REST endpoint."""
        return "https://api.aimlapi.com/v1"

    @property
    def client(self) -> AsyncOpenAI:
        """The ``AsyncOpenAI`` client this provider sends requests through."""
        return self._client

    def model_profile(self, model_name: str) -> ModelProfile | None:  # pragma: no cover - thin wrapper
        """Delegate to the generic OpenAI profile, since the API is OpenAI-shaped."""
        return openai_model_profile(model_name)

    # Overloads document the supported construction combinations; the real
    # implementation below accepts any mix and validates it at runtime.
    @overload
    def __init__(self) -> None: ...

    @overload
    def __init__(self, *, api_key: str) -> None: ...

    @overload
    def __init__(self, *, api_key: str, http_client: AsyncHTTPClient) -> None: ...

    @overload
    def __init__(self, *, openai_client: AsyncOpenAI | None = None) -> None: ...

    def __init__(
        self,
        *,
        api_key: str | None = None,
        openai_client: AsyncOpenAI | None = None,
        http_client: AsyncHTTPClient | None = None,
    ) -> None:
        # `or` (not `is None`) so an empty-string key also falls back to the env var.
        key = api_key or os.getenv("AIML_API_KEY")
        if openai_client is None and not key:
            raise UserError(
                "You must provide either an API key (set the `AIML_API_KEY` environment variable or pass it via `AIMLAPIProvider(api_key=...)`) "
                "or an OpenAI client (pass it via `AIMLAPIProvider(openai_client=...)`) to use the AI/ML API provider."
            )

        # A fully-configured client wins outright; otherwise build one,
        # reusing the caller's HTTP client when given, else a shared cached one.
        if openai_client is not None:
            self._client = openai_client
            return
        transport = http_client or cached_async_http_client(provider="aimlapi")
        self._client = AsyncOpenAI(base_url=self.base_url, api_key=key, http_client=transport)