
Commit 586b06c

Merge pull request #5 from mutablelogic/dev
Anthropic and Documentation

2 parents e560ac1 + ed65c09

20 files changed: +883 -697 lines

README.md

Lines changed: 73 additions & 3 deletions
````diff
@@ -151,18 +151,88 @@ ensure the session is maintained across multiple calls.
 
 ### Embedding Generation
 
-TODO
+You can generate embedding vectors using an appropriate Ollama or Mistral model:
+
+```go
+import (
+    "context"
+
+    "github.com/mutablelogic/go-llm"
+)
+
+func embedding(ctx context.Context, agent llm.Agent) error {
+    // Generate an embedding vector for a prompt
+    vector, err := agent.Model(ctx, "mistral-embed").Embedding(ctx, "hello")
+    // ...
+}
+```
 
 ### Attachments & Image Caption Generation
 
-TODO
+Some models have `vision` capability and others can also summarize text. For example, to
+generate a caption for an image:
+
+```go
+import (
+    "context"
+    "os"
+
+    "github.com/mutablelogic/go-llm"
+)
+
+func generate_image_caption(ctx context.Context, agent llm.Agent, path string) (string, error) {
+    f, err := os.Open(path)
+    if err != nil {
+        return "", err
+    }
+    defer f.Close()
+
+    // Describe an image
+    r, err := agent.Model(ctx, "claude-3-5-sonnet-20241022").UserPrompt(
+        ctx, model.UserPrompt("Provide a short caption for this image", llm.WithAttachment(f)),
+    )
+    if err != nil {
+        return "", err
+    }
+
+    // Return the caption text
+    return r.Text(0), err
+}
+```
+
+Summarizing a text or PDF document works in exactly the same way with an Anthropic model, just
+with a different prompt.
 
 ### Streaming
 
-TODO
+Streaming is supported with all providers, but Ollama cannot be used with streaming and tools
+simultaneously. You provide a callback function with the signature `func(llm.Completion)`, which
+is called each time a completion chunk is received.
+
+```go
+import (
+    "context"
+    "fmt"
+
+    "github.com/mutablelogic/go-llm"
+)
+
+func generate_completion(ctx context.Context, agent llm.Agent, prompt string) (string, error) {
+    r, err := agent.Model(ctx, "claude-3-5-sonnet-20241022").UserPrompt(
+        ctx, model.UserPrompt("What is the weather in London?"),
+        llm.WithStream(stream_callback),
+    )
+    if err != nil {
+        return "", err
+    }
+
+    // Return the final completion text
+    return r.Text(0), err
+}
+
+func stream_callback(completion llm.Completion) {
+    // Print out the completion text on each call
+    fmt.Println(completion.Text(0))
+}
+```
 
 ### Tool Support
 
+All providers support tools, but not all models.
+
 TODO
 
 ## Options
````
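The embedding example above only shows a Mistral model. Below is a minimal sketch of the same call against an Ollama embedding model; the model name `nomic-embed-text` is an assumption for illustration and is not part of this commit, while the `Model` and `Embedding` calls mirror the README example.

```go
import (
    "context"
    "fmt"

    "github.com/mutablelogic/go-llm"
)

// Sketch only: assumes an Ollama-backed agent and that an embedding-capable
// model named "nomic-embed-text" (hypothetical here) has already been pulled.
func ollama_embedding(ctx context.Context, agent llm.Agent) error {
    vector, err := agent.Model(ctx, "nomic-embed-text").Embedding(ctx, "hello")
    if err != nil {
        return err
    }

    // Print the returned embedding vector
    fmt.Println(vector)
    return nil
}
```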

cmd/llm/main.go

Lines changed: 3 additions & 5 deletions
```diff
@@ -107,11 +107,9 @@ func main() {
 	if cli.OllamaEndpoint != "" {
 		opts = append(opts, agent.WithOllama(cli.OllamaEndpoint, clientopts...))
 	}
-	/*
-		if cli.AnthropicKey != "" {
-			opts = append(opts, agent.WithAnthropic(cli.AnthropicKey, clientopts...))
-		}
-	*/
+	if cli.AnthropicKey != "" {
+		opts = append(opts, agent.WithAnthropic(cli.AnthropicKey, clientopts...))
+	}
 	if cli.MistralKey != "" {
 		opts = append(opts, agent.WithMistral(cli.MistralKey, clientopts...))
 	}
```
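For context, the options assembled above are provider-agnostic `llm.Opt` values. A hedged sketch of how they might be combined into an agent follows; the `agent.New` constructor name is an assumption and is not shown in this diff, while the `With…` options are the ones defined in pkg/agent/opt.go.

```go
import (
    llm "github.com/mutablelogic/go-llm"
    agent "github.com/mutablelogic/go-llm/pkg/agent"
)

// Sketch only: agent.New is an assumed constructor; WithOllama, WithAnthropic
// and WithMistral are the options defined in pkg/agent/opt.go.
func newAgent(ollamaEndpoint, anthropicKey, mistralKey string) (llm.Agent, error) {
    opts := []llm.Opt{}
    if ollamaEndpoint != "" {
        opts = append(opts, agent.WithOllama(ollamaEndpoint))
    }
    if anthropicKey != "" {
        opts = append(opts, agent.WithAnthropic(anthropicKey))
    }
    if mistralKey != "" {
        opts = append(opts, agent.WithMistral(mistralKey))
    }
    return agent.New(opts...)
}
```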

pkg/agent/opt.go

Lines changed: 10 additions & 10 deletions
```diff
@@ -4,6 +4,7 @@ import (
 	// Packages
 	client "github.com/mutablelogic/go-client"
 	llm "github.com/mutablelogic/go-llm"
+	"github.com/mutablelogic/go-llm/pkg/anthropic"
 	mistral "github.com/mutablelogic/go-llm/pkg/mistral"
 	ollama "github.com/mutablelogic/go-llm/pkg/ollama"
 )
@@ -22,18 +23,17 @@ func WithOllama(endpoint string, opts ...client.ClientOpt) llm.Opt {
 	}
 }
 
-/*
-func WithAnthropic(key string, opts ...client.ClientOpt) llm.Opt {
-	return func(o *llm.Opts) error {
-		client, err := anthropic.New(key, opts...)
-		if err != nil {
-			return err
-		} else {
-			return llm.WithAgent(client)(o)
-		}
+func WithAnthropic(key string, opts ...client.ClientOpt) llm.Opt {
+	return func(o *llm.Opts) error {
+		client, err := anthropic.New(key, opts...)
+		if err != nil {
+			return err
+		} else {
+			return llm.WithAgent(client)(o)
+		}
 	}
 }
-*/
+}
+
 func WithMistral(key string, opts ...client.ClientOpt) llm.Opt {
 	return func(o *llm.Opts) error {
 		client, err := mistral.New(key, opts...)
```
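`WithAnthropic` is now live and follows the same functional-option pattern as the other providers. Below is a sketch of what an option for a further provider would look like under the same pattern; the `gemini` package and its `New` constructor are hypothetical and not part of this repository.

```go
package agent

import (
    client "github.com/mutablelogic/go-client"
    llm "github.com/mutablelogic/go-llm"
    gemini "github.com/mutablelogic/go-llm/pkg/gemini" // hypothetical package
)

// Sketch only: mirrors WithAnthropic and WithMistral above; gemini.New is
// assumed to share the (key, ...client.ClientOpt) signature of the others.
func WithGemini(key string, opts ...client.ClientOpt) llm.Opt {
    return func(o *llm.Opts) error {
        client, err := gemini.New(key, opts...)
        if err != nil {
            return err
        }
        return llm.WithAgent(client)(o)
    }
}
```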

pkg/anthropic/client.go

Lines changed: 36 additions & 4 deletions
```diff
@@ -5,6 +5,8 @@ package anthropic
 
 import (
 	// Packages
+	"context"
+
 	client "github.com/mutablelogic/go-client"
 	llm "github.com/mutablelogic/go-llm"
 )
@@ -42,10 +44,7 @@ func New(ApiKey string, opts ...client.ClientOpt) (*Client, error) {
 	}
 
 	// Return the client
-	return &Client{
-		Client: client,
-		cache:  make(map[string]llm.Model),
-	}, nil
+	return &Client{client, nil}, nil
 }
 
 ///////////////////////////////////////////////////////////////////////////////
@@ -55,3 +54,36 @@ func New(ApiKey string, opts ...client.ClientOpt) (*Client, error) {
 func (*Client) Name() string {
 	return defaultName
 }
+
+// Return the models
+func (anthropic *Client) Models(ctx context.Context) ([]llm.Model, error) {
+	// Cache models
+	if anthropic.cache == nil {
+		models, err := anthropic.ListModels(ctx)
+		if err != nil {
+			return nil, err
+		}
+		anthropic.cache = make(map[string]llm.Model, len(models))
+		for _, model := range models {
+			anthropic.cache[model.Name()] = model
+		}
+	}
+
+	// Return models
+	result := make([]llm.Model, 0, len(anthropic.cache))
+	for _, model := range anthropic.cache {
+		result = append(result, model)
+	}
+	return result, nil
+}
+
+// Return a model by name, or nil if not found.
+// Panics on error.
+func (anthropic *Client) Model(ctx context.Context, name string) llm.Model {
+	if anthropic.cache == nil {
+		if _, err := anthropic.Models(ctx); err != nil {
+			panic(err)
+		}
+	}
+	return anthropic.cache[name]
+}
```
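The new `Models` and `Model` methods lazily populate a model cache from `ListModels`. A minimal usage sketch follows, assuming a client constructed with `anthropic.New` as above; the environment-variable handling and the chosen model name are illustrative only.

```go
package main

import (
    "context"
    "fmt"
    "log"
    "os"

    anthropic "github.com/mutablelogic/go-llm/pkg/anthropic"
)

func main() {
    // Assumes ANTHROPIC_API_KEY is set in the environment (illustrative)
    client, err := anthropic.New(os.Getenv("ANTHROPIC_API_KEY"))
    if err != nil {
        log.Fatal(err)
    }
    ctx := context.Background()

    // First call populates the cache via ListModels
    models, err := client.Models(ctx)
    if err != nil {
        log.Fatal(err)
    }
    for _, model := range models {
        fmt.Println(model.Name())
    }

    // Subsequent lookups are served from the cache; nil if the name is unknown
    fmt.Println(client.Model(ctx, "claude-3-5-sonnet-20241022"))
}
```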

pkg/anthropic/client_test.go

Lines changed: 40 additions & 14 deletions
```diff
@@ -1,7 +1,10 @@
 package anthropic_test
 
 import (
+	"flag"
+	"log"
 	"os"
+	"strconv"
 	"testing"
 
 	// Packages
@@ -10,23 +13,46 @@ import (
 	assert "github.com/stretchr/testify/assert"
 )
 
-func Test_client_001(t *testing.T) {
-	assert := assert.New(t)
-	client, err := anthropic.New(GetApiKey(t), opts.OptTrace(os.Stderr, true))
-	if assert.NoError(err) {
-		assert.NotNil(client)
-		t.Log(client)
+///////////////////////////////////////////////////////////////////////////////
+// TEST SET-UP
+
+var (
+	client *anthropic.Client
+)
+
+func TestMain(m *testing.M) {
+	var verbose bool
+
+	// Verbose output
+	flag.Parse()
+	if f := flag.Lookup("test.v"); f != nil {
+		if v, err := strconv.ParseBool(f.Value.String()); err == nil {
+			verbose = v
+		}
 	}
+
+	// API KEY
+	api_key := os.Getenv("ANTHROPIC_API_KEY")
+	if api_key == "" {
+		log.Print("ANTHROPIC_API_KEY not set")
+		os.Exit(0)
+	}
+
+	// Create client
+	var err error
+	client, err = anthropic.New(api_key, opts.OptTrace(os.Stderr, verbose))
+	if err != nil {
+		log.Println(err)
+		os.Exit(-1)
+	}
+	os.Exit(m.Run())
 }
 
 ///////////////////////////////////////////////////////////////////////////////
-// ENVIRONMENT
+// TESTS
 
-func GetApiKey(t *testing.T) string {
-	key := os.Getenv("ANTHROPIC_API_KEY")
-	if key == "" {
-		t.Skip("ANTHROPIC_API_KEY not set, skipping tests")
-		t.SkipNow()
-	}
-	return key
+func Test_client_001(t *testing.T) {
+	assert := assert.New(t)
+	assert.NotNil(client)
+	t.Log(client)
 }
```
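With the shared `client` now initialised once in `TestMain`, further tests in the package can use it directly. A hedged sketch of such a follow-on test, exercising the `Models` accessor added in pkg/anthropic/client.go; the test name is illustrative and the file would also need `"context"` in its imports.

```go
func Test_client_002(t *testing.T) {
    assert := assert.New(t)

    // Reuses the package-level client created in TestMain
    models, err := client.Models(context.TODO())
    if assert.NoError(err) {
        assert.NotEmpty(models)
        t.Log(models)
    }
}
```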
