
Commit db385bc

Added examples

1 parent 932b720 commit db385bc

File tree

5 files changed: 150 additions, 14 deletions

README.md
cmd/examples/agents/main.go
cmd/examples/completion/main.go
cmd/examples/embedding/main.go
cmd/examples/models/main.go

README.md

Lines changed: 20 additions & 14 deletions
````diff
@@ -17,16 +17,15 @@ If you have docker installed, you can use the following command to run the tool,
 installation:
 
 ```bash
-# Display help
-docker run ghcr.io/mutablelogic/go-llm:latest --help
+# Display version, help
+docker run ghcr.io/mutablelogic/go-llm version
+docker run ghcr.io/mutablelogic/go-llm --help
 
 # Interact with Claude to retrieve news headlines, assuming
-# you have an API key for Anthropic and NewsAPI
-docker run \
-  -e OLLAMA_URL -e MISTRAL_API_KEY -e ANTHROPIC_API_KEY -e OPENAI_API_KEY \
-  -e NEWSAPI_KEY \
-  ghcr.io/mutablelogic/go-llm:latest \
-  chat mistral-small-latest --prompt "What is the latest news?" --no-stream
+# you have an API key for both Anthropic and NewsAPI
+docker run -e ANTHROPIC_API_KEY -e NEWSAPI_KEY \
+  ghcr.io/mutablelogic/go-llm \
+  chat mistral-small-latest --prompt "What is the latest news?"
 ```
 
 See below for more information on how to use the command-line tool (or how to
@@ -559,17 +558,24 @@ LLM agent command line interface
 Flags:
   -h, --help                    Show context-sensitive help.
       --debug                   Enable debug output
-      --verbose                 Enable verbose output
+  -v, --verbose                 Enable verbose output
+      --timeout=DURATION        Agent connection timeout
       --ollama-endpoint=STRING  Ollama endpoint ($OLLAMA_URL)
       --anthropic-key=STRING    Anthropic API Key ($ANTHROPIC_API_KEY)
+      --mistral-key=STRING      Mistral API Key ($MISTRAL_API_KEY)
+      --open-ai-key=STRING      OpenAI API Key ($OPENAI_API_KEY)
+      --gemini-key=STRING       Gemini API Key ($GEMINI_API_KEY)
       --news-key=STRING         News API Key ($NEWSAPI_KEY)
 
 Commands:
-  agents     Return a list of agents
-  models     Return a list of models
-  tools      Return a list of tools
-  download   Download a model
-  chat       Start a chat session
+  agents       Return a list of agents
+  models       Return a list of models
+  tools        Return a list of tools
+  download     Download a model
+  chat         Start a chat session
+  complete     Complete a prompt
+  embedding    Generate an embedding
+  version      Print the version of this tool
 
 Run "llm <command> --help" for more information on a command.
 ```
````
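
Since the help output now lists the new `complete`, `embedding` and `version` commands, their argument syntax can be checked with the per-command help noted on the last line above. A sketch using the docker image from the earlier snippet:

```bash
# Per-command help for the commands added in this commit
docker run ghcr.io/mutablelogic/go-llm complete --help
docker run ghcr.io/mutablelogic/go-llm embedding --help
```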

cmd/examples/agents/main.go

Lines changed: 22 additions & 0 deletions
```go
package main

import (
	"fmt"
	"os"

	"github.com/mutablelogic/go-llm/pkg/agent"
)

func main() {
	// Create a new agent which aggregates multiple providers
	agent, err := agent.New(
		agent.WithAnthropic(os.Getenv("ANTHROPIC_API_KEY")),
		agent.WithMistral(os.Getenv("MISTRAL_API_KEY")),
		agent.WithOpenAI(os.Getenv("OPENAI_API_KEY")),
		agent.WithOllama(os.Getenv("OLLAMA_URL")),
	)
	if err != nil {
		panic(err)
	}
	fmt.Println("Running agents are: ", agent.Name())
}
```
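
This example takes no arguments. A sketch of how it might be run (not part of the commit), assuming a Go toolchain, the repository root as working directory, and the provider keys referenced above exported in the environment:

```bash
# Lists the provider agents that were configured from the environment
go run ./cmd/examples/agents
```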

cmd/examples/completion/main.go

Lines changed: 41 additions & 0 deletions
```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/mutablelogic/go-llm/pkg/agent"
)

func main() {
	// Create a new agent which aggregates multiple providers
	agent, err := agent.New(
		agent.WithAnthropic(os.Getenv("ANTHROPIC_API_KEY")),
		agent.WithMistral(os.Getenv("MISTRAL_API_KEY")),
		agent.WithOpenAI(os.Getenv("OPENAI_API_KEY")),
		agent.WithOllama(os.Getenv("OLLAMA_URL")),
	)
	if err != nil {
		panic(err)
	}

	// Get a model
	if len(os.Args) != 3 {
		fmt.Println("Usage: completion <model> <prompt>")
		os.Exit(-1)
	}

	model, err := agent.GetModel(context.TODO(), os.Args[1])
	if err != nil {
		panic(err)
	}

	// Get completion
	completion, err := model.Completion(context.TODO(), os.Args[2])
	if err != nil {
		panic(err)
	}

	fmt.Println("Completion is: ", completion)
}
```
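
A sketch of a possible invocation from the repository root; `mistral-small-latest` is simply the model used elsewhere in the README, and any model exposed by a configured provider should work:

```bash
export MISTRAL_API_KEY=...
go run ./cmd/examples/completion mistral-small-latest "Why is the sky blue?"
```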

cmd/examples/embedding/main.go

Lines changed: 41 additions & 0 deletions
```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/mutablelogic/go-llm/pkg/agent"
)

func main() {
	// Create a new agent which aggregates multiple providers
	agent, err := agent.New(
		agent.WithAnthropic(os.Getenv("ANTHROPIC_API_KEY")),
		agent.WithMistral(os.Getenv("MISTRAL_API_KEY")),
		agent.WithOpenAI(os.Getenv("OPENAI_API_KEY")),
		agent.WithOllama(os.Getenv("OLLAMA_URL")),
	)
	if err != nil {
		panic(err)
	}

	// Get a model
	if len(os.Args) != 3 {
		fmt.Println("Usage: embedding <model> <prompt>")
		os.Exit(-1)
	}

	model, err := agent.GetModel(context.TODO(), os.Args[1])
	if err != nil {
		panic(err)
	}

	// Get embedding vector
	vector, err := model.Embedding(context.TODO(), os.Args[2])
	if err != nil {
		panic(err)
	}

	fmt.Println("Vector is: ", vector)
}
```
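
A sketch of a possible invocation; the first argument must be an embedding-capable model from a configured provider, and `mistral-embed` here is an assumption rather than something taken from this commit:

```bash
export MISTRAL_API_KEY=...
go run ./cmd/examples/embedding mistral-embed "The quick brown fox"
```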

cmd/examples/models/main.go

Lines changed: 26 additions & 0 deletions
```go
package main

import (
	"context"
	"fmt"
	"os"

	"github.com/mutablelogic/go-llm/pkg/agent"
)

func main() {
	// Create a new agent which aggregates multiple providers
	agent, err := agent.New(
		agent.WithAnthropic(os.Getenv("ANTHROPIC_API_KEY")),
		agent.WithMistral(os.Getenv("MISTRAL_API_KEY")),
		agent.WithOpenAI(os.Getenv("OPENAI_API_KEY")),
		agent.WithOllama(os.Getenv("OLLAMA_URL")),
	)
	if err != nil {
		panic(err)
	}

	// Return models
	models, err := agent.ListModels(context.TODO())
	if err != nil {
		panic(err)
	}
	fmt.Println("Available models are: ", models)
}
```
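
This example also takes no arguments. A sketch of running it, assuming at least one provider key (or the Ollama endpoint) is set so there are models to list:

```bash
export ANTHROPIC_API_KEY=...
go run ./cmd/examples/models
```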
