Commit e6d2565 — "Updated" (1 parent: ce37f7e)

File tree: 7 files changed (+46 −36 lines)

cmd/llm/complete.go

Lines changed: 2 additions & 2 deletions
@@ -94,9 +94,9 @@ func (cmd *CompleteCmd) opts() []llm.Opt {
 	// Set system prompt
 	var system []string
 	if cmd.Format == "markdown" {
-		system = append(system, "Return the completion in markdown format.")
+		system = append(system, "Structure your output in markdown format.")
 	} else if cmd.Format == "json" {
-		system = append(system, "Return the completion in JSON format.")
+		system = append(system, "Structure your output in JSON format.")
 	}
 	if cmd.System != "" {
 		system = append(system, cmd.System)

cmd/llm/main.go

Lines changed: 3 additions & 5 deletions
@@ -124,11 +124,9 @@ func main() {
 	if cli.OllamaEndpoint != "" {
 		opts = append(opts, agent.WithOllama(cli.OllamaEndpoint, clientopts...))
 	}
-	/*
-		if cli.AnthropicKey != "" {
-			opts = append(opts, agent.WithAnthropic(cli.AnthropicKey, clientopts...))
-		}
-	*/
+	if cli.AnthropicKey != "" {
+		opts = append(opts, agent.WithAnthropic(cli.AnthropicKey, clientopts...))
+	}
 	if cli.MistralKey != "" {
 		opts = append(opts, agent.WithMistral(cli.MistralKey, clientopts...))
 	}

pkg/agent/opt.go

Lines changed: 2 additions & 3 deletions
@@ -4,8 +4,9 @@ import (
 	// Packages
 	client "github.com/mutablelogic/go-client"
 	llm "github.com/mutablelogic/go-llm"
+	anthropic "github.com/mutablelogic/go-llm/pkg/anthropic"
 	gemini "github.com/mutablelogic/go-llm/pkg/gemini"
-	"github.com/mutablelogic/go-llm/pkg/mistral"
+	mistral "github.com/mutablelogic/go-llm/pkg/mistral"
 	ollama "github.com/mutablelogic/go-llm/pkg/ollama"
 	openai "github.com/mutablelogic/go-llm/pkg/openai"
 )
@@ -24,7 +25,6 @@ func WithOllama(endpoint string, opts ...client.ClientOpt) llm.Opt {
 	}
 }
 
-/*
 func WithAnthropic(key string, opts ...client.ClientOpt) llm.Opt {
 	return func(o *llm.Opts) error {
 		client, err := anthropic.New(key, opts...)
@@ -35,7 +35,6 @@ func WithAnthropic(key string, opts ...client.ClientOpt) llm.Opt {
 	}
 }
-*/
 
 func WithMistral(key string, opts ...client.ClientOpt) llm.Opt {
 	return func(o *llm.Opts) error {

pkg/anthropic/completion.go

Lines changed: 37 additions & 22 deletions
@@ -21,7 +21,7 @@ type Response struct {
 	Reason       string  `json:"stop_reason,omitempty"`
 	StopSequence *string `json:"stop_sequence,omitempty"`
 	Message
-	Metrics `json:"usage,omitempty"`
+	*Metrics `json:"usage,omitempty"`
 }
 
 // Metrics
@@ -43,30 +43,43 @@ func (r Response) String() string {
 	return string(data)
 }
 
+func (m Metrics) String() string {
+	data, err := json.MarshalIndent(m, "", " ")
+	if err != nil {
+		return err.Error()
+	}
+	return string(data)
+}
+
 ///////////////////////////////////////////////////////////////////////////////
 // PUBLIC METHODS
 
 type reqMessages struct {
-	Model         string       `json:"model"`
-	MaxTokens     uint64       `json:"max_tokens,omitempty"`
-	Metadata      *optmetadata `json:"metadata,omitempty"`
-	StopSequences []string     `json:"stop_sequences,omitempty"`
-	Stream        bool         `json:"stream,omitempty"`
-	System        string       `json:"system,omitempty"`
-	Temperature   float64      `json:"temperature,omitempty"`
-	TopK          uint64       `json:"top_k,omitempty"`
-	TopP          float64      `json:"top_p,omitempty"`
-	Messages      []*Message   `json:"messages"`
-	Tools         []llm.Tool   `json:"tools,omitempty"`
-	ToolChoice    any          `json:"tool_choice,omitempty"`
+	Model         string           `json:"model"`
+	MaxTokens     uint64           `json:"max_tokens,omitempty"`
+	Metadata      *optmetadata     `json:"metadata,omitempty"`
+	StopSequences []string         `json:"stop_sequences,omitempty"`
+	Stream        bool             `json:"stream,omitempty"`
+	System        string           `json:"system,omitempty"`
+	Temperature   float64          `json:"temperature,omitempty"`
+	TopK          uint64           `json:"top_k,omitempty"`
+	TopP          float64          `json:"top_p,omitempty"`
+	Tools         []llm.Tool       `json:"tools,omitempty"`
+	ToolChoice    any              `json:"tool_choice,omitempty"`
+	Messages      []llm.Completion `json:"messages"`
 }
 
+// Send a completion request with a single prompt, and return the next completion
 func (model *model) Completion(ctx context.Context, prompt string, opts ...llm.Opt) (llm.Completion, error) {
-	// TODO
-	return nil, llm.ErrNotImplemented
+	message, err := messagefactory{}.UserPrompt(prompt, opts...)
+	if err != nil {
+		return nil, err
+	}
+	return model.Chat(ctx, []llm.Completion{message}, opts...)
 }
 
-func (anthropic *Client) Messages(ctx context.Context, context llm.Context, opts ...llm.Opt) (*Response, error) {
+// Send a completion request with multiple completions, and return the next completion
+func (model *model) Chat(ctx context.Context, completions []llm.Completion, opts ...llm.Opt) (llm.Completion, error) {
 	// Apply options
 	opt, err := llm.ApplyOpts(opts...)
 	if err != nil {
@@ -75,28 +88,30 @@ func (anthropic *Client) Messages(ctx context.Context, context llm.Context, opts
 
 	// Request
 	req, err := client.NewJSONRequest(reqMessages{
-		Model:         context.(*session).model.Name(),
-		MaxTokens:     optMaxTokens(context.(*session).model, opt),
+		Model:         model.Name(),
+		MaxTokens:     optMaxTokens(model, opt),
 		Metadata:      optMetadata(opt),
 		StopSequences: optStopSequences(opt),
 		Stream:        optStream(opt),
 		System:        optSystemPrompt(opt),
 		Temperature:   optTemperature(opt),
 		TopK:          optTopK(opt),
 		TopP:          optTopP(opt),
-		Messages:      context.(*session).seq,
-		Tools:         optTools(anthropic, opt),
+		Tools:         optTools(model.Client, opt),
 		ToolChoice:    optToolChoice(opt),
+		Messages:      completions,
 	})
 	if err != nil {
 		return nil, err
 	}
 
-	// Stream
+	// Response options
 	var response Response
 	reqopts := []client.RequestOpt{
 		client.OptPath("messages"),
 	}
+
+	// Streaming
 	if optStream(opt) {
 		reqopts = append(reqopts, client.OptTextStreamCallback(func(evt client.TextStreamEvent) error {
 			if err := streamEvent(&response, evt); err != nil {
@@ -110,7 +125,7 @@ func (anthropic *Client) Messages(ctx context.Context, context llm.Context, opts
 	}
 
 	// Response
-	if err := anthropic.DoWithContext(ctx, req, &response, reqopts...); err != nil {
+	if err := model.DoWithContext(ctx, req, &response, reqopts...); err != nil {
 		return nil, err
 	}
 

pkg/anthropic/messagefactory.go

Lines changed: 0 additions & 1 deletion
@@ -1,7 +1,6 @@
 package anthropic
 
 import (
-	// Packages
 	llm "github.com/mutablelogic/go-llm"
 )
 

pkg/ollama/opt.go

Lines changed: 2 additions & 3 deletions
@@ -1,7 +1,6 @@
 package ollama
 
 import (
-	"strconv"
 	"strings"
 	"time"
 
@@ -95,9 +94,9 @@ func optFormat(opts *llm.Opts) string {
 		return ""
 	}
 	if format == "json_format" {
-		return strconv.Quote("json")
+		return "json"
 	}
-	return strconv.Quote(format)
+	return format
 }
 
 func optStopSequence(opts *llm.Opts) []string {

0 commit comments

Comments
 (0)