// Package dspy is a Go implementation of the DSPy framework for using language models
// to solve complex tasks through composable steps and prompting techniques.
//
// DSPy-Go provides a collection of modules, optimizers, and tools for building
// reliable LLM-powered applications. It focuses on making it easy to:
// - Break down complex tasks into modular steps
// - Optimize prompts and chain-of-thought reasoning
// - Build flexible agent-based systems
// - Handle common LLM interaction patterns
// - Evaluate and improve system performance
//
// Key Components:
//
// - Core: Fundamental abstractions like Module, Signature, LLM and Program
// for defining and executing LLM-based workflows.
//
// - Modules: Building blocks for composing LLM workflows:
// * Predict: Basic prediction module for simple LLM interactions
// * ChainOfThought: Implements step-by-step reasoning with rationale tracking
// * ReAct: Implements Reasoning and Acting with tool integration
//
// - Optimizers: Tools for improving prompt effectiveness:
// * BootstrapFewShot: Automatically selects high-quality examples for few-shot learning
// * MIPRO: Multi-step interactive prompt optimization
// * Copro: Collaborative prompt optimization
//
// - Agents: Advanced patterns for building sophisticated AI systems:
// * Memory: Different memory implementations for tracking conversation history
// * Tools: Integration with external tools and APIs
// * Workflows:
// - Chain: Sequential execution of steps
// - Parallel: Concurrent execution with controlled parallelism
// - Router: Dynamic routing based on classification
// * Orchestrator: Flexible task decomposition and execution
//
// - Integration with multiple LLM providers:
// * Anthropic Claude
// * Google Gemini
// * Ollama
// * LlamaCPP
//
// Simple Example:
//
//	import (
//		"context"
//		"fmt"
//		"log"
//
//		"github.com/XiaoConstantine/dspy-go/pkg/config"
//		"github.com/XiaoConstantine/dspy-go/pkg/core"
//		"github.com/XiaoConstantine/dspy-go/pkg/llms"
//		"github.com/XiaoConstantine/dspy-go/pkg/modules"
//	)
//
//	func main() {
//		// Configure the default LLM
//		llms.EnsureFactory()
//		err := config.ConfigureDefaultLLM("your-api-key", core.ModelAnthropicSonnet)
//		if err != nil {
//			log.Fatalf("Failed to configure LLM: %v", err)
//		}
//
//		// Create a signature for question answering
//		signature := core.NewSignature(
//			[]core.InputField{{Field: core.Field{Name: "question"}}},
//			[]core.OutputField{{Field: core.Field{Name: "answer"}}},
//		)
//
//		// Create a ChainOfThought module
//		cot := modules.NewChainOfThought(signature)
//
//		// Create a program
//		program := core.NewProgram(
//			map[string]core.Module{"cot": cot},
//			func(ctx context.Context, inputs map[string]interface{}) (map[string]interface{}, error) {
//				return cot.Process(ctx, inputs)
//			},
//		)
//
//		// Execute the program
//		result, err := program.Execute(context.Background(), map[string]interface{}{
//			"question": "What is the capital of France?",
//		})
//		if err != nil {
//			log.Fatalf("Error executing program: %v", err)
//		}
//
//		fmt.Printf("Answer: %s\n", result["answer"])
//	}
//
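// The ChainOfThought module above can be swapped for any module that accepts the
// same signature. As a minimal sketch, the basic Predict module could be used
// instead (reusing the signature defined above and assuming ctx is an existing
// context.Context):
//
//	// Predict issues a single LLM call without an explicit reasoning step.
//	predict := modules.NewPredict(signature)
//	answer, err := predict.Process(ctx, map[string]interface{}{
//		"question": "What is the capital of France?",
//	})
//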
// Advanced Features:
//
// - Tracing and Logging: Detailed tracing and structured logging for debugging and optimization.
//   Execution context is tracked and propagated through the pipeline for analysis.
//
// - Error Handling: Comprehensive error management with custom error types and centralized handling
//
// - Metric-Based Optimization: Improve module performance based on custom evaluation metrics (see the sketch after this list)
//
// - Custom Tool Integration: Extend ReAct modules with domain-specific tools
//
// - Workflow Retry Logic: Resilient execution with configurable retry mechanisms and backoff strategies
//
// - Streaming Support: Process LLM outputs incrementally as they're generated
//
// - Data Storage: Integration with various storage backends for persistence of examples and results
//
// - Arrow Support: Integration with Apache Arrow for efficient data handling and processing
//
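// As an illustration of metric-based optimization, an evaluation metric is
// essentially a function that scores a predicted output against the expected
// one. The sketch below shows the general shape only; the exact function type
// each optimizer expects may differ, so consult the repository documentation:
//
//	// Sketch of a custom exact-match metric over output maps (the signature is
//	// illustrative and not necessarily the one the optimizers require).
//	func exactMatch(expected, predicted map[string]interface{}) float64 {
//		if expected["answer"] == predicted["answer"] {
//			return 1.0
//		}
//		return 0.0
//	}
//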
// Working with Workflows:
//
//	// Chain workflow example
//	workflow := workflows.NewChainWorkflow(store)
//	workflow.AddStep(&workflows.Step{
//		ID:     "step1",
//		Module: modules.NewPredict(signature1),
//	})
//	workflow.AddStep(&workflows.Step{
//		ID:     "step2",
//		Module: modules.NewPredict(signature2),
//		// Configurable retry logic
//		RetryConfig: &workflows.RetryConfig{
//			MaxAttempts:       3,
//			BackoffMultiplier: 2.0,
//		},
//		// Conditional execution
//		Condition: func(state map[string]interface{}) bool {
//			return someCondition(state)
//		},
//	})
//
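// Running a workflow follows the same context-and-map pattern as Program.Execute
// in the Simple Example above. A sketch (the method name and input key are
// assumed here and may differ from the current API):
//
//	// Run the chain end to end with an initial input map.
//	result, err := workflow.Execute(context.Background(), map[string]interface{}{
//		"input": "raw text to process",
//	})
//	if err != nil {
//		log.Fatalf("Workflow failed: %v", err)
//	}
//	fmt.Println(result)
//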
// For more examples and detailed documentation, visit:
// https://github.com/XiaoConstantine/dspy-go
//
// DSPy-Go is released under the MIT License.
package dspy