+ import pytest
+
import dspy
from dspy import Example
from dspy.predict import Predict
@@ -12,57 +14,53 @@ def simple_metric(example, prediction, trace=None):

examples = [
    Example(input="What is the color of the sky?", output="blue").with_inputs("input"),
-     Example(input="What does the fox say?", output="Ring-ding-ding-ding-dingeringeding!"),
+     Example(input="What does the fox say?", output="Ring-ding-ding-ding-dingeringeding!").with_inputs("input"),
]
trainset = [examples[0]]


def test_bootstrap_finetune_initialization():
-     # Initialize BootstrapFinetune with a dummy metric and minimal setup
+     """Test BootstrapFinetune initialization with various parameters."""
    bootstrap = BootstrapFinetune(metric=simple_metric)
    assert bootstrap.metric == simple_metric, "Metric not correctly initialized"
-     assert bootstrap.multitask, "Multitask should default to True"
+     assert bootstrap.multitask == True, "Multitask should default to True"


class SimpleModule(dspy.Module):
-     def __init__(self, signature, lm=None):
+     def __init__(self, signature):
        super().__init__()
        self.predictor = Predict(signature)
-         if lm:
-             self.predictor.lm = lm

    def forward(self, **kwargs):
        return self.predictor(**kwargs)


- def test_compile_with_predict_instances_no_explicit_lm():
-     """Test BootstrapFinetune compile with predictors that don't have explicit LMs."""
-     from unittest.mock import patch
+ def test_error_handling_during_bootstrap():
+     """Test error handling during the bootstrapping process."""
+
+     class BuggyModule(dspy.Module):
+         def __init__(self, signature):
+             super().__init__()
+             self.predictor = Predict(signature)

-     # Create student and teacher modules without explicit LMs in predictors
-     student = SimpleModule("input -> output")
-     teacher = SimpleModule("input -> output")
+         def forward(self, **kwargs):
+             raise RuntimeError("Simulated error")

-     lm = DummyLM(["Initial thoughts", "Finish[blue]"])
+     student = SimpleModule("input -> output")
+     teacher = BuggyModule("input -> output")
+
+     # Setup DummyLM to simulate an error scenario
+     lm = DummyLM(
+         [
+             {"output": "Initial thoughts"},  # Simulate initial teacher's prediction
+         ]
+     )
    dspy.settings.configure(lm=lm)

-     # Verify that the predictor doesn't have an explicit LM
-     assert student.predictor.lm is None
-     bootstrap = BootstrapFinetune(metric=simple_metric)
-
-     # Mock all the components that would fail without proper setup
-     with patch("dspy.teleprompt.bootstrap_finetune.all_predictors_have_lms"), \
-          patch("dspy.teleprompt.bootstrap_finetune.prepare_teacher", return_value=teacher), \
-          patch("dspy.teleprompt.bootstrap_finetune.bootstrap_trace_data", return_value=[]), \
-          patch.object(bootstrap, "_prepare_finetune_data", return_value=([], "openai")), \
-          patch.object(bootstrap, "finetune_lms") as mock_finetune_lms:
-
-         mock_finetune_lms.return_value = {(lm, None): lm}
-
-         # This should not raise AttributeError due to the fix
-         compiled_student = bootstrap.compile(student, teacher=teacher, trainset=trainset)
-
-         assert compiled_student is not None, "Failed to compile student"
-         mock_finetune_lms.assert_called_once()
-
+     bootstrap = BootstrapFinetune(
+         metric=simple_metric,
+         max_errors=1,
+     )

+     with pytest.raises(RuntimeError, match="Simulated error"):
+         bootstrap.compile(student, teacher=teacher, trainset=trainset)