Skip to content

Commit b4a03e6

Browse files
committed
Fall back to settings.lm when a predictor has no explicit LM in BootstrapFinetune; add regression tests
1 parent 554262b commit b4a03e6

File tree

3 files changed

+79
-29
lines changed

3 files changed

+79
-29
lines changed

dspy/predict/chain_of_thought_with_hint.py

Lines changed: 0 additions & 28 deletions
This file was deleted.

dspy/teleprompt/bootstrap_finetune.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -83,6 +83,7 @@ def compile(
8383
data_pred_ind = None if self.multitask else pred_ind
8484
lm = pred.lm or settings.lm
8585
training_key = (lm, data_pred_ind)
86+
8687
if training_key not in key_to_data:
8788
train_data, data_format = self._prepare_finetune_data(
8889
trace_data=trace_data, lm=lm, pred_ind=data_pred_ind
@@ -115,7 +116,7 @@ def compile(
115116
logger.info("Updating the student program with the fine-tuned LMs...")
116117
for pred_ind, pred in enumerate(student.predictors()):
117118
data_pred_ind = None if self.multitask else pred_ind
118-
training_key = (pred.lm, data_pred_ind)
119+
training_key = (pred.lm or settings.lm, data_pred_ind)
119120
finetuned_lm = key_to_lm[training_key]
120121
if isinstance(finetuned_lm, Exception):
121122
raise RuntimeError(f"Finetuned LM for predictor {pred_ind} failed.") from finetuned_lm
Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,77 @@
1+
import dspy
2+
from dspy import Example
3+
from dspy.predict import Predict
4+
from dspy.teleprompt import BootstrapFinetune
5+
from dspy.utils.dummies import DummyLM
6+
7+
8+
def simple_metric(example, prediction, trace=None):
    """Simplified test metric: exact match between expected and predicted output.

    ``trace`` is accepted for compatibility with the dspy metric signature
    but is ignored.
    """
    expected, predicted = example.output, prediction.output
    return expected == predicted
12+
13+
14+
# Tiny fixture dataset; only the first example is tagged with its input
# field and used as the training set.
examples = [
    Example(input="What is the color of the sky?", output="blue").with_inputs("input"),
    Example(
        input="What does the fox say?",
        output="Ring-ding-ding-ding-dingeringeding!",
    ),
]
trainset = examples[:1]
19+
20+
21+
def test_bootstrap_finetune_initialization():
    """BootstrapFinetune stores the given metric and defaults to multitask mode."""
    bootstrap = BootstrapFinetune(metric=simple_metric)
    # Functions compare by identity; `is` states the intent precisely.
    assert bootstrap.metric is simple_metric, "Metric not correctly initialized"
    # PEP 8 (E712): compare to True with `is`, not `==`, so a merely
    # truthy non-bool value would be caught.
    assert bootstrap.multitask is True, "Multitask should default to True"
26+
27+
28+
class SimpleModule(dspy.Module):
    """Minimal dspy module wrapping a single Predict for the given signature.

    If ``lm`` is provided (truthy), it is attached to the predictor as its
    explicit LM; otherwise the predictor has no LM of its own.
    """

    def __init__(self, signature, lm=None):
        super().__init__()
        predictor = Predict(signature)
        if lm:
            predictor.lm = lm
        self.predictor = predictor

    def forward(self, **kwargs):
        return self.predictor(**kwargs)
37+
38+
39+
def test_compile_with_predict_instances_no_explicit_lm():
    """BootstrapFinetune.compile falls back to settings.lm for predictors
    that have no explicit LM (regression test for ``lm = pred.lm or settings.lm``).
    """
    from unittest.mock import patch

    # Student and teacher predictors deliberately carry no explicit LM so
    # the compile path must use the globally configured fallback.
    student = SimpleModule("input -> output")
    teacher = SimpleModule("input -> output")

    # Configure the fallback LM in global settings. FIX: the original test
    # saved `original_lm` but never restored it, leaking the DummyLM into
    # every subsequently run test; restore it in a finally block.
    lm = DummyLM(["Initial thoughts", "Finish[blue]"])
    original_lm = dspy.settings.lm
    dspy.settings.configure(lm=lm)
    try:
        # Sanity check: the predictor really has no explicit LM.
        assert student.predictor.lm is None

        # Must construct and compile without AttributeError thanks to the
        # settings.lm fallback.
        bootstrap = BootstrapFinetune(metric=simple_metric)

        # Mock every collaborator that would otherwise need real infrastructure.
        with patch('dspy.teleprompt.bootstrap_finetune.all_predictors_have_lms'), \
             patch('dspy.teleprompt.bootstrap_finetune.prepare_teacher', return_value=teacher), \
             patch('dspy.teleprompt.bootstrap_finetune.bootstrap_trace_data', return_value=[]), \
             patch.object(bootstrap, '_prepare_finetune_data', return_value=([], 'openai')), \
             patch.object(bootstrap, 'finetune_lms') as mock_finetune_lms:

            # finetune_lms returns a mapping from training key to tuned LM;
            # the key is (settings LM, None) because multitask defaults to True.
            mock_finetune_lms.return_value = {(lm, None): lm}

            compiled_student = bootstrap.compile(student, teacher=teacher, trainset=trainset)

            assert compiled_student is not None, "Failed to compile student"
            mock_finetune_lms.assert_called_once()
    finally:
        # Restore the previous global LM so this test has no side effects.
        dspy.settings.configure(lm=original_lm)
76+
77+

0 commit comments

Comments
 (0)