Skip to content

Commit 07158ce

Browse files
poudro, Antoine Sinton, and chenmoneygithub
authored
compute cache key once (#8394)
* compute cache key once
* compute cache key once in async wrapper
* use a copy of request instead of changing function signatures
* add unit test and fix comments
* bring back deepcopy

---------

Co-authored-by: Antoine Sinton <antoine@cnty.ai>
Co-authored-by: chenmoneygithub <chen.qian@databricks.com>
1 parent 46d4422 commit 07158ce

File tree

2 files changed

+43
-3
lines changed

2 files changed

+43
-3
lines changed

dspy/clients/cache.py

Lines changed: 6 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,6 @@ def __init__(
3030
disk_cache_dir: str,
3131
disk_size_limit_bytes: Optional[int] = 1024 * 1024 * 10,
3232
memory_max_entries: Optional[int] = 1000000,
33-
3433
):
3534
"""
3635
Args:
@@ -230,9 +229,11 @@ def sync_wrapper(*args, **kwargs):
230229
return cached_result
231230

232231
# Otherwise, compute and store the result
232+
# Make a copy of the original request in case it's modified in place, e.g., deleting some fields
233+
original_request = copy.deepcopy(modified_request)
233234
result = fn(*args, **kwargs)
234235
# `enable_memory_cache` can be provided at call time to avoid indefinite growth.
235-
cache.put(modified_request, result, ignored_args_for_cache_key, enable_memory_cache)
236+
cache.put(original_request, result, ignored_args_for_cache_key, enable_memory_cache)
236237

237238
return result
238239

@@ -249,8 +250,10 @@ async def async_wrapper(*args, **kwargs):
249250
return cached_result
250251

251252
# Otherwise, compute and store the result
253+
# Make a copy of the original request in case it's modified in place, e.g., deleting some fields
254+
original_request = copy.deepcopy(modified_request)
252255
result = await fn(*args, **kwargs)
253-
cache.put(modified_request, result, ignored_args_for_cache_key, enable_memory_cache)
256+
cache.put(original_request, result, ignored_args_for_cache_key, enable_memory_cache)
254257

255258
return result
256259

tests/clients/test_cache.py

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -280,3 +280,40 @@ async def test_function(prompt, model):
280280
# Call with different arguments should compute again
281281
result3 = await test_function(prompt="Different", model="openai/gpt-4o-mini")
282282
assert result3 == "Response for Different with openai/gpt-4o-mini"
283+
284+
285+
def test_cache_consistency_with_lm_call_modifies_the_request(cache):
286+
"""Test that the cache is consistent with the LM call that modifies the request."""
287+
from dspy.clients.cache import request_cache
288+
289+
# Mock the dspy.cache attribute
290+
with patch("dspy.cache", cache):
291+
# Define a test function
292+
@request_cache()
293+
def test_function(**kwargs):
294+
del kwargs["field_to_delete"]
295+
return kwargs
296+
297+
# First call should compute the result
298+
test_function(field_to_delete="delete", field_to_keep="keep")
299+
300+
# The cache key should use the original request, not the modified one
301+
assert (
302+
cache.get(
303+
{
304+
"field_to_keep": "keep",
305+
"_fn_identifier": f"{test_function.__module__}.{test_function.__qualname__}",
306+
}
307+
)
308+
is None
309+
)
310+
assert (
311+
cache.get(
312+
{
313+
"field_to_keep": "keep",
314+
"field_to_delete": "delete",
315+
"_fn_identifier": f"{test_function.__module__}.{test_function.__qualname__}",
316+
}
317+
)
318+
is not None
319+
)

0 commit comments

Comments (0)