Skip to content

Commit 925006f

Browse files
authored
BUG: Reduce optimization memory footprint (#884)
* Reduce memory footprint: memoizing the whole stats object made copies of the contained `_strategy`, which held duplicate references — at least to the whole data ... Now we memoize just the float value of the maximization criterion.
1 parent ae3d69f commit 925006f

File tree

1 file changed

+8
-4
lines changed

1 file changed

+8
-4
lines changed

backtesting/backtesting.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1567,16 +1567,20 @@ def _optimize_sambo() -> Union[pd.Series,
15671567
else:
15681568
dimensions.append(values.tolist())
15691569

1570-
# Avoid recomputing re-evaluations:
1571-
memoized_run = lru_cache()(lambda tup: self.run(**dict(tup))) # XXX: Reeval if this needed?
1570+
# Avoid recomputing re-evaluations
1571+
@lru_cache()
1572+
def memoized_run(tup):
1573+
nonlocal maximize, self
1574+
stats = self.run(**dict(tup))
1575+
return -maximize(stats)
1576+
15721577
progress = iter(_tqdm(repeat(None), total=max_tries, leave=False, desc='Backtest.optimize'))
15731578
_names = tuple(kwargs.keys())
15741579

15751580
def objective_function(x):
15761581
nonlocal progress, memoized_run, constraint, _names
15771582
next(progress)
1578-
res = memoized_run(tuple(zip(_names, x)))
1579-
value = -maximize(res)
1583+
value = memoized_run(tuple(zip(_names, x)))
15801584
return 0 if np.isnan(value) else value
15811585

15821586
def cons(x):

0 commit comments

Comments
 (0)