Skip to content

Commit ef93670

Browse files
committed
fix: Handle setting LoRA adapter when none already set
1 parent 04de669 commit ef93670

File tree

1 file changed

+2
-0
lines changed

1 file changed

+2
-0
lines changed

llama_cpp/llama.py

Lines changed: 2 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -637,6 +637,8 @@ def set_lora_adapter_scale(self, lora_path: str, scale: float, *, load_if_needed
637637
# Set scale in context
638638
self._ctx.lora_adapter_set(lora_adapter, scale)
639639

640+
if self.lora_adapters is None:
641+
self.lora_adapters = {}
640642
self.lora_adapters[lora_path] = scale
641643
self._lora_adapters_active = tuple(sorted(
642644
filter(lambda path_scale: path_scale[1] != 0.0, self.lora_adapters.items())

0 commit comments

Comments (0)