We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent 7752362 · commit 04de669 (copy full SHA for 04de669)
llama_cpp/llama.py
@@ -409,8 +409,9 @@ def __init__(
         # Immutable value representing active adapters for use as a key
         self._lora_adapters_active: Tuple[Tuple[str, float]] = ()

-        for lora_path, scale in self.lora_adapters.copy().items():
-            self.set_lora_adapter_scale(lora_path, scale, load_if_needed=True)
+        if self.lora_adapters:
+            for lora_path, scale in self.lora_adapters.copy().items():
+                self.set_lora_adapter_scale(lora_path, scale, load_if_needed=True)

         if self.verbose:
             print(llama_cpp.llama_print_system_info().decode("utf-8"), file=sys.stderr)
0 commit comments