fix lcm_lora offline

This commit is contained in:
Qing 2024-01-16 22:10:26 +08:00
parent 340bca64b7
commit 8dd3a06945

View File

@ -182,7 +182,7 @@ class ModelManager:
 lcm_lora_loaded = bool(self.model.model.get_list_adapters())
 if config.sd_lcm_lora:
     if not lcm_lora_loaded:
-        self.model.model.load_lora_weights(self.model.lcm_lora_id)
+        self.model.model.load_lora_weights(self.model.lcm_lora_id, weight_name="pytorch_lora_weights.safetensors")
 else:
     if lcm_lora_loaded:
         self.model.model.disable_lora()