diff --git a/invokeai/app/invocations/flux_lora_loader.py b/invokeai/app/invocations/flux_lora_loader.py
index f4181032d72..ef364bd0fd6 100644
--- a/invokeai/app/invocations/flux_lora_loader.py
+++ b/invokeai/app/invocations/flux_lora_loader.py
@@ -110,14 +110,14 @@ def invoke(self, context: InvocationContext) -> FluxLoRALoaderOutput:
     title="FLUX LoRA Collection Loader",
     tags=["lora", "model", "flux"],
     category="model",
-    version="1.2.0",
+    version="1.3.0",
     classification=Classification.Prototype,
 )
 class FLUXLoRACollectionLoader(BaseInvocation):
     """Applies a collection of LoRAs to a FLUX transformer."""
 
-    loras: LoRAField | list[LoRAField] = InputField(
-        description="LoRA models and weights. May be a single LoRA or collection.", title="LoRAs"
+    loras: Optional[LoRAField | list[LoRAField]] = InputField(
+        default=None, description="LoRA models and weights. May be a single LoRA or collection.", title="LoRAs"
     )
 
     transformer: Optional[TransformerField] = InputField(
@@ -144,7 +144,19 @@ def invoke(self, context: InvocationContext) -> FluxLoRALoaderOutput:
         loras = self.loras if isinstance(self.loras, list) else [self.loras]
         added_loras: list[str] = []
 
+        if self.transformer is not None:
+            output.transformer = self.transformer.model_copy(deep=True)
+
+        if self.clip is not None:
+            output.clip = self.clip.model_copy(deep=True)
+
+        if self.t5_encoder is not None:
+            output.t5_encoder = self.t5_encoder.model_copy(deep=True)
+
         for lora in loras:
+            if lora is None:
+                continue
+            assert type(lora) is LoRAField
             if lora.lora.key in added_loras:
                 continue
 
@@ -155,19 +167,13 @@ def invoke(self, context: InvocationContext) -> FluxLoRALoaderOutput:
 
             added_loras.append(lora.lora.key)
 
-            if self.transformer is not None:
-                if output.transformer is None:
-                    output.transformer = self.transformer.model_copy(deep=True)
+            if self.transformer is not None and output.transformer is not None:
                 output.transformer.loras.append(lora)
 
-            if self.clip is not None:
-                if output.clip is None:
-                    output.clip = self.clip.model_copy(deep=True)
+            if self.clip is not None and output.clip is not None:
                 output.clip.loras.append(lora)
 
-            if self.t5_encoder is not None:
-                if output.t5_encoder is None:
-                    output.t5_encoder = self.t5_encoder.model_copy(deep=True)
+            if self.t5_encoder is not None and output.t5_encoder is not None:
                 output.t5_encoder.loras.append(lora)
 
         return output
diff --git a/invokeai/app/invocations/model.py b/invokeai/app/invocations/model.py
index cdb4b5ccca6..a893d13fdfd 100644
--- a/invokeai/app/invocations/model.py
+++ b/invokeai/app/invocations/model.py
@@ -206,7 +206,7 @@ def invoke(self, context: InvocationContext) -> LoRALoaderOutput:
         lora_key = self.lora.key
 
         if not context.models.exists(lora_key):
-            raise Exception(f"Unkown lora: {lora_key}!")
+            raise Exception(f"Unknown lora: {lora_key}!")
 
         if self.unet is not None and any(lora.lora.key == lora_key for lora in self.unet.loras):
             raise Exception(f'LoRA "{lora_key}" already applied to unet')
@@ -257,12 +257,12 @@ def invoke(self, context: InvocationContext) -> LoRASelectorOutput:
         return LoRASelectorOutput(lora=LoRAField(lora=self.lora, weight=self.weight))
 
 
-@invocation("lora_collection_loader", title="LoRA Collection Loader", tags=["model"], category="model", version="1.0.0")
+@invocation("lora_collection_loader", title="LoRA Collection Loader", tags=["model"], category="model", version="1.1.0")
 class LoRACollectionLoader(BaseInvocation):
     """Applies a collection of LoRAs to the provided UNet and CLIP models."""
 
-    loras: LoRAField | list[LoRAField] = InputField(
-        description="LoRA models and weights. May be a single LoRA or collection.", title="LoRAs"
+    loras: Optional[LoRAField | list[LoRAField]] = InputField(
+        default=None, description="LoRA models and weights. May be a single LoRA or collection.", title="LoRAs"
     )
     unet: Optional[UNetField] = InputField(
         default=None,
@@ -282,7 +282,15 @@ def invoke(self, context: InvocationContext) -> LoRALoaderOutput:
         loras = self.loras if isinstance(self.loras, list) else [self.loras]
         added_loras: list[str] = []
 
+        if self.unet is not None:
+            output.unet = self.unet.model_copy(deep=True)
+        if self.clip is not None:
+            output.clip = self.clip.model_copy(deep=True)
+
         for lora in loras:
+            if lora is None:
+                continue
+            assert type(lora) is LoRAField
             if lora.lora.key in added_loras:
                 continue
 
@@ -293,14 +301,10 @@ def invoke(self, context: InvocationContext) -> LoRALoaderOutput:
 
             added_loras.append(lora.lora.key)
 
-            if self.unet is not None:
-                if output.unet is None:
-                    output.unet = self.unet.model_copy(deep=True)
+            if self.unet is not None and output.unet is not None:
                 output.unet.loras.append(lora)
 
-            if self.clip is not None:
-                if output.clip is None:
-                    output.clip = self.clip.model_copy(deep=True)
+            if self.clip is not None and output.clip is not None:
                 output.clip.loras.append(lora)
 
         return output
@@ -400,13 +404,13 @@ def invoke(self, context: InvocationContext) -> SDXLLoRALoaderOutput:
     title="SDXL LoRA Collection Loader",
     tags=["model"],
     category="model",
-    version="1.0.0",
+    version="1.1.0",
 )
 class SDXLLoRACollectionLoader(BaseInvocation):
     """Applies a collection of SDXL LoRAs to the provided UNet and CLIP models."""
 
-    loras: LoRAField | list[LoRAField] = InputField(
-        description="LoRA models and weights. May be a single LoRA or collection.", title="LoRAs"
+    loras: Optional[LoRAField | list[LoRAField]] = InputField(
+        default=None, description="LoRA models and weights. May be a single LoRA or collection.", title="LoRAs"
     )
     unet: Optional[UNetField] = InputField(
         default=None,
@@ -432,7 +436,19 @@ def invoke(self, context: InvocationContext) -> SDXLLoRALoaderOutput:
         loras = self.loras if isinstance(self.loras, list) else [self.loras]
         added_loras: list[str] = []
 
+        if self.unet is not None:
+            output.unet = self.unet.model_copy(deep=True)
+
+        if self.clip is not None:
+            output.clip = self.clip.model_copy(deep=True)
+
+        if self.clip2 is not None:
+            output.clip2 = self.clip2.model_copy(deep=True)
+
         for lora in loras:
+            if lora is None:
+                continue
+            assert type(lora) is LoRAField
             if lora.lora.key in added_loras:
                 continue
 
@@ -443,19 +459,13 @@ def invoke(self, context: InvocationContext) -> SDXLLoRALoaderOutput:
 
             added_loras.append(lora.lora.key)
 
-            if self.unet is not None:
-                if output.unet is None:
-                    output.unet = self.unet.model_copy(deep=True)
+            if self.unet is not None and output.unet is not None:
                 output.unet.loras.append(lora)
 
-            if self.clip is not None:
-                if output.clip is None:
-                    output.clip = self.clip.model_copy(deep=True)
+            if self.clip is not None and output.clip is not None:
                 output.clip.loras.append(lora)
 
-            if self.clip2 is not None:
-                if output.clip2 is None:
-                    output.clip2 = self.clip2.model_copy(deep=True)
+            if self.clip2 is not None and output.clip2 is not None:
                 output.clip2.loras.append(lora)
 
         return output
@@ -473,7 +483,7 @@ def invoke(self, context: InvocationContext) -> VAEOutput:
         key = self.vae_model.key
 
         if not context.models.exists(key):
-            raise Exception(f"Unkown vae: {key}!")
+            raise Exception(f"Unknown vae: {key}!")
 
         return VAEOutput(vae=VAEField(vae=self.vae_model))
 
diff --git a/invokeai/frontend/web/src/services/api/schema.ts b/invokeai/frontend/web/src/services/api/schema.ts
index c8a506a370e..174e937421d 100644
--- a/invokeai/frontend/web/src/services/api/schema.ts
+++ b/invokeai/frontend/web/src/services/api/schema.ts
@@ -6201,7 +6201,7 @@ export type components = {
        * @description LoRA models and weights. May be a single LoRA or collection.
        * @default null
        */
-      loras?: components["schemas"]["LoRAField"] | components["schemas"]["LoRAField"][];
+      loras?: components["schemas"]["LoRAField"] | components["schemas"]["LoRAField"][] | null;
       /**
        * Transformer
        * @description Transformer
@@ -12206,7 +12206,7 @@ export type components = {
        * @description LoRA models and weights. May be a single LoRA or collection.
        * @default null
        */
-      loras?: components["schemas"]["LoRAField"] | components["schemas"]["LoRAField"][];
+      loras?: components["schemas"]["LoRAField"] | components["schemas"]["LoRAField"][] | null;
       /**
        * UNet
        * @description UNet (scheduler, LoRAs)
@@ -16054,7 +16054,7 @@ export type components = {
        * @description LoRA models and weights. May be a single LoRA or collection.
        * @default null
        */
-      loras?: components["schemas"]["LoRAField"] | components["schemas"]["LoRAField"][];
+      loras?: components["schemas"]["LoRAField"] | components["schemas"]["LoRAField"][] | null;
       /**
        * UNet
        * @description UNet (scheduler, LoRAs)