Skip to content

Commit

Permalink
Merge pull request #79 from noskill/flux1
Browse files Browse the repository at this point in the history
fix attention kwargs for flux
  • Loading branch information
Necr0x0Der authored Aug 27, 2024
2 parents 01e3b03 + 8c86566 commit 53af130
Showing 1 changed file with 7 additions and 3 deletions.
10 changes: 7 additions & 3 deletions multigen/pipes.py
Original file line number Diff line number Diff line change
Expand Up @@ -197,9 +197,13 @@ def try_set_scheduler(self, inputs):

def load_lora(self, path, multiplier=1.0):
    """Load LoRA weights into the wrapped pipeline and register their scale.

    Args:
        path: filesystem path or hub id of the LoRA weights, passed straight
            to ``self.pipe.load_lora_weights``.
        multiplier: LoRA scale applied at inference time (default ``1.0``);
            stored under ``"scale"`` in the pipeline call kwargs.
    """
    self.pipe.load_lora_weights(path)
    # Flux pipelines receive the LoRA scale via ``joint_attention_kwargs``;
    # every other pipeline type uses ``cross_attention_kwargs``. Writing to
    # the wrong key would silently drop the multiplier for FLUX models.
    if self.model_type == ModelType.FLUX:
        attention_kw = 'joint_attention_kwargs'
    else:
        attention_kw = 'cross_attention_kwargs'
    # Create the kwargs dict on first use, then record the scale.
    self.pipe_params.setdefault(attention_kw, {})["scale"] = multiplier
    self._loras.append(path)

def add_hypernet(self, path, multiplier=None):
Expand Down

0 comments on commit 53af130

Please sign in to comment.