
Commit

Trying to fix issues #8 and #11; not sure if it works
a2569875 committed Jun 27, 2023
1 parent dd29e05 commit db3552a
Showing 2 changed files with 28 additions and 1 deletion.
6 changes: 5 additions & 1 deletion composable_lora_function_handler.py
@@ -24,7 +24,11 @@ def on_enable():
        composable_lycoris.backup_MultiheadAttention_forward_before_lyco = torch.nn.MultiheadAttention_forward_before_lyco
    if hasattr(torch.nn, 'MultiheadAttention_load_state_dict_before_lyco'):
        composable_lycoris.backup_MultiheadAttention_load_state_dict_before_lyco = torch.nn.MultiheadAttention_load_state_dict_before_lyco

    if hasattr(composable_lora, 'lyco_notfound'):
        if composable_lora.lyco_notfound:
            torch.nn.Linear_forward_before_lyco = composable_lora.Linear_forward_before_clora
            torch.nn.Conv2d_forward_before_lyco = composable_lora.Conv2d_forward_before_clora
            torch.nn.MultiheadAttention_forward_before_lyco = composable_lora.MultiheadAttention_forward_before_clora
    torch.nn.Linear.forward = composable_lora.lora_Linear_forward
    torch.nn.Conv2d.forward = composable_lora.lora_Conv2d_forward
    torch.nn.MultiheadAttention.forward = lycoris.lyco_MultiheadAttention_forward
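Note: the added block in this hunk only matters when LyCORIS is not installed. In that case `on_enable()` points the `torch.nn.*_forward_before_lyco` attributes at the `*_forward_before_clora` backups saved by the script below, before re-installing the extension's own forward hooks. Below is a minimal, self-contained sketch of this backup/patch/restore monkey-patch pattern; the attribute `Linear_forward_original_demo` and the wrapper `patched_linear_forward` are illustrative names, not part of the extension.

# Minimal sketch (not the extension's actual code) of the pattern used above:
# keep a reference to the original forward, install a wrapper, restore later.
import torch
import torch.nn as nn

# Save the original implementation once, analogous to *_forward_before_lyco /
# *_forward_before_clora above, under a hypothetical attribute name.
if not hasattr(nn, 'Linear_forward_original_demo'):
    nn.Linear_forward_original_demo = nn.Linear.forward

def patched_linear_forward(self, x):
    # A real hook would compose LoRA weights here; this demo only delegates
    # to the saved original, so behaviour is unchanged.
    return nn.Linear_forward_original_demo(self, x)

nn.Linear.forward = patched_linear_forward            # enable the hook
out = nn.Linear(4, 2)(torch.randn(1, 4))               # runs through the wrapper
nn.Linear.forward = nn.Linear_forward_original_demo    # restore, like unload()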
23 changes: 23 additions & 0 deletions scripts/composable_lora_script.py
@@ -15,6 +15,24 @@ def unload():
    torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_lora
    torch.nn.MultiheadAttention.forward = torch.nn.MultiheadAttention_forward_before_lora

if not hasattr(composable_lora, 'Linear_forward_before_clora'):
    if hasattr(torch.nn, 'Linear_forward_before_lyco'):
        composable_lora.Linear_forward_before_clora = torch.nn.Linear_forward_before_lyco
    else:
        composable_lora.Linear_forward_before_clora = torch.nn.Linear.forward

if not hasattr(composable_lora, 'Conv2d_forward_before_clora'):
    if hasattr(torch.nn, 'Conv2d_forward_before_lyco'):
        composable_lora.Conv2d_forward_before_clora = torch.nn.Conv2d_forward_before_lyco
    else:
        composable_lora.Conv2d_forward_before_clora = torch.nn.Conv2d.forward

if not hasattr(composable_lora, 'MultiheadAttention_forward_before_clora'):
    if hasattr(torch.nn, 'MultiheadAttention_forward_before_lyco'):
        composable_lora.MultiheadAttention_forward_before_clora = torch.nn.MultiheadAttention_forward_before_lyco
    else:
        composable_lora.MultiheadAttention_forward_before_clora = torch.nn.MultiheadAttention.forward

if not hasattr(torch.nn, 'Linear_forward_before_lora'):
    if hasattr(torch.nn, 'Linear_forward_before_lyco'):
        torch.nn.Linear_forward_before_lora = torch.nn.Linear_forward_before_lyco
@@ -33,6 +51,11 @@ def unload():
    else:
        torch.nn.MultiheadAttention_forward_before_lora = torch.nn.MultiheadAttention.forward

if hasattr(torch.nn, 'Linear_forward_before_lyco'):
    composable_lora.lyco_notfound = False
else:
    composable_lora.lyco_notfound = True

torch.nn.Linear.forward = composable_lora.lora_Linear_forward
torch.nn.Conv2d.forward = composable_lora.lora_Conv2d_forward

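Note: the module-level additions in this script do two things. They capture a "before Composable LoRA" reference for each patched forward, preferring the `*_forward_before_lyco` copy if the LyCORIS extension already patched torch.nn and otherwise falling back to the raw method, and they record in `composable_lora.lyco_notfound` whether that LyCORIS copy existed, which is the flag the handler above checks. A rough sketch of the same fallback chain follows, using a stand-in namespace rather than the real `composable_lora` module:

# Minimal sketch (assumed names, not the extension's API) of the fallback
# chain implemented by the added module-level code.
import types
import torch.nn as nn

backups = types.SimpleNamespace()   # stands in for the composable_lora module

# Same shape as the "if not hasattr(..., '*_forward_before_clora')" blocks above.
if not hasattr(backups, 'Linear_forward_before_clora'):
    if hasattr(nn, 'Linear_forward_before_lyco'):        # LyCORIS patched torch.nn first
        backups.Linear_forward_before_clora = nn.Linear_forward_before_lyco
    else:                                                 # LyCORIS not installed
        backups.Linear_forward_before_clora = nn.Linear.forward

# Mirrors the lyco_notfound flag set at the end of the diff.
backups.lyco_notfound = not hasattr(nn, 'Linear_forward_before_lyco')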
