import re

import gradio as gr
import torch
from fastapi import FastAPI

import extra_networks_lora
import lora  # noqa:F401
import network
import networks
import ui_extra_networks_lora
from modules import script_callbacks, ui_extra_networks, extra_networks, shared

def unload():
    """Restore the original torch.nn layer methods patched by this extension.

    Registered via script_callbacks.on_script_unloaded.  Undoes every
    forward / _load_from_state_dict replacement that the module-level code
    installs, reading the originals back from the ``*_before_network``
    attributes stashed on torch.nn.

    Fix: GroupNorm and LayerNorm are patched at import time but were never
    restored here, leaving their hooks installed after the extension was
    unloaded.  They are now restored along with the other layers.
    """
    torch.nn.Linear.forward = torch.nn.Linear_forward_before_network
    torch.nn.Linear._load_from_state_dict = torch.nn.Linear_load_state_dict_before_network
    torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_network
    torch.nn.Conv2d._load_from_state_dict = torch.nn.Conv2d_load_state_dict_before_network
    torch.nn.GroupNorm.forward = torch.nn.GroupNorm_forward_before_network
    torch.nn.GroupNorm._load_from_state_dict = torch.nn.GroupNorm_load_state_dict_before_network
    torch.nn.LayerNorm.forward = torch.nn.LayerNorm_forward_before_network
    torch.nn.LayerNorm._load_from_state_dict = torch.nn.LayerNorm_load_state_dict_before_network
    torch.nn.MultiheadAttention.forward = torch.nn.MultiheadAttention_forward_before_network
    torch.nn.MultiheadAttention._load_from_state_dict = torch.nn.MultiheadAttention_load_state_dict_before_network
def before_ui():
    """Register the Lora page and prompt extra-network handlers before UI build."""
    ui_extra_networks.register_page(ui_extra_networks_lora.ExtraNetworksPageLora())

    lora_extra_network = extra_networks_lora.ExtraNetworkLora()
    networks.extra_network_lora = lora_extra_network
    extra_networks.register_extra_network(lora_extra_network)
    # "lyco" is accepted as an alternative prompt keyword (LyCORIS-style syntax).
    extra_networks.register_extra_network_alias(lora_extra_network, "lyco")
# Monkey-patch the torch.nn layer classes so Lora weights can be injected
# during forward passes and state-dict loading.  For each layer the original
# methods are stashed on torch.nn under ``<Layer>_*_before_network`` names —
# guarded by hasattr so a script reload does not overwrite the true originals
# with already-patched versions — and unload() restores them from there.
for _layer_name in ("Linear", "Conv2d", "GroupNorm", "LayerNorm", "MultiheadAttention"):
    _layer = getattr(torch.nn, _layer_name)

    _saved_forward = f"{_layer_name}_forward_before_network"
    if not hasattr(torch.nn, _saved_forward):
        setattr(torch.nn, _saved_forward, _layer.forward)

    _saved_load = f"{_layer_name}_load_state_dict_before_network"
    if not hasattr(torch.nn, _saved_load):
        setattr(torch.nn, _saved_load, _layer._load_from_state_dict)

    _layer.forward = getattr(networks, f"network_{_layer_name}_forward")
    _layer._load_from_state_dict = getattr(networks, f"network_{_layer_name}_load_state_dict")

# Keep the module namespace free of loop temporaries.
del _layer_name, _layer, _saved_forward, _saved_load

script_callbacks.on_model_loaded(networks.assign_network_names_to_compvis_modules)
script_callbacks.on_script_unloaded(unload)
script_callbacks.on_before_ui(before_ui)
script_callbacks.on_infotext_pasted(networks.infotext_pasted)
# Settings shown in the "Extra Networks" section of the options UI.
_extra_networks_options = {
    "sd_lora": shared.OptionInfo("None", "Add network to prompt", gr.Dropdown, lambda: {"choices": ["None", *networks.available_networks]}, refresh=networks.list_available_networks),
    "lora_preferred_name": shared.OptionInfo("Alias from file", "When adding to prompt, refer to Lora by", gr.Radio, {"choices": ["Alias from file", "Filename"]}),
    "lora_add_hashes_to_infotext": shared.OptionInfo(True, "Add Lora hashes to infotext"),
    "lora_show_all": shared.OptionInfo(False, "Always show all networks on the Lora page").info("otherwise, those detected as for incompatible version of Stable Diffusion will be hidden"),
    "lora_hide_unknown_for_versions": shared.OptionInfo([], "Hide networks of unknown versions for model versions", gr.CheckboxGroup, {"choices": ["SD1", "SD2", "SDXL"]}),
    "lora_in_memory_limit": shared.OptionInfo(0, "Number of Lora networks to keep cached in memory", gr.Number, {"precision": 0}),
}
shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), _extra_networks_options))
# Legacy-behavior toggle kept under the "Compatibility" settings section.
_compatibility_options = {
    "lora_functional": shared.OptionInfo(False, "Lora/Networks: use old method that takes longer when you have multiple Loras active and produces same results as kohya-ss/sd-webui-additional-networks extension"),
}
shared.options_templates.update(shared.options_section(('compatibility', "Compatibility"), _compatibility_options))
def create_lora_json(obj: network.NetworkOnDisk):
    """Build a JSON-serializable description of a network file for the web API."""
    # Maps each response key to the NetworkOnDisk attribute that supplies it
    # ("path" intentionally exposes the on-disk filename).
    attribute_for_key = {
        "name": "name",
        "alias": "alias",
        "path": "filename",
        "metadata": "metadata",
    }
    return {key: getattr(obj, attr) for key, attr in attribute_for_key.items()}
def api_networks(_: gr.Blocks, app: FastAPI):
    """Mount the Lora REST endpoints on the webui's FastAPI application."""

    @app.get("/sdapi/v1/loras")
    async def get_loras():
        # One JSON object per network currently known on disk.
        return [create_lora_json(network_on_disk) for network_on_disk in networks.available_networks.values()]

    @app.post("/sdapi/v1/refresh-loras")
    async def refresh_loras():
        # Rescan the Lora directory and rebuild the available-networks list.
        return networks.list_available_networks()


script_callbacks.on_app_started(api_networks)
re_lora = re.compile("<lora:([^:]+):")


def infotext_pasted(infotext, d):
    """Rewrite <lora:name:...> prompt references using the "Lora hashes" infotext.

    When generation parameters are pasted, the prompt may reference Loras by a
    name from another machine.  If the infotext carries a "Lora hashes" field
    ("alias: shorthash, alias: shorthash, ..."), look each referenced alias up
    by hash and substitute the locally-known alias; unknown references are
    left untouched.  Mutates d["Prompt"] in place.

    Fix: a malformed hashes entry without a ':' (hand-edited infotext)
    previously raised IndexError on the unconditional x[1]; such entries are
    now skipped.
    """
    hashes = d.get("Lora hashes")
    if not hashes:
        return

    pairs = [entry.strip().split(':', 1) for entry in hashes.split(",")]
    # Keep only well-formed "alias: shorthash" pairs.
    hashes = {pair[0].strip().replace(",", ""): pair[1].strip() for pair in pairs if len(pair) == 2}

    def network_replacement(m):
        alias = m.group(1)
        shorthash = hashes.get(alias)
        if shorthash is None:
            return m.group(0)

        network_on_disk = networks.available_network_hash_lookup.get(shorthash)
        if network_on_disk is None:
            return m.group(0)

        return f'<lora:{network_on_disk.get_alias()}:'

    d["Prompt"] = re.sub(re_lora, network_replacement, d["Prompt"])
# Rewrite Lora references in pasted prompts to locally-known aliases using
# the "Lora hashes" infotext field (separate from networks.infotext_pasted,
# which is registered above for other infotext handling).
script_callbacks.on_infotext_pasted(infotext_pasted)

# Re-apply the in-memory cache limit whenever the user changes the option.
shared.opts.onchange("lora_in_memory_limit", networks.purge_networks_from_memory)