2022-09-19 16:13:12 -06:00
import os
2023-01-22 00:17:12 -07:00
import re
2023-01-10 23:10:07 -07:00
import shutil
2023-04-02 16:41:55 -06:00
import json
2022-09-19 16:13:12 -06:00
2022-09-11 02:31:16 -06:00
2022-09-25 17:22:12 -06:00
import torch
2022-09-27 01:44:00 -06:00
import tqdm
2022-09-25 17:22:12 -06:00
2023-07-31 23:27:54 -06:00
from modules import shared , images , sd_models , sd_vae , sd_models_config , errors
2023-01-23 04:50:20 -07:00
from modules . ui_common import plaintext_to_html
2022-09-28 15:59:44 -06:00
import gradio as gr
2022-11-27 05:51:29 -07:00
import safetensors . torch
2022-09-13 10:23:55 -06:00
2022-09-11 02:31:16 -06:00
2022-09-17 00:07:07 -06:00
def run_pnginfo(image):
    """Extract generation parameters embedded in *image* and render them as HTML.

    Returns a 3-tuple: an empty string (placeholder output), the raw geninfo
    text read from the image, and an HTML listing of every metadata item.
    """
    if image is None:
        return '', '', ''

    geninfo, items = images.read_info_from_image(image)
    items = {**{'parameters': geninfo}, **items}

    # Render one <div> section per metadata entry, then join them.
    sections = []
    for key, text in items.items():
        sections.append(f"""
<div>
<p><b>{plaintext_to_html(str(key))}</b></p>
<p>{plaintext_to_html(str(text))}</p>
</div>
""".strip() + "\n")

    info = "".join(sections)
    if len(info) == 0:
        message = "Nothing found in the image."
        info = f"<div><p>{message}<p></div>"

    return '', geninfo, info
2022-09-25 17:22:12 -06:00
2023-01-10 23:10:07 -07:00
def create_config(ckpt_result, config_source, a, b, c):
    """Copy a .yaml config file next to a merged checkpoint.

    *config_source* selects which source model's config to use: 0 = first
    available among a/b/c, 1 = b, 2 = c; anything else means no config.
    Does nothing when no suitable (non-default) config is found.
    """

    def config(x):
        # Non-default config found near checkpoint *x*, or None.
        found = sd_models_config.find_checkpoint_config_near_filename(x) if x else None
        return None if found == shared.sd_default_config else found

    selectors = {
        0: lambda: config(a) or config(b) or config(c),
        1: lambda: config(b),
        2: lambda: config(c),
    }
    chooser = selectors.get(config_source)
    cfg = chooser() if chooser is not None else None

    if cfg is None:
        return

    checkpoint_filename = os.path.splitext(ckpt_result)[0] + ".yaml"

    print("Copying config:")
    print("from:", cfg)
    print("to:", checkpoint_filename)
    shutil.copyfile(cfg, checkpoint_filename)
2023-01-19 08:24:17 -07:00
checkpoint_dict_skip_on_merge = [ " cond_stage_model.transformer.text_model.embeddings.position_ids " ]
2023-01-19 00:39:51 -07:00
2023-01-19 02:12:09 -07:00
def to_half(tensor, enable):
    """Return *tensor* cast to fp16 when *enable* is set and it is fp32; otherwise return it unchanged."""
    should_cast = enable and tensor.dtype == torch.float
    return tensor.half() if should_cast else tensor
2023-07-31 23:27:54 -06:00
def read_metadata(primary_model_name, secondary_model_name, tertiary_model_name):
    """Merge the metadata dicts of up to three known checkpoints into one JSON string.

    Unknown checkpoint names are silently skipped; later models' keys
    overwrite earlier ones.  Returns pretty-printed JSON.
    """
    merged = {}
    for name in (primary_model_name, secondary_model_name, tertiary_model_name):
        info = sd_models.checkpoints_list.get(name, None)
        if info is not None:
            merged.update(info.metadata)

    return json.dumps(merged, indent=4, ensure_ascii=False)
def run_modelmerger(id_task, primary_model_name, secondary_model_name, tertiary_model_name, interp_method, multiplier, save_as_half, custom_name, checkpoint_format, config_source, bake_in_vae, discard_weights, save_metadata, add_merge_recipe, copy_metadata_fields, metadata_json):
    """Merge up to three checkpoints (A/B/C) and save the result as a new model file.

    *interp_method* selects the formula: "Weighted sum" blends A and B by
    *multiplier*; "Add difference" adds multiplier*(B-C) onto A; "No
    interpolation" re-saves A alone.  Optionally bakes a VAE into the result,
    discards weights matching the *discard_weights* regex, converts tensors to
    fp16, and embeds metadata / a merge recipe (safetensors output only).
    Progress and status are reported through shared.state.

    Returns a list of four gr.Dropdown updates (refreshed checkpoint choices)
    followed by a status message string.

    Fix vs. original: error-context string "readin metadata from json"
    corrected to "reading metadata from json".
    """
    shared.state.begin(job="model-merge")

    def fail(message):
        # Abort early: surface *message* in the UI, end the job, and return
        # no-op updates for the four dropdowns.
        shared.state.textinfo = message
        shared.state.end()
        return [*[gr.update() for _ in range(4)], message]

    # --- element-wise merge formulas, applied per state-dict tensor ---
    def weighted_sum(theta0, theta1, alpha):
        return ((1 - alpha) * theta0) + (alpha * theta1)

    def get_difference(theta1, theta2):
        return theta1 - theta2

    def add_difference(theta0, theta1_2_diff, alpha):
        return theta0 + (alpha * theta1_2_diff)

    # --- default output-filename builders, used when custom_name is empty ---
    def filename_weighted_sum():
        a = primary_model_info.model_name
        b = secondary_model_info.model_name
        Ma = round(1 - multiplier, 2)
        Mb = round(multiplier, 2)

        return f"{Ma}({a}) + {Mb}({b})"

    def filename_add_difference():
        a = primary_model_info.model_name
        b = secondary_model_info.model_name
        c = tertiary_model_info.model_name
        M = round(multiplier, 2)

        return f"{a} + {M}({b} - {c})"

    def filename_nothing():
        return primary_model_info.model_name

    # interp_method -> (filename builder, B/C pre-merge func, A/B merge func)
    theta_funcs = {
        "Weighted sum": (filename_weighted_sum, None, weighted_sum),
        "Add difference": (filename_add_difference, get_difference, add_difference),
        "No interpolation": (filename_nothing, None, None),
    }
    filename_generator, theta_func1, theta_func2 = theta_funcs[interp_method]
    shared.state.job_count = (1 if theta_func1 else 0) + (1 if theta_func2 else 0)

    # Validate that every model required by the chosen method was selected.
    if not primary_model_name:
        return fail("Failed: Merging requires a primary model.")

    primary_model_info = sd_models.checkpoints_list[primary_model_name]

    if theta_func2 and not secondary_model_name:
        return fail("Failed: Merging requires a secondary model.")

    secondary_model_info = sd_models.checkpoints_list[secondary_model_name] if theta_func2 else None

    if theta_func1 and not tertiary_model_name:
        return fail(f"Failed: Interpolation method ({interp_method}) requires a tertiary model.")

    tertiary_model_info = sd_models.checkpoints_list[tertiary_model_name] if theta_func1 else None

    result_is_inpainting_model = False
    result_is_instruct_pix2pix_model = False

    if theta_func2:
        shared.state.textinfo = "Loading B"
        print(f"Loading {secondary_model_info.filename}...")
        theta_1 = sd_models.read_state_dict(secondary_model_info.filename, map_location='cpu')
    else:
        theta_1 = None

    if theta_func1:
        # "Add difference": fold C into B in-place (theta_1 becomes B - C),
        # so the main loop below only needs A and the transformed B.
        shared.state.textinfo = "Loading C"
        print(f"Loading {tertiary_model_info.filename}...")
        theta_2 = sd_models.read_state_dict(tertiary_model_info.filename, map_location='cpu')

        shared.state.textinfo = 'Merging B and C'
        shared.state.sampling_steps = len(theta_1.keys())
        for key in tqdm.tqdm(theta_1.keys()):
            if key in checkpoint_dict_skip_on_merge:
                continue

            if 'model' in key:
                if key in theta_2:
                    t2 = theta_2.get(key, torch.zeros_like(theta_1[key]))
                    theta_1[key] = theta_func1(theta_1[key], t2)
                else:
                    # Key missing from C: treat C's tensor as zeros.
                    theta_1[key] = torch.zeros_like(theta_1[key])

            shared.state.sampling_step += 1
        del theta_2

        shared.state.nextjob()

    shared.state.textinfo = f"Loading {primary_model_info.filename}..."
    print(f"Loading {primary_model_info.filename}...")
    theta_0 = sd_models.read_state_dict(primary_model_info.filename, map_location='cpu')

    print("Merging...")
    shared.state.textinfo = 'Merging A and B'
    shared.state.sampling_steps = len(theta_0.keys())
    for key in tqdm.tqdm(theta_0.keys()):
        if theta_1 and 'model' in key and key in theta_1:

            if key in checkpoint_dict_skip_on_merge:
                continue  # A's value passes through untouched

            a = theta_0[key]
            b = theta_1[key]

            # this enables merging an inpainting model (A) with another one (B);
            # where normal model would have 4 channels, for latent space, inpainting model would
            # have another 4 channels for unmasked picture's latent space, plus one channel for mask, for a total of 9
            if a.shape != b.shape and a.shape[0:1] + a.shape[2:] == b.shape[0:1] + b.shape[2:]:
                if a.shape[1] == 4 and b.shape[1] == 9:
                    raise RuntimeError("When merging inpainting model with a normal one, A must be the inpainting model.")
                if a.shape[1] == 4 and b.shape[1] == 8:
                    raise RuntimeError("When merging instruct-pix2pix model with a normal one, A must be the instruct-pix2pix model.")

                if a.shape[1] == 8 and b.shape[1] == 4:
                    # Instruct-Pix2Pix model: merge only the channels the models
                    # have in common, otherwise we get a dimension mismatch.
                    theta_0[key][:, 0:4, :, :] = theta_func2(a[:, 0:4, :, :], b, multiplier)
                    result_is_instruct_pix2pix_model = True
                else:
                    assert a.shape[1] == 9 and b.shape[1] == 4, f"Bad dimensions for merged layer {key}: A={a.shape}, B={b.shape}"
                    theta_0[key][:, 0:4, :, :] = theta_func2(a[:, 0:4, :, :], b, multiplier)
                    result_is_inpainting_model = True
            else:
                theta_0[key] = theta_func2(a, b, multiplier)

        theta_0[key] = to_half(theta_0[key], save_as_half)

        shared.state.sampling_step += 1

    del theta_1

    # Optionally overwrite the first_stage_model (VAE) weights with a chosen VAE.
    bake_in_vae_filename = sd_vae.vae_dict.get(bake_in_vae, None)
    if bake_in_vae_filename is not None:
        print(f"Baking in VAE from {bake_in_vae_filename}")
        shared.state.textinfo = 'Baking in VAE'
        vae_dict = sd_vae.load_vae_dict(bake_in_vae_filename, map_location='cpu')

        for key in vae_dict.keys():
            theta_0_key = 'first_stage_model.' + key
            if theta_0_key in theta_0:
                theta_0[theta_0_key] = to_half(vae_dict[key], save_as_half)

        del vae_dict

    # "No interpolation" skipped the merge loop above, so halve tensors here.
    if save_as_half and not theta_func2:
        for key in theta_0.keys():
            theta_0[key] = to_half(theta_0[key], save_as_half)

    # Drop any keys matching the user-supplied regex.
    if discard_weights:
        regex = re.compile(discard_weights)
        for key in list(theta_0):
            if re.search(regex, key):
                theta_0.pop(key, None)

    ckpt_dir = shared.cmd_opts.ckpt_dir or sd_models.model_path

    filename = filename_generator() if custom_name == '' else custom_name
    filename += ".inpainting" if result_is_inpainting_model else ""
    filename += ".instruct-pix2pix" if result_is_instruct_pix2pix_model else ""
    filename += "." + checkpoint_format

    output_modelname = os.path.join(ckpt_dir, filename)

    shared.state.nextjob()
    shared.state.textinfo = "Saving"
    print(f"Saving to {output_modelname}...")

    # Assemble checkpoint metadata: copied source fields, user-supplied JSON,
    # and optionally a machine-readable merge recipe.
    metadata = {}

    if save_metadata and copy_metadata_fields:
        if primary_model_info:
            metadata.update(primary_model_info.metadata)
        if secondary_model_info:
            metadata.update(secondary_model_info.metadata)
        if tertiary_model_info:
            metadata.update(tertiary_model_info.metadata)

    if save_metadata:
        try:
            metadata.update(json.loads(metadata_json))
        except Exception as e:
            errors.display(e, "reading metadata from json")

        metadata["format"] = "pt"

    if save_metadata and add_merge_recipe:
        merge_recipe = {
            "type": "webui",  # indicate this model was merged with webui's built-in merger
            "primary_model_hash": primary_model_info.sha256,
            "secondary_model_hash": secondary_model_info.sha256 if secondary_model_info else None,
            "tertiary_model_hash": tertiary_model_info.sha256 if tertiary_model_info else None,
            "interp_method": interp_method,
            "multiplier": multiplier,
            "save_as_half": save_as_half,
            "custom_name": custom_name,
            "config_source": config_source,
            "bake_in_vae": bake_in_vae,
            "discard_weights": discard_weights,
            "is_inpainting": result_is_inpainting_model,
            "is_instruct_pix2pix": result_is_instruct_pix2pix_model
        }

        sd_merge_models = {}

        def add_model_metadata(checkpoint_info):
            # Record this source model, plus any models already listed in its
            # own sd_merge_models metadata (keeps full merge ancestry).
            checkpoint_info.calculate_shorthash()
            sd_merge_models[checkpoint_info.sha256] = {
                "name": checkpoint_info.name,
                "legacy_hash": checkpoint_info.hash,
                "sd_merge_recipe": checkpoint_info.metadata.get("sd_merge_recipe", None)
            }

            sd_merge_models.update(checkpoint_info.metadata.get("sd_merge_models", {}))

        add_model_metadata(primary_model_info)
        if secondary_model_info:
            add_model_metadata(secondary_model_info)
        if tertiary_model_info:
            add_model_metadata(tertiary_model_info)

        metadata["sd_merge_recipe"] = json.dumps(merge_recipe)
        metadata["sd_merge_models"] = json.dumps(sd_merge_models)

    _, extension = os.path.splitext(output_modelname)
    if extension.lower() == ".safetensors":
        # Only the safetensors format can carry embedded metadata.
        safetensors.torch.save_file(theta_0, output_modelname, metadata=metadata if len(metadata) > 0 else None)
    else:
        torch.save(theta_0, output_modelname)

    sd_models.list_models()
    created_model = next((ckpt for ckpt in sd_models.checkpoints_list.values() if ckpt.name == filename), None)
    if created_model:
        created_model.calculate_shorthash()

    create_config(output_modelname, config_source, primary_model_info, secondary_model_info, tertiary_model_info)

    print(f"Checkpoint saved to {output_modelname}.")
    shared.state.textinfo = "Checkpoint saved"
    shared.state.end()

    return [*[gr.Dropdown.update(choices=sd_models.checkpoint_tiles()) for _ in range(4)], "Checkpoint saved to " + output_modelname]