Skip to content

Commit c85d0a9

Browse files
committed
small fixes
1 parent ad351b3 commit c85d0a9

File tree

2 files changed: +3 additions, -1 deletion

backends/model_converter/convert_model.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@
 extra_keys = ['temb_coefficients_fp32' , 'causal_mask' , 'aux_output_conv.weight' , 'aux_output_conv.bias', 'alphas_cumprod']

 for k in torch_weights['state_dict']:
-    if k not in SD_SHAPES:
+    if k not in SD_SHAPES and k not in extra_keys:
         continue
     np_arr = torch_weights['state_dict'][k]
     key_bytes = np_arr.tobytes()

electron_app/src/StableDiffusion.vue

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -152,11 +152,13 @@ export default {
     if(!this.is_input_avail)
         return;
     let tokens = [49406].concat((get_tokens(prompt_params.prompt))).concat([49407])
+    tokens.filter(n => n != null && n != undefined)
     prompt_params.prompt_tokens = tokens;

     if(prompt_params.negative_prompt)
     {
         let tokens2 = [49406].concat((get_tokens(prompt_params.negative_prompt))).concat([49407])
+        tokens2.filter(n => n != null && n != undefined)
         prompt_params.negative_prompt_tokens = tokens2
     }

0 commit comments

Comments (0)