mirror of https://github.com/easydiffusion/easydiffusion.git (synced 2024-11-25 09:44:25 +01:00)

Remove unnecessary hotfix

parent 35c75115de
commit 83c34ea52f
@@ -35,8 +35,6 @@ call conda activate
 @REM remove the old version of the dev console script, if it's still present
 if exist "Open Developer Console.cmd" del "Open Developer Console.cmd"
 
-@REM @call python -c "import os; import shutil; frm = 'sd-ui-files\\ui\\hotfix\\9c24e6cd9f499d02c4f21a033736dabd365962dc80fe3aeb57a8f85ea45a20a3.26fead7ea4f0f843f6eb4055dfd25693f1a71f3c6871b184042d4b126244e142'; dst = os.path.join(os.path.expanduser('~'), '.cache', 'huggingface', 'transformers', '9c24e6cd9f499d02c4f21a033736dabd365962dc80fe3aeb57a8f85ea45a20a3.26fead7ea4f0f843f6eb4055dfd25693f1a71f3c6871b184042d4b126244e142'); shutil.copyfile(frm, dst) if os.path.exists(dst) else print(''); print('Hotfixed broken JSON file from OpenAI');"
-
 @rem create the stable-diffusion folder, to work with legacy installations
 if not exist "stable-diffusion" mkdir stable-diffusion
 cd stable-diffusion
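For reference, the commented-out hotfix one-liner removed above expands to roughly the following Python (a readable sketch of what the hotfix did, not part of this commit). It copies the known-good config.json shipped with the installer over the copy in the old Hugging Face transformers cache, and only does so when a cached copy already exists:

    # Readable expansion of the removed one-liner (sketch only, not part of the commit)
    import os
    import shutil

    cache_name = ("9c24e6cd9f499d02c4f21a033736dabd365962dc80fe3aeb57a8f85ea45a20a3."
                  "26fead7ea4f0f843f6eb4055dfd25693f1a71f3c6871b184042d4b126244e142")

    # Known-good copy bundled with the installer
    frm = os.path.join("sd-ui-files", "ui", "hotfix", cache_name)

    # Location of the (possibly broken) copy in the legacy transformers cache
    dst = os.path.join(os.path.expanduser("~"), ".cache", "huggingface", "transformers", cache_name)

    # Overwrite only if a cached copy is already present; otherwise do nothing
    if os.path.exists(dst):
        shutil.copyfile(frm, dst)
    print("Hotfixed broken JSON file from OpenAI")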
@@ -19,11 +19,6 @@ if [ -e "open_dev_console.sh" ]; then
     rm "open_dev_console.sh"
 fi
 
-# python -c "import os; import shutil; frm = 'sd-ui-files/ui/hotfix/9c24e6cd9f499d02c4f21a033736dabd365962dc80fe3aeb57a8f85ea45a20a3.26fead7ea4f0f843f6eb4055dfd25693f1a71f3c6871b184042d4b126244e142'; dst = os.path.join(os.path.expanduser('~'), '.cache', 'huggingface', 'transformers', '9c24e6cd9f499d02c4f21a033736dabd365962dc80fe3aeb57a8f85ea45a20a3.26fead7ea4f0f843f6eb4055dfd25693f1a71f3c6871b184042d4b126244e142'); shutil.copyfile(frm, dst) if os.path.exists(dst) else print(''); print('Hotfixed broken JSON file from OpenAI');"
-
-# Caution, this file will make your eyes and brain bleed. It's such an unholy mess.
-# Note to self: Please rewrite this in Python. For the sake of your own sanity.
-
 # set the correct installer path (current vs legacy)
 if [ -e "installer_files/env" ]; then
     export INSTALL_ENV_DIR="$(pwd)/installer_files/env"
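The long hashed filename in both removed one-liners appears to follow the naming scheme of the legacy Hugging Face transformers cache, where a downloaded file is stored as sha256(download URL) + "." + sha256(HTTP ETag). A minimal sketch of that scheme, assuming it applies here (the URL and ETag below are placeholders, not the real values behind this cache entry):

    # Sketch of the legacy transformers cache filename scheme (assumed; placeholder inputs)
    from hashlib import sha256

    def legacy_cache_filename(url: str, etag: str) -> str:
        # filename = sha256(url) + "." + sha256(etag)
        return (sha256(url.encode("utf-8")).hexdigest()
                + "." + sha256(etag.encode("utf-8")).hexdigest())

    # Example with placeholder values only
    print(legacy_cache_filename("https://example.com/config.json", '"0123456789abcdef"'))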
@@ -1,171 +0,0 @@
-{
-  "_name_or_path": "clip-vit-large-patch14/",
-  "architectures": [
-    "CLIPModel"
-  ],
-  "initializer_factor": 1.0,
-  "logit_scale_init_value": 2.6592,
-  "model_type": "clip",
-  "projection_dim": 768,
-  "text_config": {
-    "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "bos_token_id": 0,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "dropout": 0.0,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": 2,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "quick_gelu",
-    "hidden_size": 768,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "initializer_factor": 1.0,
-    "initializer_range": 0.02,
-    "intermediate_size": 3072,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-05,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "max_position_embeddings": 77,
-    "min_length": 0,
-    "model_type": "clip_text_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 12,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 12,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": 1,
-    "prefix": null,
-    "problem_type": null,
-    "projection_dim" : 768,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
-    "transformers_version": "4.16.0.dev0",
-    "use_bfloat16": false,
-    "vocab_size": 49408
-  },
-  "text_config_dict": {
-    "hidden_size": 768,
-    "intermediate_size": 3072,
-    "num_attention_heads": 12,
-    "num_hidden_layers": 12,
-    "projection_dim": 768
-  },
-  "torch_dtype": "float32",
-  "transformers_version": null,
-  "vision_config": {
-    "_name_or_path": "",
-    "add_cross_attention": false,
-    "architectures": null,
-    "attention_dropout": 0.0,
-    "bad_words_ids": null,
-    "bos_token_id": null,
-    "chunk_size_feed_forward": 0,
-    "cross_attention_hidden_size": null,
-    "decoder_start_token_id": null,
-    "diversity_penalty": 0.0,
-    "do_sample": false,
-    "dropout": 0.0,
-    "early_stopping": false,
-    "encoder_no_repeat_ngram_size": 0,
-    "eos_token_id": null,
-    "finetuning_task": null,
-    "forced_bos_token_id": null,
-    "forced_eos_token_id": null,
-    "hidden_act": "quick_gelu",
-    "hidden_size": 1024,
-    "id2label": {
-      "0": "LABEL_0",
-      "1": "LABEL_1"
-    },
-    "image_size": 224,
-    "initializer_factor": 1.0,
-    "initializer_range": 0.02,
-    "intermediate_size": 4096,
-    "is_decoder": false,
-    "is_encoder_decoder": false,
-    "label2id": {
-      "LABEL_0": 0,
-      "LABEL_1": 1
-    },
-    "layer_norm_eps": 1e-05,
-    "length_penalty": 1.0,
-    "max_length": 20,
-    "min_length": 0,
-    "model_type": "clip_vision_model",
-    "no_repeat_ngram_size": 0,
-    "num_attention_heads": 16,
-    "num_beam_groups": 1,
-    "num_beams": 1,
-    "num_hidden_layers": 24,
-    "num_return_sequences": 1,
-    "output_attentions": false,
-    "output_hidden_states": false,
-    "output_scores": false,
-    "pad_token_id": null,
-    "patch_size": 14,
-    "prefix": null,
-    "problem_type": null,
-    "projection_dim" : 768,
-    "pruned_heads": {},
-    "remove_invalid_values": false,
-    "repetition_penalty": 1.0,
-    "return_dict": true,
-    "return_dict_in_generate": false,
-    "sep_token_id": null,
-    "task_specific_params": null,
-    "temperature": 1.0,
-    "tie_encoder_decoder": false,
-    "tie_word_embeddings": true,
-    "tokenizer_class": null,
-    "top_k": 50,
-    "top_p": 1.0,
-    "torch_dtype": null,
-    "torchscript": false,
-    "transformers_version": "4.16.0.dev0",
-    "use_bfloat16": false
-  },
-  "vision_config_dict": {
-    "hidden_size": 1024,
-    "intermediate_size": 4096,
-    "num_attention_heads": 16,
-    "num_hidden_layers": 24,
-    "patch_size": 14,
-    "projection_dim": 768
-  }
-}
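The deleted file above is the bundled hotfix copy of the config.json for OpenAI's clip-vit-large-patch14; the hotfix existed because the copy downloaded into the cache could end up as broken JSON. A minimal sketch of checking such a file with transformers, assuming a local copy saved under a placeholder path:

    # Sketch: confirm a local copy of this config parses as a CLIP config (path is a placeholder)
    from transformers import CLIPConfig

    config = CLIPConfig.from_json_file("config.json")
    print(config.projection_dim)             # 768
    print(config.text_config.hidden_size)    # 768
    print(config.vision_config.hidden_size)  # 1024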