From f74ac29cefd4daed8199a5c626ce3e85afbe26b7 Mon Sep 17 00:00:00 2001
From: Rahul Chand
Date: Sun, 17 Sep 2023 03:02:28 +0530
Subject: [PATCH] source code

---
 package.json            |   11 +-
 public/all_configs.json |    1 +
 public/favicon.ico      |  Bin 3870 -> 15406 bytes
 public/index.html       |   12 +-
 public/itsovermeme.png  |  Bin 0 -> 113209 bytes
 public/weback.jpg       |  Bin 0 -> 14747 bytes
 src/App.js              | 1238 ++++++++++++++++++++++++++++++++++++++-
 src/index.js            |    1 +
 src/textBox.js          |   18 +
 tailwind.config.js      |   11 +
 10 files changed, 1269 insertions(+), 23 deletions(-)
 create mode 100644 public/all_configs.json
 create mode 100644 public/itsovermeme.png
 create mode 100644 public/weback.jpg
 create mode 100644 src/textBox.js
 create mode 100644 tailwind.config.js

diff --git a/package.json b/package.json
index cf049b5..61ac316 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,7 @@
 {
   "name": "gpu_mem",
   "version": "0.1.0",
+  "homepage": "https://rahulschand.github.io/gpu_poor",
   "private": true,
   "dependencies": {
     "@testing-library/jest-dom": "^5.17.0",
@@ -8,10 +9,15 @@
     "@testing-library/user-event": "^13.5.0",
     "react": "^18.2.0",
     "react-dom": "^18.2.0",
-    "react-scripts": "5.0.1",
+    "react-modal": "^3.16.1",
+    "react-router-dom": "^6.16.0",
+    "react-scripts": "^5.0.1",
+    "tailwind": "^4.0.0",
     "web-vitals": "^2.1.4"
   },
   "scripts": {
+    "predeploy": "npm run build",
+    "deploy": "gh-pages -d build",
     "start": "react-scripts start",
     "build": "react-scripts build",
     "test": "react-scripts test",
@@ -34,5 +40,8 @@
     "last 1 firefox version",
     "last 1 safari version"
   ]
+  },
+  "devDependencies": {
+    "gh-pages": "^6.0.0"
   }
 }
diff --git a/public/all_configs.json b/public/all_configs.json
new file mode 100644
index 0000000..d0dc5cb
--- /dev/null
+++ b/public/all_configs.json
@@ -0,0 +1 @@
+{"NousResearch/Llama-2-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "google/flan-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "google/flan-t5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "t5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "PascalNotin/Tranception_Small": {"architectures": ["TranceptionLMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 12, "vocab_size": 25}, "bigscience/bloom-560m": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 24, "num_attention_heads": 16, "vocab_size": 250880}, "distilgpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 6, "vocab_size": 50257}, "hf-internal-testing/tiny-random-gpt2": {"intermediate_size": 37, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "vocab_size": 1000}, "tiiuae/falcon-7b": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "t5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "bigscience/bloomz-1b1": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 24, "num_attention_heads": 
16, "vocab_size": 250880}, "gpt2-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_layer": 24, "vocab_size": 50257}, "mrm8488/t5-base-finetuned-common_gen": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "lmsys/fastchat-t5-3b-v1.0": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32110}, "gpt2-xl": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_layer": 48, "vocab_size": 50257}, "meta-llama/Llama-2-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "hf-internal-testing/tiny-random-t5": {"d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 1103}, "EleutherAI/pythia-6.9b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "databricks/dolly-v2-3b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50280}, "hf-internal-testing/tiny-random-GPTNeoXForCausalLM": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 32, "intermediate_size": 37, "num_attention_heads": 4, "num_hidden_layers": 5, "vocab_size": 1024}, "meta-llama/Llama-2-7b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "microsoft/DialoGPT-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_layer": 24, "vocab_size": 50257}, "google/mt5-base": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "TheBloke/Wizard-Vicuna-7B-Uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "hf-internal-testing/tiny-random-BloomModel": {"architectures": ["BloomModel"], "hidden_size": 32, "n_head": 4, "n_layer": 5, "vocab_size": 1024}, "google/flan-t5-xxl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "lmsys/vicuna-7b-v1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "huggyllama/llama-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "mrm8488/t5-base-finetuned-summarize-news": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "google/flan-t5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "tiiuae/falcon-40b-instruct": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "ramsrigouthamg/t5_sentence_paraphraser": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, 
"flexudy/t5-base-multi-sentence-doctor": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "lewtun/tiny-random-mt5": {"architectures": ["MT5Model"], "d_ff": 1024, "d_model": 16, "num_heads": 4, "num_layers": 2, "vocab_size": 250112}, "gpt2-large": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_layer": 36, "vocab_size": 50257}, "valhalla/t5-base-e2e-qg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, "sshleifer/tiny-gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2, "n_head": 2, "n_layer": 2, "vocab_size": 50257}, "fxmarty/tiny-llama-fast-tokenizer": {"architectures": ["LlamaForCausalLM"], "hidden_size": 16, "intermediate_size": 64, "num_attention_heads": 4, "num_hidden_layers": 2, "vocab_size": 32000}, "decapoda-research/llama-7b-hf": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "stabilityai/StableBeluga2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "syzymon/long_llama_3b": {"architectures": ["LongLlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "NousResearch/Llama-2-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Llama-2-70B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "tiiuae/falcon-7b-instruct": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "google/flan-t5-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "meta-llama/Llama-2-13b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "petals-team/StableBeluga2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "meta-llama/Llama-2-70b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "EleutherAI/gpt-neox-20b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50432}, "hf-internal-testing/tiny-random-GPTBigCodeForCausalLM": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "nferruz/ProtGPT2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "philschmid/flan-t5-xxl-sharded-fp16": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "HuggingFaceM4/tiny-random-LlamaForCausalLM": {"architectures": ["LlamaForCausalLM"], 
"hidden_size": 16, "intermediate_size": 64, "num_attention_heads": 4, "num_hidden_layers": 2, "vocab_size": 32000}, "hf-internal-testing/tiny-random-BloomForCausalLM": {"architectures": ["BloomForCausalLM"], "hidden_size": 32, "n_head": 4, "n_layer": 5, "vocab_size": 1024}, "hf-internal-testing/tiny-random-GPT2LMHeadModel": {"architectures": ["GPT2LMHeadModel"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "Vamsi/T5_Paraphrase_Paws": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "lmsys/vicuna-7b-v1.3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "meta-llama/Llama-2-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ybelkada/tiny-random-T5ForConditionalGeneration-calibrated": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 32100}, "prithivida/parrot_paraphraser_on_T5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "hf-internal-testing/tiny-random-GPTBigCodeModel": {"architectures": ["GPTBigCodeModel"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "hkunlp/instructor-large": {"architectures": ["T5EncoderModel"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "lmsys/vicuna-7b-v1.5": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "fabiochiu/t5-small-medium-title-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "TheBloke/Llama-2-7b-Chat-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "skt/kogpt2-base-v2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 51200}, "google/t5-v1_1-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Maykeye/TinyLLama-v0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 64, "intermediate_size": 256, "num_attention_heads": 16, "num_hidden_layers": 8, "vocab_size": 32000}, "TheBloke/Llama-2-13B-chat-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "tiiuae/falcon-40b": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "sonoisa/t5-base-japanese-title-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Rostlab/prot_t5_xl_uniref50": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 128}, "TheBloke/vicuna-7B-v1.3-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, 
"num_hidden_layers": 32, "vocab_size": 32000}, "daryl149/llama-2-7b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "stabilityai/StableBeluga-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "meta-llama/Llama-2-70b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/MythoMax-L2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "czurita/nsql-llama-2-7B-sharded-bf16-2GB": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "vennify/t5-base-grammar-correction": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "czearing/story-to-title": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "google/byt5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3840, "d_model": 1536, "num_heads": 16, "num_layers": 36, "vocab_size": 384}, "HuggingFaceH4/starchat-beta": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49156}, "codellama/CodeLlama-34b-Instruct-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "openlm-research/open_llama_13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "optimum/t5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "t5-3b": {"architectures": ["T5WithLMHeadModel"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "humarin/chatgpt_paraphraser_on_T5_base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Gustavosta/MagicPrompt-Stable-Diffusion": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "bigscience/bloomz-7b1": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 30, "num_attention_heads": 32, "vocab_size": 250880}, "trl-internal-testing/tiny-random-GPTNeoXForCausalLM": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 32, "intermediate_size": 37, "num_attention_heads": 4, "num_hidden_layers": 5, "vocab_size": 1024}, "NousResearch/Yarn-Llama-2-7b-64k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "khhuang/zerofec-qa2claim-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "h2oai/h2ogpt-gm-oasst1-en-2048-falcon-7b-v3": {"architectures": ["RWForCausalLM"], "hidden_size": 
4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "khhuang/zerofec-daqa-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "declare-lab/flan-alpaca-gpt4-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "codellama/CodeLlama-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "lmsys/vicuna-13b-v1.3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Rostlab/prot_t5_xl_half_uniref50-enc": {"architectures": ["T5EncoderModel"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 128}, "google/mt5-small": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "Salesforce/safety-flan-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "patrickvonplaten/t5-tiny-random": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 256, "d_model": 64, "num_heads": 2, "num_layers": 2, "vocab_size": 32128}, "google/flan-ul2": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, "d_model": 4096, "num_heads": 16, "num_layers": 32, "vocab_size": 32128}, "EleutherAI/pythia-70m": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "bigscience/mt0-large": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 250112}, "stevhliu/my_awesome_billsum_model": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "EleutherAI/pythia-70m-deduped": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "lmsys/vicuna-13b-v1.5": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "PAIXAI/Astrid-1B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "Phind/Phind-CodeLlama-34B-Python-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "pszemraj/flan-t5-large-grammar-synthesis": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "Voicelab/vlt5-base-keywords": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50048}, "togethercomputer/Llama-2-7B-32K-Instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "huggyllama/llama-65b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 
22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "ai-forever/ruGPT-3.5-13B": {"architectures": ["GPT2LMHeadModel"], "n_embd": 5120, "n_head": 40, "n_inner": null, "n_layer": 40, "vocab_size": 50272}, "Einmalumdiewelt/T5-Base_GNAD": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "google/t5-v1_1-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "Austism/chronos-hermes-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "upstage/SOLAR-0-70b-16bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "bigscience/bloom-7b1": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "nlpai-lab/kullm-polyglot-12.8b-v2": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 30080}, "codellama/CodeLlama-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "hf-internal-testing/tiny-random-GPT2Model": {"architectures": ["GPT2Model"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "Gryphe/MythoMax-L2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "openlm-research/open_llama_3b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/Llama-2-70B-chat-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "trl-internal-testing/dummy-GPT2-correct-vocab": {"architectures": ["GPT2LMHeadModel"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 50257}, "charsiu/g2p_multilingual_byT5_small_100": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3584, "d_model": 1472, "num_heads": 6, "num_layers": 12, "vocab_size": 384}, "EleutherAI/pythia-160m": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "ElnaggarLab/ankh-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 48, "vocab_size": 144}, "trl-internal-testing/tiny-random-GPT2LMHeadModel": {"architectures": ["GPT2LMHeadModel"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "openlm-research/open_llama_7b_v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/OpenAssistant-Llama2-13B-Orca-v2-8K-3166-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "codellama/CodeLlama-7b-Instruct-hf": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "WizardLM/WizardCoder-Python-34B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32001}, "pszemraj/grammar-synthesis-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "TheBloke/llama-2-70b-Guanaco-QLoRA-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "openlm-research/open_llama_3b_v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "IDEA-CCNL/Wenzhong-GPT2-110M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 12, "vocab_size": 50304}, "microsoft/DialoGPT-small": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257}, "trl-internal-testing/tiny-random-BloomForCausalLM": {"architectures": ["BloomForCausalLM"], "hidden_size": 32, "n_head": 4, "n_layer": 5, "vocab_size": 1024}, "trl-internal-testing/tiny-random-T5ForConditionalGeneration": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 1302}, "hf-internal-testing/tiny-random-onnx-mt5": {"architectures": ["MT5Model"], "d_ff": 1024, "d_model": 16, "num_heads": 4, "num_layers": 2, "vocab_size": 250112}, "NousResearch/Llama-2-7b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "trl-internal-testing/tiny-random-MT5ForConditionalGeneration": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 256, "d_model": 64, "num_heads": 4, "num_layers": 8, "vocab_size": 5100}, "fxmarty/tiny-testing-gpt2-remote-code": {"architectures": ["GPT2CustomLMHeadModel"], "intermediate_size": 37, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "vocab_size": 1000}, "castorini/monot5-base-msmarco-10k": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "microsoft/DialoGPT-large": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_layer": 36, "vocab_size": 50257}, "bigscience/bloomz-560m": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 24, "num_attention_heads": 16, "vocab_size": 250880}, "Open-Orca/OpenOrca-Platypus2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "google/byt5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3584, "d_model": 1472, "num_heads": 6, "num_layers": 12, "vocab_size": 384}, "bigscience/bloom-1b7": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 24, "num_attention_heads": 16, "vocab_size": 250880}, "elinas/chronos-13b-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "google/t5-efficient-tiny": {"architectures": ["T5ForConditionalGeneration"], 
"d_ff": 1024, "d_model": 256, "num_heads": 4, "num_layers": 4, "vocab_size": 32128}, "bigscience/bloom-1b1": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 24, "num_attention_heads": 16, "vocab_size": 250880}, "EleutherAI/polyglot-ko-1.3b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 30080}, "bigscience/bloom-3b": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 30, "num_attention_heads": 32, "vocab_size": 250880}, "TinyPixel/Llama-2-7B-bf16-sharded": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "edumunozsala/llama-2-7b-int4-python-code-20k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "yahma/llama-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "beomi/KoAlpaca-Polyglot-12.8B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 30003}, "stanfordnlp/backpack-gpt2": {"architectures": ["BackpackGPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50264}, "prithivida/grammar_error_correcter_v1": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "lvkaokao/llama2-7b-hf-chat-lora-v3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "google/t5-v1_1-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "TheBloke/gpt4-alpaca-lora_mlp-65B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "google/mt5-large": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 250112}, "EleutherAI/pythia-2.8b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50304}, "cyberagent/open-calm-7b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 52224}, "lvwerra/gpt2-imdb": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257}, "WizardLM/WizardLM-13B-V1.2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "KoboldAI/GPT-NeoX-20B-Erebus": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50432}, "togethercomputer/RedPajama-INCITE-Instruct-3B-v1": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, 
"aditi2222/automatic_title_generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "shibing624/chinese-alpaca-plus-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 49954}, "optimum/gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "togethercomputer/LLaMA-2-7B-32K": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "amazon/FalconLite": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65040}, "EleutherAI/polyglot-ko-5.8b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 16, "num_hidden_layers": 28, "vocab_size": 30080}, "databricks/dolly-v2-7b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50280}, "snrspeaks/t5-one-line-summary": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "lmsys/vicuna-33b-v1.3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/OpenOrca-Platypus2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "TheBloke/Llama-2-13B-Chat-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "sdadas/mt5-base-translator-pl-en": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250100}, "TheBloke/Llama-2-7b-chat-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "bigcode/gpt_bigcode-santacoder": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 49280}, "h2oai/h2ogpt-gm-oasst1-en-2048-falcon-7b-v2": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "TheBloke/airoboros-l2-70B-GPT4-2.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "lmsys/vicuna-13b-v1.5-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "bigcode/santacoder": {"architectures": ["GPT2LMHeadCustomModel"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 49280}, "togethercomputer/RedPajama-INCITE-Chat-3B-v1": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "ai-forever/mGPT": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": null, 
"n_layer": 24, "vocab_size": 100000}, "openlm-research/open_llama_7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "decapoda-research/llama-13b-hf": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "OpenAssistant/codellama-13b-oasst-sft-v10": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "rinna/bilingual-gpt-neox-4b-instruction-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2816, "intermediate_size": 11264, "num_attention_heads": 22, "num_hidden_layers": 36, "vocab_size": 65536}, "KoboldAI/LLaMA2-13B-Holomax-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "MBZUAI/LaMini-T5-61M": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "google/t5-v1_1-xxl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "EleutherAI/pythia-1.4b-deduped": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "stabilityai/StableBeluga-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "tiiuae/falcon-rw-1b": {"architectures": ["FalconForCausalLM"], "hidden_size": 2048, "num_attention_heads": 32, "num_hidden_layers": 24, "vocab_size": 50304}, "ClueAI/ChatYuan-large-v2": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "af1tang/personaGPT": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50263}, "google/t5-large-lm-adapt": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "vilsonrodrigues/falcon-7b-instruct-sharded": {"architectures": ["FalconForCausalLM"], "hidden_size": 4544, "num_attention_heads": 71, "num_hidden_layers": 32, "vocab_size": 65024}, "petals-team/falcon-rw-1b": {"architectures": ["FalconForCausalLM"], "hidden_size": 2048, "num_attention_heads": 32, "num_hidden_layers": 24, "vocab_size": 50304}, "bigscience/T0_3B": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "TheTravellingEngineer/llama2-7b-hf-guanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Salesforce/codet5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "EleutherAI/pythia-2.8b-deduped": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50304}, "The-Face-Of-Goonery/Huginn-13b-v1.2": {"architectures": ["LlamaForCausalLM"], 
"hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "FredZhang7/distilgpt2-stable-diffusion-v2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 50257}, "WizardLM/WizardCoder-15B-V1.0": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49153}, "EleutherAI/pythia-410m-deduped": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "huggyllama/llama-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ybelkada/falcon-7b-sharded-bf16": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "MingZhong/unieval-sum": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "NousResearch/Nous-Hermes-Llama2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "csebuetnlp/mT5_multilingual_XLSum": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "hkunlp/instructor-xl": {"architectures": ["T5EncoderModel"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "h2oai/h2ogpt-4096-llama2-13b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "HuggingFaceH4/tiny-random-LlamaForCausalLM": {"architectures": ["LlamaForCausalLM"], "hidden_size": 16, "intermediate_size": 64, "num_attention_heads": 4, "num_hidden_layers": 2, "vocab_size": 32000}, "EleutherAI/polyglot-ko-12.8b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 30080}, "databricks/dolly-v2-12b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50280}, "mrm8488/t5-base-finetuned-span-sentiment-extraction": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "WizardLM/WizardLM-70B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32001}, "codellama/CodeLlama-34b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "Salesforce/codet5-base-multi-sum": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "MBZUAI/LaMini-T5-738M": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "codellama/CodeLlama-13b-Instruct-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 
40, "vocab_size": 32016}, "h2oai/h2ogpt-4096-llama2-7b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50288}, "bigscience/bloom": {"architectures": ["BloomForCausalLM"], "n_layer": 70, "num_attention_heads": 112, "vocab_size": 250880}, "TigerResearch/tigerbot-13b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 60928}, "TheBloke/airoboros-l2-70B-gpt4-1.4.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Austism/chronos-hermes-13b-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "snrspeaks/KeyPhraseTransformer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Rocketknight1/tiny-random-falcon-7b": {"architectures": ["FalconForCausalLM"], "hidden_size": 1136, "num_attention_heads": 71, "num_hidden_layers": 2, "vocab_size": 65024}, "hf-internal-testing/tiny-random-T5Model": {"architectures": ["T5Model"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 32100}, "sambanovasystems/BLOOMChat-176B-v1": {"architectures": ["BloomForCausalLM"], "hidden_size": 14336, "n_head": 112, "n_layer": 70, "vocab_size": 250880}, "huggyllama/llama-30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "elyza/ELYZA-japanese-Llama-2-7b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "lcw99/t5-base-korean-text-summary": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50358}, "it5/it5-base-news-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32103}, "uer/gpt2-chinese-cluecorpussmall": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 21128}, "t5-11b": {"architectures": ["T5WithLMHeadModel"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "KoboldAI/LLaMA2-13B-Holomax": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Llama-2-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "bigscience/bloomz-3b": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 30, "num_attention_heads": 32, "vocab_size": 250880}, "lmsys/vicuna-7b-v1.5-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, 
"num_hidden_layers": 32, "vocab_size": 32000}, "sonoisa/t5-base-japanese": {"architectures": ["T5Model"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "line-corporation/japanese-large-lm-3.6b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 3072, "intermediate_size": 12288, "num_attention_heads": 32, "num_hidden_layers": 30, "vocab_size": 51200}, "TheBloke/Llama-2-7B-32K-Instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "EleutherAI/pythia-410m": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "NousResearch/Llama-2-70b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/falcon-7b-instruct-GPTQ": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "eachadea/vicuna-13b-1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "beomi/llama-2-ko-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 46336}, "TheBloke/falcon-40b-instruct-GPTQ": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "TheBloke/Llama-2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "garage-bAInd/Platypus2-70B-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "rajkumarrrk/gpt2-fine-tuned-on-imdb-positive-reviews": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "cerebras/Cerebras-GPT-13B": {"architectures": ["GPT2Model"], "n_embd": 5120, "n_head": 40, "n_inner": 20480, "n_layer": 40, "vocab_size": 50257}, "rinna/japanese-gpt2-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": 4096, "n_layer": 24, "vocab_size": 32000}, "bigscience/T0pp": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "Phind/Phind-CodeLlama-34B-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "beomi/kykim-gpt3-kor-small_based_on_gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 42000}, "Pi3141/DialoGPT-medium-elon-3": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "EleutherAI/pythia-1b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "ai-forever/rugpt3large_based_on_gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, 
"jondurbin/airoboros-l2-13b-gpt4-m2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "codellama/CodeLlama-13b-Python-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "AUTOMATIC/promptgen-lexart": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 50257}, "Salesforce/codet5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "h2oai/h2ogpt-oig-oasst1-512-6_9b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "rinna/japanese-gpt-neox-3.6b-instruction-ppo": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2816, "intermediate_size": 11264, "num_attention_heads": 22, "num_hidden_layers": 36, "vocab_size": 32000}, "prithivida/informal_to_formal_styletransfer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TheBloke/Wizard-Vicuna-13B-Uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "matsuo-lab/weblab-10b-instruction-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4864, "intermediate_size": 19456, "num_attention_heads": 38, "num_hidden_layers": 36, "vocab_size": 50277}, "succinctly/text2image-prompt-generator": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/Llama-2-7B-Chat-GGML": {}, "TheBloke/Llama-2-70B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "sentence-transformers/gtr-t5-large": {"architectures": ["T5EncoderModel"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "h2oai/h2ogpt-gm-oasst1-en-2048-falcon-40b-v2": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "togethercomputer/RedPajama-INCITE-Base-3B-v1": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "rinna/bilingual-gpt-neox-4b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2816, "intermediate_size": 11264, "num_attention_heads": 22, "num_hidden_layers": 36, "vocab_size": 65536}, "TheBloke/Llama-2-13B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "pankajmathur/orca_mini_v3_70b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "OpenAssistant/llama2-13b-orca-8k-3319": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "TheBloke/StableBeluga-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, 
"intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "defog/sqlcoder": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "WizardLM/WizardCoder-Python-13B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "stabilityai/stablelm-tuned-alpha-3b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 16, "vocab_size": 50688}, "cyberagent/open-calm-small": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 52096}, "TheBloke/Wizard-Vicuna-30B-Uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/WizardLM-70B-V1.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32001}, "bigscience/bigscience-small-testing": {"architectures": ["BloomModel"], "hidden_size": 64, "n_head": 8, "n_inner": null, "n_layer": 2, "vocab_size": 250880}, "cyberagent/open-calm-1b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 52096}, "lamini/lamini_docs_finetuned": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "EnglishVoice/t5-base-uk-to-us-english": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "codellama/CodeLlama-7b-Python-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/WizardLM-13B-V1.2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "EleutherAI/pythia-160m-deduped": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "jphme/Llama-2-13b-chat-german": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "EleutherAI/pythia-12b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50688}, "TheBloke/WizardLM-1.0-Uncensored-Llama2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Salesforce/codet5p-220m": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "google/mt5-xl": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 250112}, "cerebras/Cerebras-GPT-111M": {"n_inner": 3072, "n_embd": 
768, "n_head": 12, "n_layer": 10, "vocab_size": 50257}, "google/t5-v1_1-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/vicuna-7B-v1.5-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "chavinlo/alpaca-native": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "kimnt93/kmv-7b-03": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "NumbersStation/nsql-llama-2-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "cerebras/Cerebras-GPT-1.3B": {"n_inner": 8192, "n_embd": 2048, "n_head": 16, "n_layer": 24, "vocab_size": 50257}, "trl-internal-testing/tiny-T5ForConditionalGeneration-correct-vocab": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 32128}, "akreal/tiny-random-LlamaForCausalLM": {"architectures": ["LlamaForCausalLM"], "hidden_size": 16, "intermediate_size": 64, "num_attention_heads": 4, "num_hidden_layers": 2, "vocab_size": 32000}, "akreal/tiny-random-BloomForCausalLM": {"architectures": ["BloomForCausalLM"], "hidden_size": 32, "n_head": 4, "n_layer": 5, "vocab_size": 1024}, "NousResearch/Nous-Hermes-llama-2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ai-forever/rugpt3small_based_on_gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50264}, "VMware/open-llama-7b-v2-open-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "robertmyers/targon-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "TheBloke/Nous-Hermes-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "TheBloke/WizardLM-33B-V1.0-Uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/WizardLM-7B-uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "ramsrigouthamg/t5_paraphraser": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "tinkoff-ai/ruDialoGPT-medium": {"n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50261}, "OpenAssistant/falcon-7b-sft-mix-2000": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65040}, "bigcode/tiny_starcoder_py": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 768, "n_head": 12, "n_inner": 
3072, "n_layer": 20, "vocab_size": 49152}, "rinna/japanese-gpt-1b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 44928}, "TheBloke/orca_mini_v3_70B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "UBC-NLP/turjuman": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 110080}, "h2oai/h2ogpt-4096-llama2-70b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Phind/Phind-CodeLlama-34B-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "elyza/ELYZA-japanese-Llama-2-7b-fast-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 45043}, "iarfmoose/t5-base-question-generator": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, "TheBloke/Llama-2-7B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "mrm8488/t5-base-finetuned-emotion": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "hkunlp/instructor-base": {"architectures": ["T5EncoderModel"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "fxmarty/onnx-tiny-random-gpt2-without-merge": {"architectures": ["GPT2Model"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "fxmarty/onnx-tiny-random-gpt2-with-merge": {"architectures": ["GPT2Model"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "microsoft/GODEL-v1_1-large-seq2seq": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32102}, "rinna/japanese-gpt-neox-3.6b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2816, "intermediate_size": 11264, "num_attention_heads": 22, "num_hidden_layers": 36, "vocab_size": 32000}, "cyberagent/open-calm-3b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 52224}, "eachadea/vicuna-7b-1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "beomi/KoAlpaca-Polyglot-5.8B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 16, "num_hidden_layers": 28, "vocab_size": 30080}, "grammarly/coedit-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "TheBloke/Platypus2-70B-Instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "codellama/CodeLlama-34b-Python-hf": {"architectures": ["LlamaForCausalLM"], 
"hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "noamwies/llama-test-gqa-with-better-transformer": {"architectures": ["LlamaForCausalLM"], "hidden_size": 128, "intermediate_size": 344, "num_attention_heads": 8, "num_hidden_layers": 2, "vocab_size": 2000}, "bigscience/bloomz-7b1-mt": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 30, "num_attention_heads": 32, "vocab_size": 250880}, "Salesforce/codet5p-770m": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "OpenAssistant/pythia-12b-sft-v8-7k-steps": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50288}, "augtoma/qCammel-70-x": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "NousResearch/Llama-2-13b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "plguillou/t5-base-fr-sum-cnndm": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, "WeOpenML/PandaLM-7B-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "VMware/open-llama-7b-open-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "pankajmathur/orca_mini_v3_7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "google/t5-xl-lm-adapt": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "LinkSoul/Chinese-Llama-2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "line-corporation/japanese-large-lm-3.6b-instruction-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 3072, "intermediate_size": 12288, "num_attention_heads": 32, "num_hidden_layers": 30, "vocab_size": 51200}, "OpenAssistant/oasst-sft-1-pythia-12b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50288}, "ehartford/WizardLM-7B-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "upstage/llama-30b-instruct-2048": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "cyberagent/open-calm-large": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1536, "intermediate_size": 6144, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 52096}, "Gryphe/MythoLogic-L2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, 
"vocab_size": 32000}, "eenzeenee/t5-small-korean-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 50358}, "google/t5-xxl-lm-adapt": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "mywateriswet/ShuanBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "hf-internal-testing/tiny-random-bloom": {"architectures": ["BloomModel"], "hidden_size": 64, "n_head": 8, "n_inner": null, "n_layer": 2, "vocab_size": 250880}, "TheBloke/Llama-2-13B-chat-GGML": {}, "decapoda-research/llama-30b-hf": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "lmsys/longchat-7b-v1.5-32k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ziqingyang/chinese-alpaca-2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 55296}, "nlpai-lab/kullm-polyglot-5.8b-v2": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 16, "num_hidden_layers": 28, "vocab_size": 30080}, "google/byt5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3968, "d_model": 1536, "num_heads": 12, "num_layers": 18, "vocab_size": 384}, "stabilityai/stablelm-tuned-alpha-7b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 48, "num_hidden_layers": 16, "vocab_size": 50432}, "PygmalionAI/pygmalion-1.3b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "stanford-crfm/BioMedLM": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2560, "n_head": 20, "n_inner": null, "n_layer": 32, "vocab_size": 28896}, "PY007/TinyLlama-1.1B-step-50K-105b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 2048, "intermediate_size": 5632, "num_attention_heads": 32, "num_hidden_layers": 22, "vocab_size": 32000}, "georgesung/llama2_7b_chat_uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "bigscience/mt0-small": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "TheBloke/WizardCoder-15B-1.0-GPTQ": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49153}, "google/t5-base-lm-adapt": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "OpenAssistant/falcon-40b-sft-top1-560": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65040}, "TheBloke/WizardLM-30B-Uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "TheBloke/WizardCoder-Python-34B-V1.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 
8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32001}, "garage-bAInd/Camel-Platypus2-70B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "DeepFloyd/t5-v1_1-xxl": {"architectures": ["T5EncoderModel"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "EleutherAI/pythia-1b-deduped": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "TheBloke/CodeLlama-7B-Instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "kfkas/Llama-2-ko-7b-Chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 46336}, "valhalla/t5-small-qa-qg-hl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32102}, "FlagAlpha/Llama2-Chinese-13b-Chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Open-Orca/OpenOrcaxOpenChat-Preview2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "trl-internal-testing/tiny-random-LlamaForCausalLM": {"architectures": ["LlamaForCausalLM"], "hidden_size": 16, "intermediate_size": 64, "num_attention_heads": 4, "num_hidden_layers": 2, "vocab_size": 32000}, "abhishek/llama-2-7b-hf-small-shards": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "togethercomputer/RedPajama-INCITE-7B-Base": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "Salesforce/codegen25-7b-multi": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 51200}, "fabiochiu/t5-base-tag-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "MBZUAI/LaMini-Flan-T5-248M": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "bigscience/bloomz-1b7": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 24, "num_attention_heads": 16, "vocab_size": 250880}, "valhalla/t5-base-qg-hl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "Wi/gptp": {"architectures": ["GPTPModel"], "n_embd": 128, "n_head": 4, "n_inner": null, "n_layer": 4, "vocab_size": 1000}, "medalpaca/medalpaca-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "yentinglin/Taiwan-LLaMa-v1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, 
"num_hidden_layers": 40, "vocab_size": 32000}, "rinna/japanese-gpt-neox-small": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 44416}, "TheBloke/llama2_7b_chat_uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "EleutherAI/pythia-1.4b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "daryl149/llama-2-13b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "flax-community/gpt-2-spanish": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "KoboldAI/LLAMA2-13B-Holodeck-1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-13b-gpt4-1.4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "mrm8488/t5-base-finetuned-question-generation-ap": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "OpenBuddy/openbuddy-llama2-13b-v8.1-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 37632}, "EleutherAI/pythia-6.9b-deduped": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "tscholak/3vnuv1vf": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32102}, "OpenAssistant/llama2-70b-oasst-sft-v10": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32007}, "TheBloke/vicuna-13B-v1.5-16K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "OpenAssistant/falcon-7b-sft-top1-696": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65040}, "sentence-transformers/sentence-t5-large": {"architectures": ["T5EncoderModel"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/Nous-Hermes-Llama2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "mesolitica/finetune-translation-t5-super-super-tiny-standard-bahasa-cased": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 512, "d_model": 128, "num_heads": 6, "num_layers": 2, "vocab_size": 32100}, "Henk717/spring-dragon": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "openchat/openchat_v3.2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, 
"intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "WizardLM/WizardMath-70B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32002}, "potsawee/t5-large-generation-squad-QuestionAnswer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/Phind-CodeLlama-34B-v2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "pankajmathur/orca_mini_3b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "fffrrt/ruGPT-3.5-13B-GPTQ": {"architectures": ["GPT2LMHeadModel"], "n_embd": 5120, "n_head": 40, "n_inner": null, "n_layer": 40, "vocab_size": 50272}, "kykim/gpt3-kor-small_based_on_gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 42000}, "PAIXAI/Astrid-1B-CPU": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "ElnaggarLab/ankh-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3840, "d_model": 1536, "num_heads": 16, "num_layers": 48, "vocab_size": 144}, "togethercomputer/RedPajama-INCITE-7B-Chat": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "ramsrigouthamg/t5_squad_v1": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "KETI-AIR/ke-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 64128}, "sentence-transformers/gtr-t5-base": {"architectures": ["T5EncoderModel"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "ramsrigouthamg/t5-large-paraphraser-diverse-high-quality": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "rinna/japanese-gpt2-small": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 12, "vocab_size": 32000}, "rinna/bilingual-gpt-neox-4b-instruction-ppo": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2816, "intermediate_size": 11264, "num_attention_heads": 22, "num_hidden_layers": 36, "vocab_size": 65536}, "ramsrigouthamg/t5_boolean_questions": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "philschmid/flan-t5-base-samsum": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "google/t5-small-lm-adapt": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "matsuo-lab/weblab-10b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4864, "intermediate_size": 19456, "num_attention_heads": 38, "num_hidden_layers": 36, "vocab_size": 50304}, "stabilityai/stablecode-completion-alpha-3b-4k": 
{"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 49152}, "IDEA-CCNL/Ziya-LLaMA-7B-Reward": {"architectures": ["LlamaRewardModel"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "ichitaka/falcon-40b-instruct-8bit": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "TheBloke/WizardCoder-Python-13B-V1.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "togethercomputer/Pythia-Chat-Base-7B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "TheBloke/wizardLM-7B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "AUTOMATIC/promptgen-majinai-unsafe": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 50257}, "pinkmanlove/llama-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "lmsys/longchat-13b-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "togethercomputer/RedPajama-INCITE-7B-Instruct": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "lmsys/vicuna-13b-v1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Salesforce/codet5-large": {"architectures": ["T5WithLMHeadModel"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "FredZhang7/anime-anything-promptgen-v2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 50257}, "Salesforce/xgen-7b-8k-inst": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 51200}, "jojo0217/step3_mk7": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 30008}, "EleutherAI/pythia-14m": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 128, "intermediate_size": 512, "num_attention_heads": 4, "num_hidden_layers": 6, "vocab_size": 50304}, "cerebras/Cerebras-GPT-590M": {"n_inner": 6144, "n_embd": 1536, "n_head": 12, "n_layer": 18, "vocab_size": 50257}, "dbmdz/german-gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50265}, "KoboldAI/GPT-NeoX-20B-Skein": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50432}, "beomi/polyglot-ko-12.8b-safetensors": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, 
"num_hidden_layers": 40, "vocab_size": 30080}, "sentence-transformers/sentence-t5-base": {"architectures": ["T5EncoderModel"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "decapoda-research/llama-65b-hf": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "mesolitica/finetune-translation-t5-small-standard-bahasa-cased": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "marcsun13/bloom-1b7_with_lm_head": {"architectures": ["BloomForCausalLM"], "hidden_size": 2048, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "MBZUAI/LaMini-Flan-T5-783M": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "medalpaca/medalpaca-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "JulesBelveze/t5-small-headline-generator": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "Michau/t5-base-en-generate-headline": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TheBloke/Falcon-180B-Chat-GPTQ": {"architectures": ["FalconForCausalLM"], "hidden_size": 14848, "num_attention_heads": 232, "num_hidden_layers": 80, "vocab_size": 65024}, "Salesforce/xgen-7b-8k-base": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 51200}, "ai-forever/ruT5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "KRAFTON/KORani-v3-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "bigscience/mt0-xxl-mt": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 250112}, "garage-bAInd/Stable-Platypus2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Wizard-Vicuna-13B-Uncensored-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "h2oai/h2ogpt-oasst1-512-12b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50688}, "Parth/result": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "declare-lab/flan-alpaca-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "sdadas/mt5-base-translator-en-pl": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250100}, "ziqingyang/chinese-llama-2-7b": {"architectures": ["LlamaForCausalLM"], 
"hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 55296}, "NousResearch/Nous-Hermes-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "pragmatic-programs/listener-suffix-idx-300k": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3584, "d_model": 1472, "num_heads": 6, "num_layers": 12, "vocab_size": 384}, "jinaai/jina-embedding-l-en-v1": {"architectures": ["T5EncoderModel"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "stabilityai/stablelm-base-alpha-3b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 16, "vocab_size": 50688}, "razent/SciFive-base-Pubmed_PMC": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "uer/gpt2-chinese-poem": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 22557}, "openchat/openchat_v3.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "IDEA-CCNL/Ziya-LLaMA-13B-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 39424}, "Sao10K/Mythical-Destroyer-V2-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "juierror/text-to-sql-with-table-schema": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "MingZhong/unieval-fact": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "TheBloke/vicuna-13B-v1.5-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "cerebras/Cerebras-GPT-256M": {"n_inner": 4352, "n_embd": 1088, "n_head": 17, "n_layer": 14, "vocab_size": 50257}, "declare-lab/flan-alpaca-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "ehartford/WizardLM-1.0-Uncensored-Llama2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "aubmindlab/aragpt2-base": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 64000}, "valhalla/t5-small-e2e-qg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32102}, "elinas/llama-7b-hf-transformers-4.29": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "lmsys/vicuna-13b-delta-v1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "garage-bAInd/Platypus2-7B": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "PKU-Alignment/beaver-7b-v1.0-cost": {"architectures": ["LlamaModelForScore"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "allenai/unifiedqa-t5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "JackFram/llama-160m": {"architectures": ["LlamaForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 32000}, "daryl149/llama-2-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "akreal/tiny-random-t5": {"d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 99}, "cyberagent/open-calm-medium": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 52096}, "The-Face-Of-Goonery/Huginn-13b-FP16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "facebook/tart-full-flan-t5-xl": {"architectures": ["EncT5ForSequenceClassification"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "csebuetnlp/banglat5_banglaparaphrase": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "FlagAlpha/Llama2-Chinese-7b-Chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "jerryjalapeno/Llama-2-1b-0-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 32000}, "NousResearch/Redmond-Puffin-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "bigscience/bloomz": {"architectures": ["BloomForCausalLM"], "n_layer": 70, "num_attention_heads": 112, "vocab_size": 250880}, "allenai/unifiedqa-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "WizardLM/WizardMath-7B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "pragmatic-programs/speaker-prefix-idx-300k": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3584, "d_model": 1472, "num_heads": 6, "num_layers": 12, "vocab_size": 384}, "TheBloke/CodeLlama-13B-Instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "TheBloke/Upstage-Llama-2-70B-instruct-v2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, 
"num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "pinkmanlove/llama-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "VietAI/envit5-translation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50048}, "cerebras/Cerebras-GPT-2.7B": {"n_inner": 10240, "n_embd": 2560, "n_head": 32, "n_layer": 32, "vocab_size": 50257}, "Open-Orca/LlongOrca-7B-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32003}, "hf-internal-testing/tiny-random-T5ForConditionalGeneration": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 32100}, "juierror/flan-t5-text2sql-with-schema-v2": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "BeIR/query-gen-msmarco-t5-base-v1": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "conceptofmind/LLongMA-2-13b-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "NousResearch/Yarn-Llama-2-13b-128k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "wangrongsheng/MiniGPT-4-LLaMA": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "hf-internal-testing/tiny-random-GPT2ForSequenceClassification": {"architectures": ["GPT2ForSequenceClassification"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "zenham/wail_m_e4_16h_2k": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "h2oai/h2ogpt-4096-llama2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ai-forever/FRED-T5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 50364}, "FreedomIntelligence/phoenix-inst-chat-7b": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "castorini/monot5-base-msmarco": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "minlik/chinese-alpaca-plus-7b-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 49954}, "joaogante/tiny-random-gpt2-with-generation-config": {"intermediate_size": 37, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "vocab_size": 1000}, "neulab/gpt2-finetuned-wikitext103": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "jarradh/llama2_70b_chat_uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, 
"intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TigerResearch/tigerbot-13b-base": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 60928}, "rinna/japanese-gpt-neox-3.6b-instruction-sft-v2": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2816, "intermediate_size": 11264, "num_attention_heads": 22, "num_hidden_layers": 36, "vocab_size": 32000}, "bofenghuang/vigogne-2-7b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/stable-vicuna-13B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "aiplanet/effi-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-33b-gpt4-m2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/orca_mini_v3_13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "HuggingFaceH4/starchat-alpha": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49156}, "WizardLM/WizardMath-13B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "upstage/Llama-2-70b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "anushehchaudry/llama-2-tiny-random": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8, "intermediate_size": 32, "num_attention_heads": 2, "num_hidden_layers": 1, "vocab_size": 32000}, "fangloveskari/ORCA_LLaMA_70B_QLoRA": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "HyperbeeAI/Tulpar-7b-v0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Llama-2-70B-Chat-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "csebuetnlp/mT5_m2m_crossSum_enhanced": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "TheBloke/Genz-70b-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "cerebras/Cerebras-GPT-6.7B": {"n_embd": 4096, "vocab_size": 50257, "n_layer": 32, "n_head": 32, "n_inner": 16384}, "ziqingyang/chinese-alpaca-2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 55296}, "google/t5-small-ssm-nq": {"architectures": 
["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "EleutherAI/polyglot-ko-3.8b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 3072, "intermediate_size": 12288, "num_attention_heads": 24, "num_hidden_layers": 32, "vocab_size": 30080}, "kashif/stack-llama-2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "line-corporation/japanese-large-lm-1.7b": {"vocab_size": 51200, "n_embd": 2304, "n_layer": 24, "n_head": 24, "n_inner": 9216, "architectures": ["GPT2LMHeadModel"]}, "microsoft/codereviewer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32216}, "TheBloke/guanaco-7B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "circulus/Llama-2-7b-orca-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "FlagAlpha/Atom-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 65000}, "Tap-M/Luna-AI-Llama2-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "K024/mt5-zh-ja-en-trimmed": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 85292}, "deep-learning-analytics/automatic-title-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "luodian/llama-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "stabilityai/stablelm-base-alpha-7b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 48, "num_hidden_layers": 16, "vocab_size": 50432}, "OpenLemur/lemur-70b-chat-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32005}, "rahular/varta-t5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 128128}, "rinna/japanese-gpt-neox-3.6b-instruction-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2816, "intermediate_size": 11264, "num_attention_heads": 22, "num_hidden_layers": 36, "vocab_size": 32000}, "garage-bAInd/Platypus-30B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "WizardLM/WizardCoder-Python-7B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "chavinlo/gpt4-x-alpaca": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "sentence-transformers/gtr-t5-xl": {"architectures": 
["T5EncoderModel"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "wangrongsheng/MiniGPT-4-LLaMA-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "EleutherAI/pythia-12b-deduped": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50688}, "unicamp-dl/translation-pt-en-t5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "bigscience/mt0-base": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "Pirr/pythia-13b-deduped-green_devil": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50277}, "trl-internal-testing/tiny-random-GPT2Model": {"architectures": ["GPT2Model"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "MBZUAI/LaMini-GPT-1.5B": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": null, "n_layer": 48, "vocab_size": 50258}, "Universal-NER/UniNER-7B-all": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/koala-13B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Rostlab/prot_t5_xl_bfd": {"architectures": ["T5WithLMHeadModel"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 128}, "Voicelab/trurl-2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "explosion-testing/llama2-kv-sharing": {"architectures": ["LlamaForCausalLM"], "hidden_size": 256, "intermediate_size": 512, "num_attention_heads": 4, "num_hidden_layers": 5, "vocab_size": 1024}, "inpars/monot5-3b-inpars-v2-nq-promptagator": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "upstage/llama-65b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "microsoft/CodeGPT-small-py": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50001}, "VietAI/vit5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 36096}, "TheBloke/CodeUp-Llama-2-13B-Chat-HF-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/CodeLlama-34B-Instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "elyza/ELYZA-japanese-Llama-2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, 
"jondurbin/airoboros-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/CodeLlama-7B-Python-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "FlagAlpha/Llama2-Chinese-13b-Chat-4bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/CodeLlama-13B-Python-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Enoch/llama-65b-hf": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "chargoddard/platypus-2-22b-relora": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "togethercomputer/GPT-NeoXT-Chat-Base-20B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50432}, "porkorbeef/Llama-2-13b-sf": {"architectures": ["LlamaModel"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "ehartford/Wizard-Vicuna-13B-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "doas/test5": {"architectures": ["LlamaModel"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "klosax/open_llama_3b_350bt_preview": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "Writer/camel-5b-hf": {"architectures": ["GPT2LMHeadModel"], "n_embd": 4096, "n_head": 32, "n_inner": 16384, "n_layer": 24, "vocab_size": 50258}, "Filosofas/DialoGPT-medium-PALPATINE2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "nomic-ai/gpt4all-falcon": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "reciprocate/llama2-7b-gsm8k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/CodeLlama-13B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "pankajmathur/orca_mini_v3_13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "projecte-aina/aguila-7b": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 50257}, "TheBloke/WizardLM-13B-V1.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "MBZUAI/LaMini-GPT-124M": 
{"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50258}, "google/mt5-xxl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 250112}, "MaRiOrOsSi/t5-base-finetuned-question-answering": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "satvikag/chatbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "LMFlow/Robin-7b-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "daryl149/llama-2-70b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "acrastt/Puma-3B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/orca_mini_v3_7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "taeminlee/kogpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50000}, "NousResearch/Llama-2-70b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "rinna/japanese-gpt2-xsmall": {"architectures": ["GPT2LMHeadModel"], "n_embd": 512, "n_head": 8, "n_inner": 2304, "n_layer": 6, "vocab_size": 32000}, "ziqingyang/chinese-llama-2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 55296}, "hf-internal-testing/tiny-random-t5-v1.1": {"d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 1103}, "pankajmathur/Lima_Unchained_70b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "chargoddard/llama2-22b-blocktriangular": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/CodeLlama-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "BeIR/query-gen-msmarco-t5-large-v1": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/CodeLlama-13B-Instruct-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "acrastt/Marx-3B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "PygmalionAI/pygmalion-2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "shibing624/chinese-alpaca-plus-7b-hf": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 49954}, "TheBloke/OpenOrcaxOpenChat-Preview2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "syzymon/long_llama_3b_instruct": {"architectures": ["LongLlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "bofenghuang/vigogne-2-7b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Gustavosta/MagicPrompt-Dalle": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "muchad/idt5-qa-qg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 30002}, "TheBloke/vicuna-13b-v1.3.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TigerResearch/tigerbot-13b-base-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 60928}, "ehartford/WizardLM-13B-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "clibrain/Llama-2-7b-ft-instruct-es": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "google/t5_xxl_true_nli_mixture": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "unikei/t5-base-split-and-rephrase": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "microsoft/Promptist": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257}, "stas/mt5-tiny-random": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 256, "d_model": 64, "num_heads": 4, "num_layers": 8, "vocab_size": 5100}, "AIDC-ai-business/Luban-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "microsoft/GODEL-v1_1-base-seq2seq": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, "CalderaAI/30B-Lazarus": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "acrastt/Marx-3B-V2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "h2oai/h2ogpt-4096-llama2-70b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "ajibawa-2023/scarlett-33b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, 
"intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "jondurbin/airoboros-l2-70b-2.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "rubentito/vt5-base-spdocvqa": {"architectures": ["HF_VT5"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "aisquared/dlite-v2-774m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50260}, "elyza/ELYZA-japanese-Llama-2-7b-fast": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 45043}, "quantumaikr/llama-2-70b-fb16-korean": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/CodeLlama-34B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "microsoft/DialogRPT-updown": {"architectures": ["GPT2ForSequenceClassification"], "n_embd": 1024, "n_head": 16, "n_layer": 24, "vocab_size": 50257}, "TheBloke/CodeLlama-34B-Instruct-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "garage-bAInd/Platypus2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "trl-internal-testing/tiny-BloomForCausalLM-correct-vocab": {"architectures": ["BloomForCausalLM"], "hidden_size": 32, "n_head": 4, "n_layer": 5, "vocab_size": 250880}, "TheBloke/Llama-2-7B-GGML": {}, "TheBloke/Wizard-Vicuna-7B-Uncensored-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "wenge-research/yayi-7b-llama2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32005}, "coffeeee/nsfw-story-generator2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "jondurbin/airoboros-33b-gpt4-2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "totally-not-an-llm/EverythingLM-13b-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "datificate/gpt2-small-spanish": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "mrm8488/t5-base-finetuned-wikiSQL": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "bofenghuang/vigogne-2-13b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "OpenAssistant/stablelm-7b-sft-v7-epoch-3": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, 
"intermediate_size": 24576, "num_attention_heads": 48, "num_hidden_layers": 16, "vocab_size": 50288}, "bhenrym14/airoboros-33b-gpt4-1.4.1-lxctx-PI-16384-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "flozi00/codellama-34b-german-assistant-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "WizardLM/WizardCoder-1B-V1.0": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 49153}, "upstage/llama-30b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "ehartford/dolphin-llama2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Open-Orca/LlongOrca-13B-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32004}, "NousResearch/Nous-Hermes-Llama2-70b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32001}, "ml6team/mt5-small-german-query-generation": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "bigscience/mt0-xxl": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 250112}, "EleutherAI/pythia-2.8b-deduped-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50304}, "TheBloke/wizardLM-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "conceptofmind/LLongMA-2-7b-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "lmsys/vicuna-7b-delta-v1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "bofenghuang/vigogne-7b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "csebuetnlp/banglat5_nmt_en_bn": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "trl-internal-testing/tiny-random-T5Model": {"architectures": ["T5Model"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 1302}, "OpenBuddy/openbuddy-llama2-70b-v10.1-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 37632}, "TheBloke/wizard-vicuna-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, 
"JosephusCheung/Guanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "openchat/opencoderplus": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49153}, "jacobmorrison/tk-instruct-large-lora-experiments": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "PygmalionAI/metharme-1.3b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "TheBloke/orca_mini_13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "EleutherAI/pythia-70m-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "project-baize/baize-v2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "line-corporation/japanese-large-lm-1.7b-instruction-sft": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2304, "n_head": 24, "n_inner": 9216, "n_layer": 24, "vocab_size": 51200}, "TheBloke/WizardLM-13B-V1-0-Uncensored-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/llama-2-70b-Guanaco-QLoRA-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "MBZUAI/LaMini-Flan-T5-77M": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "csebuetnlp/banglat5_nmt_bn_en": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Trelis/Llama-2-7b-chat-hf-function-calling-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ehartford/Wizard-Vicuna-7B-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "llSourcell/medllama2_7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "WizardLM/WizardLM-13B-V1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "Gryphe/MythoMix-L2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/StableBeluga2-70B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "VietAI/vit5-large-vietnews-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 
1024, "num_heads": 16, "num_layers": 24, "vocab_size": 36096}, "adasnew/t5-small-xsum": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "Intel/t5-small-xsum-int8-dynamic": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "daspartho/prompt-extend": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 52000}, "EleutherAI/pythia-160m-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "Ar4ikov/gpt2-650k-stable-diffusion-prompt-generator": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "ehartford/WizardLM-Uncensored-Falcon-7b": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65025}, "CobraMamba/mamba-gpt-3b-v3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/llama2_70b_chat_uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "ai-forever/FRED-T5-1.7B": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1536, "num_heads": 24, "num_layers": 24, "vocab_size": 50364}, "MBZUAI/LaMini-Cerebras-590M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 12, "n_inner": 6144, "n_layer": 18, "vocab_size": 50258}, "mrm8488/llama-2-coder-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "guardrail/llama-2-7b-guanaco-instruct-sharded": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "rinna/bilingual-gpt-neox-4b-8k": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2816, "intermediate_size": 11264, "num_attention_heads": 22, "num_hidden_layers": 36, "vocab_size": 65536}, "mrm8488/falcoder-7b": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "circulus/Llama-2-13b-orca-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "allenai/tk-instruct-3b-def": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32100}, "pierreguillou/gpt2-small-portuguese": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257}, "junelee/wizard-vicuna-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "castorini/monot5-3b-msmarco-10k": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "TheBloke/Llama-2-70B-Chat-GGML": {}, "TheBloke/CodeLlama-7B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, 
"num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "yeontaek/llama-2-13B-ensemble-v5": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "ybelkada/flan-t5-xl-sharded-bf16": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "WizardLM/WizardCoder-3B-V1.0": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 2816, "n_head": 22, "n_inner": 11264, "n_layer": 36, "vocab_size": 49153}, "Langboat/mengzi-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "MBZUAI/LaMini-GPT-774M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50258}, "ToddGoldfarb/Cadet-Tiny": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "TigerResearch/tigerbot-7b-base": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 60928}, "UrukHan/t5-russian-spell": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "LinkSoul/Chinese-Llama-2-7b-4bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Vicuna-13B-CoT-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "EleutherAI/pythia-1.4b-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "MayaPH/GodziLLa2-70B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/wizardLM-13B-1.0-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "Gryphe/MythoBoros-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "abacusai/Giraffe-v2-13b-32k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-l2-13b-gpt4-1.4.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "razent/SciFive-base-Pubmed": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TehVenom/Pygmalion-13b-Merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "garage-bAInd/SuperPlatty-30B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 
32000}, "jondurbin/airoboros-l2-7b-gpt4-2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Rostlab/ProstT5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 150}, "TheBloke/guanaco-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "JackFram/llama-68m": {"architectures": ["LlamaForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 2, "vocab_size": 32000}, "MBZUAI/LaMini-Cerebras-111M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 10, "vocab_size": 50258}, "ehartford/Wizard-Vicuna-30B-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "stockmark/gpt-neox-japanese-1.4b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50000}, "TheBloke/MythoMax-L2-13B-GGML": {}, "MBZUAI/LaMini-Cerebras-256M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1088, "n_head": 17, "n_inner": 4352, "n_layer": 14, "vocab_size": 50258}, "jondurbin/airoboros-l2-13b-gpt4-2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "The-Face-Of-Goonery/Chronos-Beluga-v2-13bfp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "lmqg/t5-base-squad-qag": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "Voicelab/trurl-2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "ehartford/Samantha-1.11-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "clibrain/Llama-2-13b-ft-instruct-es": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "deepse/CodeUp-Llama-2-13b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "mrm8488/t5-base-finetuned-sarcasm-twitter": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "ToolBench/ToolLLaMA-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "marella/gpt-2-ggml": {}, "Henk717/airochronos-33B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "stanford-crfm/alias-gpt2-small-x21": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 
null, "n_layer": 12, "vocab_size": 50257}, "WizardLM/WizardLM-30B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "timdettmers/guanaco-33b-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "xkianteb/alg_ppo_separate_lr_1e-6_n_epochs_10_v_epochs_10_kl_target_1.0_clip_range_0.2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/wizard-mega-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "bigscience/mt0-xl": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 250112}, "luffycodes/nash-vicuna-13b-v1dot5-ep2-w-rag-w-simple": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "h2oai/h2ogpt-oig-oasst1-256-6_9b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "fabiochiu/t5-base-medium-title-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "OpenAssistant/falcon-40b-sft-mix-1226": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65040}, "Writer/palmyra-base": {"architectures": ["GPT2LMHeadModel"], "n_embd": 4096, "n_head": 32, "n_inner": 16384, "n_layer": 24, "vocab_size": 50257}, "TheBloke/llama-2-70b-Guanaco-QLoRA-GGML": {}, "Rostlab/prot_t5_base_mt_uniref50": {"architectures": ["T5WithLMHeadModel"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 256}, "Lajonbot/Llama-2-13b-hf-instruct-pl-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "WizardLM/WizardLM-7B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "pankajmathur/orca_mini_7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "yhyhy3/open_llama_7b_v2_med_instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "NousResearch/CodeLlama-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "OpenBuddy/openbuddy-llama2-13b-v11.1-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 37632}, "hf-internal-testing/tiny-random-GPT2ForQuestionAnswering": {"architectures": ["GPT2ForQuestionAnswering"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "explosion-testing/llama2-fewer-kv-heads": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 256, "intermediate_size": 512, "num_attention_heads": 4, "num_hidden_layers": 5, "vocab_size": 1024}, "hetpandya/t5-base-tapaco": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "PygmalionAI/pygmalion-2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "mrm8488/t5-base-finetuned-imdb-sentiment": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "explosion-testing/falcon-test": {"architectures": ["FalconForCausalLM"], "hidden_size": 32, "num_attention_heads": 4, "num_hidden_layers": 5, "vocab_size": 1024}, "ehartford/WizardLM-33B-V1.0-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/StableBeluga-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/WizardLM-Uncensored-SuperCOT-StoryTelling-30B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "jinaai/jina-embedding-s-en-v1": {"architectures": ["T5EncoderModel"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "FelixChao/vicuna-33b-coder": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/llama-30b-supercot-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "quantumaikr/llama-2-70b-fb16-orca-chat-10k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/airoboros-l2-13B-gpt4-1.4.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "EleutherAI/pythia-31m": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 256, "intermediate_size": 1024, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "hf-internal-testing/tiny-random-GPT2ForTokenClassification": {"architectures": ["GPT2ForTokenClassification"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "jondurbin/airoboros-l2-70b-gpt4-1.4.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "kimsan0622/gpt2-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 64007}, "TheBloke/EverythingLM-13B-16K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Linly-AI/Chinese-LLaMA-2-13B-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, 
"num_hidden_layers": 40, "vocab_size": 40076}, "BlackSamorez/rudialogpt3_medium_based_on_gpt2_2ch": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "EleutherAI/pythia-2.8b-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50304}, "TheBloke/llama-2-7B-Guanaco-QLoRA-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "google/byt5-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 6720, "d_model": 2560, "num_heads": 32, "num_layers": 36, "vocab_size": 384}, "TheBloke/wizard-vicuna-13B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TehVenom/Pygmalion-Vicuna-1.1-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "openaccess-ai-collective/wizard-mega-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-l2-7b-gpt4-m2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "openchat/openchat_v3.2_super": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "openaccess-ai-collective/manticore-13b-chat-pyg": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Neko-Institute-of-Science/pygmalion-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "unicamp-dl/ptt5-small-portuguese-vocab": {"architectures": ["T5WithLMHeadModel"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "hf-internal-testing/tiny-random-T5ForQuestionAnswering": {"architectures": ["T5ForQuestionAnswering"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 32100}, "microsoft/CodeGPT-small-java-adaptedGPT2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50260}, "unicamp-dl/ptt5-base-portuguese-vocab": {"architectures": ["T5WithLMHeadModel"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Fredithefish/ScarletPajama-3B-HF": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "hf-internal-testing/tiny-random-T5ForSequenceClassification": {"architectures": ["T5ForSequenceClassification"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 32100}, "TheBloke/Nous-Hermes-Llama-2-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "calvindoingstuff/DialoGPT-medium-luffy": {"architectures": 
["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "lvkaokao/llama2-7b-hf-chat-lora-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "skt/ko-gpt-trinity-1.2B-v0.5": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1920, "n_head": 16, "n_inner": 7680, "n_layer": 24, "vocab_size": 51200}, "saibo/llama-1B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 2, "vocab_size": 32000}, "vonjack/Qwen-LLaMAfied-HFTok-7B-Chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 151936}, "TheBloke/CodeLlama-34B-Python-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "GAIR/rst-all-11b": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "GeorgiaTechResearchInstitute/starcoder-gpteacher-code-instruct": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49153}, "jondurbin/airoboros-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "aisquared/dlite-v2-1_5b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": null, "n_layer": 48, "vocab_size": 50260}, "aiassociates/t5-small-grammar-correction-german": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "asi/gpt-fr-cased-small": {"n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50000}, "csebuetnlp/mT5_m2o_chinese_simplified_crossSum": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "openthaigpt/openthaigpt-1.0.0-alpha-7b-chat-ckpt-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "jondurbin/airoboros-l2-13b-2.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "sentence-transformers/sentence-t5-xl": {"architectures": ["T5EncoderModel"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "OpenBuddy/openbuddy-openllama-3b-v10-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 37120}, "TheBloke/guanaco-33B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "h2oai/h2ogpt-oasst1-512-20b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50432}, "Open-Orca/OpenOrca-Preview1-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, 
"num_hidden_layers": 40, "vocab_size": 32000}, "WizardLM/WizardLM-13B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "garage-bAInd/Camel-Platypus2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "wxjiao/alpaca-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "TheBloke/Wizard-Vicuna-13B-Uncensored-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "FelixChao/vicuna-7B-chemical": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Arc53/docsgpt-14b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "OpenAssistant/llama2-13b-megacode2-oasst": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32007}, "TheBloke/Lemur-70B-Chat-v1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32005}, "EleutherAI/pythia-6.9b-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "grimpep/L2-MythoMax22b-instruct-Falseblock": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "Austism/chronos-hermes-13b-v2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "UBC-NLP/AraT5v2-base-1024": {"d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 110208}, "fireballoon/baichuan-vicuna-chinese-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 64000}, "abeja/gpt2-large-japanese": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 32000}, "TheBloke/Airoboros-L2-70B-2.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "vicgalle/gpt2-alpaca-gpt4": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50260}, "flax-community/gpt2-small-indonesian": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/Nous-Hermes-13B-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "imone/LLaMA2_13B_with_EOT_token": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, 
"num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "Corianas/111m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 10, "vocab_size": 50257}, "The-Face-Of-Goonery/Huginn-v3-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ehartford/Samantha-1.11-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "heegyu/WizardVicuna-3B-0719": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "acrastt/Griffin-3B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "totally-not-an-llm/EverythingLM-13b-V2-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ikala/bloom-zh-3b-chat": {"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250688}, "Gryphe/MythoLogic-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "AlekseyKorshuk/vicuna-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "flax-community/gpt2-medium-persian": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50000}, "ehartford/samantha-1.1-llama-33b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "garage-bAInd/Platypus2-70B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "OpenLemur/lemur-70b-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32024}, "ausboss/llama-30b-supercot": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "jondurbin/airoboros-l2-70b-gpt4-m2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "lmqg/mt5-small-koquad-qg-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "TheBloke/OpenAssistant-SFT-7-Llama-30B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32016}, "TheBloke/h2ogpt-gm-oasst1-en-2048-falcon-40b-v2-GPTQ": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "GOAT-AI/GOAT-7B-Community": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, 
"num_hidden_layers": 32, "vocab_size": 32000}, "h2oai/h2ogpt-gm-oasst1-en-1024-20b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50432}, "beaugogh/pythia-1.4b-deduped-sharegpt": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50280}, "amurshak/llama-2-7b-miniguanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "psyche/kollama2-7b-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "IlyaGusev/fred_t5_ru_turbo_alpaca": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1536, "num_heads": 24, "num_layers": 24, "vocab_size": 50365}, "potsawee/t5-large-generation-race-Distractor": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "heegyu/WizardVicuna-Uncensored-3B-0719": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/openchat_v2_openorca_preview-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "CalderaAI/13B-Legerdemain-L2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "SebastianSchramm/Cerebras-GPT-111M-instruction": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 10, "vocab_size": 50258}, "Mikael110/llama-2-7b-guanaco-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Locutusque/gpt2-large-conversational": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50260}, "CalderaAI/13B-Ouroboros": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "chaoyi-wu/MedLLaMA_13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "YeungNLP/firefly-llama2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "garage-bAInd/GPlatty-30B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "pankajmathur/orca_mini_v2_13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "pankajmathur/model_007_13b_v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "chargoddard/Chronorctypus-Limarobormes-13b": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "timdettmers/guanaco-65b-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "digitous/13B-HyperMantis": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ckiplab/gpt2-base-chinese": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 21128}, "ehartford/dolphin-llama-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jphme/orca_mini_v2_ger_7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "malhajar/Platypus2-70B-instruct-4bit-gptq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "heegyu/WizardVicuna-open-llama-3b-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "pankajmathur/model_007": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "vicgalle/gpt2-alpaca": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50260}, "h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-7b-preview-300bt-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "stabilityai/stablecode-completion-alpha-3b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 49152}, "aisquared/dlite-v2-355m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50260}, "google/byt5-xxl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 12352, "d_model": 4672, "num_heads": 64, "num_layers": 36, "vocab_size": 384}, "ehartford/Samantha-1.11-CodeLlama-34b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "h2oai/h2ogpt-gm-oasst1-multilang-1024-20b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50432}, "TheBloke/koala-7B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ehartford/WizardLM-30B-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "clibrain/Llama-2-ft-instruct-es": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, 
"vocab_size": 32000}, "quantumaikr/llama-2-70b-fb16-guanaco-1k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "psyche/kogpt": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 32002}, "wenge-research/yayi-7b": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250684}, "Aspik101/WizardVicuna-Uncensored-3B-instruct-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "rombodawg/LosslessMegaCoder-llama2-7b-mini": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32007}, "TurkuNLP/gpt3-finnish-medium": {"architectures": ["BloomModel"], "hidden_size": 1024, "n_head": 16, "n_layer": 24, "vocab_size": 131072}, "pankajmathur/orca_mini_13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Mikael110/llama-2-13b-guanaco-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "totally-not-an-llm/PuddleJumper-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "jondurbin/airoboros-13b-gpt4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "CobraMamba/mamba-gpt-3b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "zarakiquemparte/zarablend-l2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Locutusque/gpt2-conversational-or-qa": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50262}, "frank098/Wizard-Vicuna-13B-juniper": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-gpt-3.5-turbo-100k-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "jondurbin/airoboros-33b-gpt4-1.4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "jondurbin/airoboros-l2-70b-gpt4-2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "MBZUAI/LaMini-Cerebras-1.3B": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 50258}, "h2oai/h2ogpt-research-oasst1-llama-65b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 
64, "num_hidden_layers": 80, "vocab_size": 32000}, "nkpz/llama2-22b-daydreamer-v3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "Aspik101/trurl-2-13b-pl-instruct_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "OpenAssistant/pythia-12b-pre-v8-12.5k-steps": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50288}, "breadlicker45/dough-instruct-base-001": {"architectures": ["LlamaForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50402}, "OpenBuddy/openbuddy-llama-30b-v7.1-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 37632}, "andreaskoepf/llama2-13b-megacode2_min100": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32007}, "ehartford/Samantha-1.11-70b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "flax-community/t5-recipe-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "BreadAi/PM_modelV2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1088, "n_head": 17, "n_inner": 4352, "n_layer": 14, "vocab_size": 50257}, "minlik/chinese-alpaca-33b-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 49954}, "jordiclive/Llama-2-70b-oasst-1-200": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32016}, "Lajonbot/tableBeluga-7B-instruct-pl-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "sia-ai/llama-2-7b-1-percent-open-orca-1000-steps-v0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "h2oai/h2ogpt-gm-oasst1-en-1024-12b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50688}, "jondurbin/airoboros-33b-gpt4-1.2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "openchat/openchat_8192": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "TaylorAI/Flash-Llama-3B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "yeontaek/llama-2-13B-ensemble-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 
40, "num_hidden_layers": 40, "vocab_size": 32000}, "Kirili4ik/ruDialoGpt3-medium-finetuned-telegram": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "WangZeJun/bloom-820m-chat": {"architectures": ["BloomForCausalLM"], "hidden_size": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 46145}, "4bit/Llama-2-70b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "chargoddard/llama2-22b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "augtoma/qCammel-13": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "NlpHUST/gpt2-vietnamese": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Monero/Manticore-13b-Chat-Pyg-Guanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "NousResearch/CodeLlama-34b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "aisquared/dlite-v2-124m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50260}, "pankajmathur/orca_mini_v2_7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "The-Face-Of-Goonery/Huginn-22b-Prototype": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "DevaMalla/llama7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "openaccess-ai-collective/manticore-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "nkpz/llama2-22b-chat-wizard-uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "davzoku/cria-llama2-7b-v1.3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TaylorAI/Flash-Llama-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Fredithefish/ReasonixPajama-3B-HF": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "TheBloke/Platypus-30B-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "LoupGarou/WizardCoder-Guanaco-15B-V1.1": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, 
"n_inner": 24576, "n_layer": 40, "vocab_size": 49153}, "TheBloke/guanaco-65B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "hakurei/lotus-12B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50688}, "bofenghuang/vigogne-33b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "grimpep/llama2-22B-GPLATTY": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "concedo/Pythia-70M-ChatSalad": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50278}, "rombodawg/LosslessMegaCoder-llama2-13b-mini": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32007}, "TaylorAI/Flash-Llama-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/chronos-wizardlm-uc-scot-st-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "OpenBuddy/openbuddy-llama-65b-v8-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 37632}, "ajibawa-2023/scarlett-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/medalpaca-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "elinas/chronos-33b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "OpenBuddy/openbuddy-atom-13b-v9-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 49953}, "OpenAssistant/pythia-12b-sft-v8-rlhf-2k-steps": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50282}, "TheTravellingEngineer/llama2-7b-chat-hf-v4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Ejafa/vicuna_7B_vanilla_1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "yulan-team/YuLan-Chat-2-13b-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 51200}, "huashiyiqike/testmodel": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 10, 
"vocab_size": 50257}, "TheBloke/WizardLM-30B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "notstoic/PygmalionCoT-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Vicuna-33B-1-3-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "FelixChao/vicuna-7B-physics": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/tulu-30B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "jondurbin/airoboros-65b-gpt4-1.4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "uukuguy/speechless-llama2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "digitous/13B-Chimera": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-7b-gpt4-1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "grimpep/llama2-28B-Airo03": {"architectures": ["LlamaForCausalLM"], "hidden_size": 7296, "intermediate_size": 22016, "num_attention_heads": 57, "num_hidden_layers": 40, "vocab_size": 32000}, "ehartford/CodeLlama-34b-Instruct-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "YeungNLP/firefly-ziya-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 39424}, "TheTravellingEngineer/bloom-560m-RLHF-v2": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "TheTravellingEngineer/llama2-7b-chat-hf-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "uukuguy/speechless-hermes-coig-lite-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "BreadAi/gpt-Youtube": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "Aspik101/llama-30b-instruct-2048-PL-lora": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "beaugogh/Llama2-13b-sharegpt4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, 
"gaodrew/gaodrew-gorgonzola-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "OpenBuddy/openbuddy-llama2-13b-v11-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 37632}, "TheBloke/guanaco-13B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/WizardLM-13B-V1-1-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "NousResearch/CodeLlama-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "BreadAi/MusePy-1-2": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "jondurbin/airoboros-33b-gpt4-1.3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "YeungNLP/firefly-bloom-7b1": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "grimpep/llama2-22b-wizard_vicuna": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "Fredithefish/Guanaco-3B-Uncensored": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "digitous/Alpacino13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "mncai/SGPT-1.3B-insurance-epoch10": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 30080}, "TheTravellingEngineer/llama2-7b-chat-hf-dpo": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "yeontaek/Platypus2xOpenOrca-13B-LoRa": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "TheBloke/gpt4-alpaca-lora-30b-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "bhenrym14/airophin-13b-pntk-16k-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Kimiko-13B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "porkorbeef/Llama-2-13b-12_153950": {"architectures": ["LlamaModel"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "PSanni/Deer-3b": 
{"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250683}, "IGeniusDev/llama13B-quant8-testv1-openorca-customdataset": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Neko-Institute-of-Science/metharme-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "alibidaran/medical_transcription_generator": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Panchovix/airoboros-33b-gpt4-1.2-SuperHOT-8k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "digitous/Alpacino30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "lgaalves/gpt2-dolly": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TaylorAI/FLAN-Llama-7B-2_Llama2-7B-Flash_868_full_model": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "zarakiquemparte/zarafusionex-1.1-l2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "OpenAssistant/pythia-12b-sft-v8-2.5k-steps": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50288}, "TheBloke/airoboros-13B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/robin-33B-v2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "Aspik101/trurl-2-7b-pl-instruct_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "llama-anon/petra-13b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "TFLai/gpt2-turkish-uncased": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "health360/Healix-3B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/Mythalion-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-7b-preview-300bt": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "pe-nlp/llama-2-13b-vicuna-wizard": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, 
"intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "yeontaek/Platypus2-13B-QLoRa": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "acrastt/OmegLLaMA-3B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "jslin09/bloom-560m-finetuned-fraud": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "YeungNLP/firefly-bloom-2b6-v2": {"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 46145}, "xzuyn/LLaMa-1-MedicWizard-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "Azure99/blossom-v2-3b": {"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "TheBloke/Airoboros-L2-13B-2.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "MetaIX/GPT4-X-Alpasta-30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32016}, "PocketDoc/Dans-PersonalityEngine-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "vicgalle/alpaca-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Corianas/590m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 12, "n_inner": 6144, "n_layer": 18, "vocab_size": 50257}, "OpenBuddy/openbuddy-openllama-13b-v7-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 38656}, "gywy/llama2-13b-chinese-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 49376}, "Corianas/Quokka_590m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 12, "n_inner": 6144, "n_layer": 18, "vocab_size": 50260}, "aisquared/dlite-v1-355m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50260}, "aisquared/dlite-v1-774m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50260}, "Fredithefish/RedPajama-INCITE-Chat-3B-Instruction-Tuning-with-GPT-4": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "project-baize/baize-v2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Project-Baize-v2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 
32000}, "FabbriSimo01/GPT_Large_Quantized": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "ajibawa-2023/carl-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Azure99/blossom-v1-3b": {"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "Aspik101/30B-Lazarus-instruct-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "yeontaek/Platypus2xOpenOrca-13B-IA3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "Enno-Ai/ennodata-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "FabbriSimo01/Cerebras_1.3b_Quantized": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 50257}, "migtissera/Synthia-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "pe-nlp/llama-2-13b-platypus-vicuna-wizard": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "yeontaek/Platypus2xOpenOrca-13B-IA3-ensemble": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "Corianas/1.3b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 50257}, "Rachneet/gpt2-xl-alpaca": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": null, "n_layer": 48, "vocab_size": 50257}, "Aeala/VicUnlocked-alpaca-30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/VicUnlocked-30B-LoRA-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "Monero/WizardLM-Uncensored-SuperCOT-StoryTelling-30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "bavest/fin-llama-33b-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "openchat/openchat_v2_w": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "FabbriSimo01/Bloom_1b_Quantized": {"architectures": ["BloomForCausalLM"], "hidden_size": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "Aspik101/tulu-7b-instruct-pl-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, 
"TheTravellingEngineer/llama2-7b-chat-hf-v3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "yeontaek/llama-2-70b-IA3-guanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Kunhao/pile-7b-250b-tokens": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 49152}, "yeontaek/llama-2-13b-QLoRA": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "yeontaek/llama-2-13b-Beluga-QLoRA": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "yeontaek/Platypus2xOpenOrca-13B-IA3-v3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "ausboss/llama7b-wizardlm-unfiltered": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/h2ogpt-oasst1-512-30B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "bofenghuang/vigogne-7b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "NYTK/PULI-GPTrio": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 150016}, "LLMs/WizardLM-30B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "openaccess-ai-collective/minotaur-13b-fixed": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheTravellingEngineer/bloom-1b1-RLHF-v2": {"architectures": ["BloomForCausalLM"], "hidden_size": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "yeontaek/Platypus2xOpenOrca-13B-IA3-v4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "BreadAi/DiscordPy": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1088, "n_head": 17, "n_inner": 4352, "n_layer": 14, "vocab_size": 50257}, "TehVenom/oasst-sft-6-llama-33b-xor-MERGED-16bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32016}, "gaodrew/gaodrew-llama-30b-instruct-2048-Open-Platypus-100steps": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "yeontaek/Platypus2xOpenOrca-13B-IA3-v2.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, 
"yeontaek/Platypus2xOpenOrca-13B-LoRa-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "dvruette/oasst-pythia-12b-6000-steps": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50288}, "LoupGarou/WizardCoder-Guanaco-15B-V1.0": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49153}, "KnutJaegersberg/gpt-2-xl-EvolInstruct": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": null, "n_layer": 48, "vocab_size": 50257}, "Lajonbot/WizardLM-13B-V1.2-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "yeontaek/Platypus2-13B-IA3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "zarakiquemparte/zaraxe-l2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "BreadAi/gpt-YA-1-1_70M": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "dvruette/oasst-pythia-12b-reference": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50288}, "Panchovix/WizardLM-33B-V1.0-Uncensored-SuperHOT-8k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "titan087/OpenLlama13B-Guanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "w601sxs/b1ade-1b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "Andron00e/YetAnother_Open-Llama-3B-LoRA": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "quantumaikr/QuantumLM": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-65b-gpt4-2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Aspik101/llama-30b-2048-instruct-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "yeontaek/Platypus2-13B-LoRa": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "zarakiquemparte/zarafusionix-l2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, 
"huggingtweets/gladosystem": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "eachadea/legacy-vicuna-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "Aeala/GPT4-x-AlpacaDente2-30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32016}, "shibing624/chinese-llama-plus-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 49953}, "euclaise/gpt-neox-122m-minipile-digits": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 48262}, "TheBloke/UltraLM-13B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "lvkaokao/llama2-7b-hf-instruction-lora": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "BreadAi/StoryPy": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "dvruette/oasst-pythia-12b-flash-attn-5000-steps": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50288}, "aisquared/dlite-v1-124m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "ewof/koishi-instruct-3b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "h2oai/h2ogpt-gm-oasst1-en-1024-open-llama-7b-preview-400bt": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/airoboros-7b-gpt4-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "jondurbin/airoboros-13b-gpt4-1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/tulu-13B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "yhyhy3/med-orca-instruct-33b": {"architectures": ["LlamaModel"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "heegyu/LIMA-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "abhishek/llama2guanacotest": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "heegyu/LIMA2-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, 
"intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Corianas/Quokka_256m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1088, "n_head": 17, "n_inner": 4352, "n_layer": 14, "vocab_size": 50260}, "golaxy/gogpt-560m": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "OptimalScale/robin-7b-v2-delta": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "bofenghuang/vigogne-13b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "klosax/pythia-160m-deduped-step92k-193bt": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "golaxy/gogpt2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 68420}, "YeungNLP/firefly-llama2-13b-v1.2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "WhoTookMyAmogusNickname/NewHope_HF_not_official": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "ehartford/CodeLlama-34b-Python-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "KnutJaegersberg/megatron-GPT-2-345m-EvolInstruct": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": 4096, "n_layer": 24, "vocab_size": 50257}, "Aeala/Alpaca-elina-65b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Monero/WizardLM-30B-Uncensored-Guanaco-SuperCOT-30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "csitfun/llama-7b-logicot": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "OptimalScale/robin-65b-v2-delta": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "LLMs/WizardLM-13B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "CobraMamba/mamba-gpt-3b-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "aisquared/dlite-v1-1_5b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": null, "n_layer": 48, "vocab_size": 50257}, "nthngdy/pythia-owt2-70m-100k": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "LLMs/AlpacaGPT4-7B-elina": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Yhyu13/oasst-rlhf-2-llama-30b-7k-steps-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32006}, "jondurbin/airoboros-7b-gpt4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "grantprice/Cerebras-GPT-590M-finetuned-DND": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 12, "n_inner": 6144, "n_layer": 18, "vocab_size": 50257}, "TheBloke/robin-13B-v2-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/robin-65b-v2-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "FPHam/Free_Sydney_13b_HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "acrastt/RedPajama-INCITE-Chat-Instruct-3B-V1": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "jondurbin/airoboros-65b-gpt4-m2.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "heegyu/LIMA2-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "BreadAi/MuseCan": {"architectures": ["GPT2LMHeadModel"], "n_embd": 960, "n_head": 15, "n_inner": 9, "n_layer": 5, "vocab_size": 50304}, "ausboss/llama-13b-supercot": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "openaccess-ai-collective/manticore-30b-chat-pyg-alpha": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "OptimalScale/robin-13b-v2-delta": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "quantumaikr/llama-2-7b-hf-guanaco-1k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Fredithefish/RedPajama-INCITE-Chat-3B-ShareGPT-11K": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "CalderaAI/13B-BlueMethod": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "SaylorTwift/gpt2_test": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257}, "WeOpenML/PandaLM-Alpaca-7B-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 
32, "vocab_size": 32001}, "WeOpenML/Alpaca-7B-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "sumo43/lora_moe_7b_baseline": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "wenge-research/yayi-13b-llama2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32005}, "golaxy/gowizardlm": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 68420}, "abhiramtirumala/DialoGPT-sarcastic-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Corianas/Quokka_2.7b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2560, "n_head": 32, "n_inner": 10240, "n_layer": 32, "vocab_size": 50260}, "Corianas/256_5epoch": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1088, "n_head": 17, "n_inner": 4352, "n_layer": 14, "vocab_size": 50257}, "dvruette/llama-13b-pretrained": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "TheBloke/alpaca-lora-65B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "ashercn97/giraffe-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Aspik101/Vicuzard-30B-Uncensored-instruct-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "TheBloke/dromedary-65b-lora-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Yhyu13/chimera-inst-chat-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "ehartford/based-30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "concedo/Vicuzard-30B-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "64bits/LexPodLM-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "MayaPH/GodziLLa-30B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "Aspik101/vicuna-7b-v1.3-instruct-pl-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "julianweng/Llama-2-7b-chat-orcah": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, 
"num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "heegyu/RedTulu-Uncensored-3B-0719": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "Aspik101/Llama-2-7b-hf-instruct-pl-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "quantumaikr/QuantumLM-70B-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "BreadAi/gpt-YA-1-1_160M": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "dvruette/oasst-pythia-12b-pretrained-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50288}, "Aeala/GPT4-x-AlpacaDente-30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32016}, "TehVenom/Pygmalion_AlpacaLora-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "LLMs/Stable-Vicuna-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "quantumaikr/open_llama_7b_hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Aeala/GPT4-x-Alpasta-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "Fredithefish/CrimsonPajama": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "openaccess-ai-collective/hippogriff-30b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "camel-ai/CAMEL-13B-Role-Playing-Data": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/landmark-attention-llama7b-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32002}, "TheBloke/robin-33B-v2-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/GPlatty-30B-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/Chinese-Alpaca-33B-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 49954}, "TheBloke/CAMEL-33B-Combined-Data-SuperHOT-8K-fp16": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "klosax/open_llama_13b_600bt_preview": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Aspik101/Nous-Hermes-13b-pl-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "jondurbin/airoboros-l2-7b-gpt4-1.4.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "YeungNLP/firefly-llama-30b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "ashercn97/manatee-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "lizhuang144/starcoder_mirror": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "Aspik101/vicuna-13b-v1.5-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Aspik101/Redmond-Puffin-13B-instruct-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "Aspik101/StableBeluga-13B-instruct-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "RoversX/llama-2-7b-hf-small-shards-Samantha-V1-SFT": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Corianas/Quokka_1.3b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 50260}, "nthngdy/pythia-owt2-70m-50k": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "danielhanchen/open_llama_3b_600bt_preview": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/VicUnlocked-alpaca-65B-QLoRA-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "IDEA-CCNL/Ziya-LLaMA-13B-Pretrain-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 39424}, "kevinpro/Vicuna-13B-CoT": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "wahaha1987/llama_7b_sharegpt94k_fastchat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "openaccess-ai-collective/minotaur-13b": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/tulu-7B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "golaxy/gogpt-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 68420}, "Aeala/Enterredaas-33b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "kingbri/chronolima-airo-grad-l2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheTravellingEngineer/bloom-560m-RLHF": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "HWERI/Llama2-7b-sharegpt4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "l3utterfly/llama2-7b-layla": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "yeontaek/llama-2-13b-Guanaco-QLoRA": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "duliadotio/dulia-13b-8k-alpha": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "yeontaek/llama-2-13B-ensemble-v3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "dvruette/oasst-gpt-neox-20b-3000-steps": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50288}, "dvruette/oasst-gpt-neox-20b-1000-steps": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50288}, "huggingtweets/jerma985": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Dampish/Dante-2.8B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "TheBloke/Planner-7B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "klosax/pythia-70m-deduped-step44k-92bt": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "klosax/open_llama_7b_400bt_preview": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "golaxy/gogpt2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, 
"num_hidden_layers": 32, "vocab_size": 68420}, "Lajonbot/Llama-2-7b-chat-hf-instruct-pl-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheTravellingEngineer/llama2-7b-chat-hf-guanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Lajonbot/vicuna-7b-v1.5-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "kingbri/airolima-chronos-grad-l2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "yeontaek/llama-2-70B-ensemble-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "dvruette/oasst-llama-13b-2-epochs": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "dvruette/llama-13b-pretrained-sft-epoch-1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "dvruette/llama-13b-pretrained-dropout": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "hakurei/instruct-12b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50277}, "dvruette/gpt-neox-20b-full-precision": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 6144, "intermediate_size": 24576, "num_attention_heads": 64, "num_hidden_layers": 44, "vocab_size": 50288}, "Monero/WizardLM-13b-OpenAssistant-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "Yhyu13/llama-30B-hf-openassitant": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "jondurbin/airoboros-65b-gpt4-1.2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "camel-ai/CAMEL-33B-Combined-Data": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "MBZUAI/bactrian-x-llama-13b-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "dsvv-cair/alpaca-cleaned-llama-30b-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "YeungNLP/firefly-llama-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "YeungNLP/firefly-llama-13b-v1.2": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "heegyu/WizardVicuna2-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "dvruette/oasst-llama-13b-1000-steps": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "dvruette/llama-13b-pretrained-sft-do2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "pillowtalks-ai/delta13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "illuin/test-custom-llama": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "MrNJK/gpt2-xl-sft": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": null, "n_layer": 48, "vocab_size": 50257}, "PocketDoc/Dans-PileOfSets-Mk1-llama-13b-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-65b-gpt4-1.3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "bhenrym14/airoboros-33b-gpt4-1.4.1-PI-8192-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "frank098/WizardLM_13B_juniper": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "golaxy/goims": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 68420}, "dvruette/oasst-pythia-6.9b-4000-steps": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50288}, "mncai/chatdoctor": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "wannaphong/openthaigpt-0.1.0-beta-full-model_for_open_llm_leaderboard": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "golaxy/gogpt-3b-bloom": {"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "golaxy/gogpt-7b-bloom": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "jondurbin/airoboros-33b-gpt4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "jondurbin/airoboros-13b-gpt4-1.2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, 
"num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-7b-gpt4-1.2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "jondurbin/airoboros-13b-gpt4-1.3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-7b-gpt4-1.4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "jondurbin/airoboros-13b-gpt4-1.4-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-7b-gpt4-1.4.1-qlora": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "frank098/orca_mini_3b_juniper": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "Lajonbot/vicuna-13b-v1.3-PL-lora_unload": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jxhong/CAlign-alpaca-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "quantumaikr/KoreanLM-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "keyfan/vicuna-chinese-replication-v1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 49953}, "jondurbin/airoboros-7b-gpt4-1.3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "jerryjalapeno/nart-100k-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "xzuyn/Alpacino-SuperCOT-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "wahaha1987/llama_13b_sharegpt94k_fastchat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "quantumaikr/QuantumLM-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Undi95/ReMM-SLERP-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "huggingtweets/bladeecity-jerma985": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "pszemraj/pythia-6.9b-HC3": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, 
"vocab_size": 50432}, "CalderaAI/30B-Epsilon": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TFLai/OpenOrca-Platypus2-13B-QLoRA-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "alpindale/pygmalion-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "jondurbin/airoboros-c34b-2.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "abacaj/starcoderbase-1b-sft": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 49153}, "bongchoi/test-llama2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TinyPixel/lima-test": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "quantumaikr/llama-2-70B-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "yeontaek/llama-2-13B-ensemble-v4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "cointegrated/rut5-base-absum": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 30000}, "pankajmathur/model_420_preview": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Fredithefish/Guanaco-3B-Uncensored-v2": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "yeontaek/llama-2-70B-ensemble-v4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Writer/palmyra-large": {"architectures": ["GPT2LMHeadModel"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 44, "vocab_size": 50257}, "RobbeD/OpenLlama-Platypus-3B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TFLai/OrcaMini-Platypus2-13B-QLoRA-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "NobodyExistsOnTheInternet/PuffedConvo13bLoraE4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Sao10K/Medusa-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Manticore-13B-Chat-Pyg-Guanaco-SuperHOT-8K-GPTQ": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TFLai/Nous-Hermes-Platypus2-13B-QLoRA-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "dhmeltzer/llama-7b-SFT_eli5_wiki65k_1024_r_64_alpha_16_merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TFLai/MythoMix-Platypus2-13B-QLoRA-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "chargoddard/llama-2-34b-uncode": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "zarakiquemparte/zaraxls-l2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TFLai/Stable-Platypus2-13B-QLoRA-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Danielbrdz/Barcenas-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "alan-turing-institute/mt5-large-finetuned-mnli-xtreme-xnli": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 250112}, "TFLai/Limarp-Platypus2-13B-QLoRA-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TFLai/PuddleJumper-Platypus2-13B-QLoRA-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "The-Face-Of-Goonery/Huginn-13b-v4.5": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-l2-7b-2.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "allenai/unifiedqa-v2-t5-large-1363200": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/OpenAssistant-Llama2-13B-Orca-8K-3319-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "synapsoft/Llama-2-7b-hf-flan2022-1.2M": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "yeontaek/Platypus2-13B-LoRa-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "KES/T5-KES": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, 
"vocab_size": 32128}, "robowaifudev/megatron-gpt2-345m": {"vocab_size": 50257, "n_embd": 1024, "n_layer": 24, "n_head": 16, "n_inner": 4096, "architectures": ["GPT2LMHeadModel"]}, "Sao10K/Mythical-Destroyer-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "CHIH-HUNG/llama-2-13b-dolphin_20w": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "The-Face-Of-Goonery/Huginn-13b-V4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "haining/scientific_abstract_simplification": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "ChanonUtupon/openthaigpt-merge-lora-llama-2-7B-3470k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 56554}, "chaoyi-wu/PMC_LLAMA_7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "CHIH-HUNG/llama-2-13b-OpenOrca_5w": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "clibrain/lince-zero": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "TheBloke/Project-Baize-v2-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "uukuguy/speechless-codellama-platypus-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "CHIH-HUNG/llama-2-13b-dolphin_5w": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "yeontaek/airoboros-2.1-llama-2-13B-QLoRa": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "uukuguy/speechless-llama2-luban-orca-platypus-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Norquinal/llama-2-7b-claude-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TFLai/Luban-Platypus2-13B-QLora-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "conceptofmind/Open-LLongMA-3b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "Norquinal/llama-2-7b-claude-chat-rp": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, 
"lgaalves/llama-2-7b-hf_open-platypus": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ziqingyang/chinese-llama-2-7b-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 55296}, "yeontaek/llama-2-13B-ensemble-v6": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "yeontaek/llama-2-70B-ensemble-v7": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "ubikpt/t5-small-finetuned-cnn": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "rajkumarrrk/t5-base-fine-tuned-on-cnn-dm": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "google/t5-efficient-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "TFLai/Airboros2.1-Platypus2-13B-QLora-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "dhmeltzer/llama-7b-SFT_ds_eli5_1024_r_64_alpha_16_merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "h2oai/h2ogpt-4096-llama2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "dhmeltzer/llama-7b-SFT_ds_wiki65k_1024_r_64_alpha_16_merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TFLai/Ensemble5-Platypus2-13B-QLora-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "TFLai/Athena-Platypus2-13B-QLora-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "4bit/Llama-2-7b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TFLai/MythicalDestroyerV2-Platypus2-13B-QLora-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TFLai/OpenOrcaPlatypus2-Platypus2-13B-QLora-0.80-epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "Salesforce/codegen25-7b-mono": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 51200}, "Sao10K/Stheno-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, 
"num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "yeontaek/WizardCoder-Python-13B-LoRa": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "uukuguy/speechless-orca-platypus-coig-lite-2k-0.6e-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "casperhansen/vicuna-7b-v1.5-awq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "luffycodes/nash-vicuna-33b-v1dot3-ep2-w-rag-w-simple": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "CHIH-HUNG/llama-2-13b-OpenOrca_20w": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "google/t5-efficient-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "TheBloke/orca_mini_v2_7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "tianyil1/denas-llama2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Sao10K/Stheno-Inverted-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "junelee/ko_vicuna_7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Seungyoun/codellama-7b-instruct-pad": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32025}, "TheBloke/Kimiko-v2-13B-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "uukuguy/speechless-orca-platypus-coig-lite-4k-0.5e-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "uukuguy/speechless-orca-platypus-coig-lite-4k-0.6e-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "Undi95/UndiMix-v1-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "h2oai/h2ogpt-gm-oasst1-en-2048-falcon-7b": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "yeontaek/llama-2-70B-ensemble-v6": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/vicuna-13B-v1.5-16K-GGML": {}, "KnutJaegersberg/black_goo_recipe_a": {"architectures": ["LlamaForCausalLM"], 
"hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "PKU-Alignment/beaver-7b-v1.0-reward": {"architectures": ["LlamaModelForScore"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "KnutJaegersberg/black_goo_recipe_b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "lgaalves/gpt2_open-platypus": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "cointegrated/rut5-base-multitask": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 30000}, "h2oai/h2ogpt-gm-oasst1-multilang-2048-falcon-7b": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "Cheng98/llama-160m": {"architectures": ["LlamaForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 32000}, "Andron00e/YetAnother_Open-Llama-3B-LoRA-OpenOrca": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "lgaalves/gpt2_guanaco-dolly-platypus": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "gagan3012/k2t-base": {"architectures": ["T5WithLMHeadModel"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "marcchew/Platypus-2-7B-LaMini-14K": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "lgaalves/gpt2_platypus-dolly-guanaco": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "czearing/article-title-generator": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "luffycodes/mcq-vicuna-13b-v1.5": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Universal-NER/UniNER-7B-definition": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Meli/GPT2-Prompt": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50259}, "s-nlp/ruT5-base-detox": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "cointegrated/rut5-base-paraphraser": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 30000}, "DevaMalla/llama7b_alpaca_bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "Universal-NER/UniNER-7B-type": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/starchat-beta-GPTQ": 
{"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49156}, "bigscience/sgpt-bloom-7b1-msmarco": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 30, "num_attention_heads": 32, "vocab_size": 250682}, "4bit/Llama-2-13b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "uukuguy/speechless-llama2-hermes-orca-platypus-wizardlm-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ClueAI/PromptCLUE-base-v1-5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "budecosystem/genz-13b-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/LlongOrca-13B-16K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32004}, "ozcangundes/mt5-multitask-qa-qg-turkish": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250102}, "EleutherAI/pythia-410m-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "CHIH-HUNG/llama-2-13b-FINETUNE2_3w": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "sonoisa/t5-base-japanese-v1.1": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "bolbolzaban/gpt2-persian": {"n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 25000, "architectures": ["GPT2LMHeadModel"]}, "google/t5-large-ssm": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "DeepPavlov/rudialogpt3_medium_based_on_gpt2_v2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "Mikivis/xuanxuan": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "uukuguy/speechless-llama2-hermes-orca-platypus-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "KnutJaegersberg/black_goo_recipe_c": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "beaugogh/Llama2-7b-sharegpt4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Salesforce/codet5p-770m-py": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "codefuse-ai/CodeFuse-CodeLlama-34B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, 
"num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "AUTOMATIC/promptgen-majinai-safe": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 50257}, "reciprocate/shepherd-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Devio/test-22B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "acrastt/Bean-3B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/L2-MythoMax22b-Instruct-Falseblock-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "vihangd/smartplat-3b-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "jinaai/jina-embedding-b-en-v1": {"architectures": ["T5EncoderModel"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "yahma/llama-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "uukuguy/speechless-codellama-orca-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "CHIH-HUNG/llama-2-13b-FINETUNE1_17w": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "VMware/open-llama-13b-open-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ToolBench/ToolLLaMA-2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "luffycodes/mcq-hal-vicuna-13b-v1.5": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "TheBloke/BigTranslate-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 53613}, "PeanutJar/LLaMa-2-PeanutButter_v18_A-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "openbmb/UltraLM-65b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Mikivis/gpt2-large-lora-sft": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "Devio/test-3b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 16, "vocab_size": 32000}, "akhooli/gpt2-small-arabic": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, 
"n_layer": 12, "vocab_size": 50257}, "Rardilit/Panther_v1": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ValiantLabs/ShiningValiant": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Devio/test100": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "Devio/testC": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Chronoboros-33B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/Pygmalion-13B-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "vihangd/smartplat-3b-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "laituan245/t5-v1_1-small-smiles2caption-ft-from-pretrained-c4": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "4bit/Llama-2-7b-Chat-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "CHIH-HUNG/llama-2-13b-FINETUNE2_3w-gate_up_down_proj": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "CHIH-HUNG/llama-2-13b-FINETUNE2_3w-q_k_v_o_proj": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/vicuna-33B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "Devio/test-1400": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/gpt4-alpaca-lora-30B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "notstoic/pygmalion-13b-4bit-128g": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "NousResearch/Yarn-Llama-2-7b-128k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Den4ikAI/FRED-T5-LARGE_text_qa": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 50364}, "valhalla/t5-base-qa-qg-hl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, "Undi95/ReMM-L2-13B-v1": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Zarablend-L2-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "KnutJaegersberg/black_goo_recipe_d": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "ckip-joint/bloom-1b1-zh": {"architectures": ["BloomModel"], "hidden_size": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "seonglae/llama-2-13b-chat-hf-gptq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Trelis/Llama-2-7b-chat-hf-sharded-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "KnutJaegersberg/LLongMA-3b-LIMA": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "h2oai/h2ogpt-gm-oasst1-en-xgen-7b-8k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 51200}, "PeanutJar/LLaMa-2-PeanutButter_v18_B-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ehartford/WizardLM-1.0-Uncensored-CodeLlama-34b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "uukuguy/speechless-codellama-orca-platypus-13b-0.10e": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "DeepESP/gpt2-spanish": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "paust/pko-flan-t5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 50358}, "ThomasNLG/t5-qa_squad2neg-en": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "PharMolix/BioMedGPT-LM-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "eenzeenee/t5-base-korean-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50358}, "porkorbeef/Llama-2-13b-public": {"architectures": ["LlamaModel"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/WizardLM-Uncensored-Falcon-7B-GPTQ": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65025}, "dahara1/weblab-10b-instruction-sft-GPTQ": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4864, "intermediate_size": 19456, "num_attention_heads": 38, "num_hidden_layers": 36, "vocab_size": 50277}, 
"CHIH-HUNG/llama-2-13b-FINETUNE2_TEST_2.2w": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "mrm8488/t5-small-finetuned-emotion": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "gurgutan/saiga2-13b-4bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "IlyaGusev/rut5_base_sum_gazeta": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 30000}, "TheBloke/Llama-2-13B-German-Assistant-v4-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 37632}, "gaodrew/OpenOrca-Platypus2-13B-thera-1250": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "minlik/chinese-llama-7b-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 49953}, "TheBloke/Stable-Platypus2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Luna-AI-Llama2-Uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "allenai/t5-small-squad2-question-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "microsoft/bloom-deepspeed-inference-fp16": {"architectures": ["BloomModel"], "n_layer": 70, "num_attention_heads": 112, "vocab_size": 250880}, "csebuetnlp/banglat5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "potsawee/t5-large-generation-race-QuestionAnswer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "grammarly/coedit-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32100}, "Narrativaai/bloom-560m-finetuned-totto-table-to-text": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "jjaaaww/posi_13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "IDEA-CCNL/Randeng-T5-784M-MultiTask-Chinese": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32596}, "Undi95/Nous-Hermes-13B-Code": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "paust/pko-t5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 50358}, "learnanything/llama-7b-huggingface": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "weiren119/Taiwan-LLaMa-v1.0-4bits-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ml6team/keyphrase-generation-t5-small-inspec": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "TheBloke/CodeLlama-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "Undi95/MLewd-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "tscholak/cxmefzzi": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32102}, "Gaivoronsky/ruGPT-3.5-13B-8bit": {"architectures": ["GPT2LMHeadModel"], "n_embd": 5120, "n_head": 40, "n_inner": null, "n_layer": 40, "vocab_size": 50272}, "SatoruDano/llama-2-7b-finetuned_v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ClueAI/PromptCLUE-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "uukuguy/speechless-codellama-orca-airoboros-13b-0.10e": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "anonymous-german-nlp/german-gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 52000}, "fxmarty/gpt2-tiny-onnx": {"intermediate_size": 37, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "vocab_size": 1000}, "prakharz/DIAL-FLANT5-XL": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32100}, "h2oai/h2ogpt-oasst1-falcon-40b": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "synapsoft/Llama-2-7b-chat-hf-flan2022-1.2M": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Undi95/ReMM-L2-13B-PIPPA": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "CHIH-HUNG/llama-2-13b-FINETUNE1_17w-gate_up_down_proj": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Fredithefish/Guanaco-7B-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "conceptofmind/Yarn-Llama-2-13b-128k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Undi95/LewdEngine": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, 
"num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/CodeLlama-7B-Instruct-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "CHIH-HUNG/llama-2-13b-Open_Platypus_and_ccp_2.6w": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "CHIH-HUNG/llama-2-13b-FINETUNE1_17w-q_k_v_o_proj": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jondurbin/airoboros-33b-2.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "Salesforce/codet5p-220m-py": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "Danielbrdz/CodeBarcenas-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "SJ-Ray/Re-Punctuate": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "EasthShin/Youth_Chatbot_Kogpt2-base": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 51200}, "ThomasNLG/t5-qg_squad1-en": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "EleutherAI/pythia-160m-deduped-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "MBZUAI/LaMini-T5-223M": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "HooshvareLab/gpt2-fa": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 42001}, "TFLai/Nova-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "conceptofmind/LLongMA-2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TDC2023/trojan-base-pythia-1.4b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "phpaiola/ptt5-base-summ-xlsum": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TFLai/SpeechlessV1-Nova-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/stablecode-instruct-alpha-3b-GPTQ": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 49152}, "stanford-crfm/music-small-800k": {"vocab_size": 55028, "n_embd": 768, "n_layer": 12, "n_head": 12, "n_inner": null, "architectures": null}, 
"TFLai/EnsembleV5-Nova-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "declare-lab/flan-alpaca-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "jpwahle/t5-large-word-sense-disambiguation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "lizhuang144/flan-t5-large-factual-sg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "DKYoon/mt5-base-lm-adapt": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "TheBloke/guanaco-65B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Salesforce/codegen25-7b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 51200}, "bigscience-data/sgpt-bloom-1b7-nli": {"architectures": ["BloomModel"], "n_inner": null, "n_layer": 24, "num_attention_heads": 16, "vocab_size": 250880}, "TurkuNLP/gpt3-finnish-small": {"architectures": ["BloomModel"], "hidden_size": 768, "n_head": 12, "n_layer": 12, "vocab_size": 131072}, "jordiclive/flan-t5-3b-summarizer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "marblyso/DialoGPT-small-what-the-fuck": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "retrieva-jp/t5-small-short": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "codeparrot/codeparrot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": null, "n_layer": 48, "vocab_size": 32768}, "openthaigpt/openthaigpt-1.0.0-beta-7b-chat-ckpt-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 56554}, "Rocketknight1/falcon-rw-1b": {"architectures": ["FalconForCausalLM"], "hidden_size": 2048, "num_attention_heads": 32, "num_hidden_layers": 24, "vocab_size": 50304}, "TaylorAI/Flash-Llama-30M-20001": {"architectures": ["LlamaForCausalLM"], "hidden_size": 384, "intermediate_size": 1024, "num_attention_heads": 12, "num_hidden_layers": 4, "vocab_size": 32000}, "castorini/t5-base-canard": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "r3dhummingbird/DialoGPT-medium-joshua": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "IDEA-CCNL/Wenzhong2.0-GPT2-110M-BertTokenizer-chinese": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 12, "vocab_size": 21133}, "TigerResearch/tigerbot-13b-chat-8bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 60928}, "pranavpsv/gpt2-genre-story-generator": 
{"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50266}, "Photolens/llama-2-7b-langchain-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ck46/t5-base-hotpot-qa-qg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, "castorini/monot5-small-msmarco-10k": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "yujiepan/llama-2-tiny-random": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8, "intermediate_size": 32, "num_attention_heads": 2, "num_hidden_layers": 1, "vocab_size": 32000}, "castorini/doc2query-t5-base-msmarco": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "oliverguhr/spelling-correction-multilingual-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "allenai/unifiedqa-t5-11b": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "TheBloke/CodeLlama-34B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "snorkelai/sdnet": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "SiberiaSoft/SiberianFRED-T5-XL": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1536, "num_heads": 24, "num_layers": 24, "vocab_size": 50365}, "sultan/ArabicT5-Base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 20, "vocab_size": 32000}, "nikaashpuri/gpt-expt-sp-v3-K-600-MA-Mac-actions-kmeans-v16": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 1913}, "TheBloke/Yarn-Llama-2-13B-128K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "allenai/cosmo-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "flax-community/gpt2-bengali": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "EleutherAI/pythia-410m-deduped-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "Writer/palmyra-small": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 12, "vocab_size": 50257}, "LukasStankevicius/t5-base-lithuanian-news-summaries-175": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "laituan245/molt5-large-caption2smiles": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "google/ul2": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, 
"d_model": 4096, "num_heads": 16, "num_layers": 32, "vocab_size": 32128}, "Suva/uptag-keyphrase-model": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TheBloke/orca_mini_7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TusharJoshi89/title-generator": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "juierror/flan-t5-text2sql-with-schema": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "hf-tiny-model-private/tiny-random-T5ForConditionalGeneration": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 32100}, "stacked-summaries/flan-t5-large-stacked-samsum-1024": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/WizardLM-33B-V1-0-Uncensored-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "persiannlp/mt5-base-parsinlu-opus-translation_fa_en": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "gurgutan/ruGPT-13B-4bit": {"architectures": ["GPT2LMHeadModel"], "n_embd": 5120, "n_head": 40, "n_inner": null, "n_layer": 40, "vocab_size": 50272}, "TheBloke/upstage-llama-30b-instruct-2048-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "sdadas/polish-gpt2-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": 4096, "n_layer": 24, "vocab_size": 51200}, "aubmindlab/aragpt2-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 64000}, "SEBIS/code_trans_t5_large_source_code_summarization_python_multitask_finetune": {"architectures": ["T5Model"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "maximxls/text-normalization-ru-terrible": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 256, "num_heads": 4, "num_layers": 3, "vocab_size": 5120}, "TheBloke/llama-2-13B-Guanaco-QLoRA-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ziqingyang/chinese-alpaca-2-13b-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 55296}, "KETI-AIR/ke-t5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 64128}, "ibm/qcpg-sentences": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32158}, "tiiuae/falcon-rw-7b": {"architectures": ["FalconForCausalLM"], "hidden_size": 4096, "num_attention_heads": 64, "num_hidden_layers": 36, "vocab_size": 65024}, "timdettmers/guanaco-13b-merged": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "h2oai/h2ogpt-oig-oasst1-falcon-40b": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "coffeeee/nsfw-story-generator": {"architectures": ["GPT2Model"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "zpn/llama-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "crumb/bloom-560m-RLHF-SD2-prompter-aesthetic": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "kalpeshk2011/dipper-paraphraser-xxl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "TheBloke/WizardLM-13B-V1.0-Uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/WizardLM-13B-V1-1-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "allenai/unifiedqa-t5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "oliverguhr/spelling-correction-german-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "ThomasSimonini/t5-end2end-question-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "asi/gpt-fr-cased-base": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1792, "n_head": 14, "n_inner": null, "n_layer": 24, "vocab_size": 50000}, "lora-x/backpack-gpt2": {"architectures": ["BackpackGPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50264}, "TheBloke/Vigogne-2-13B-Instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ai-forever/ruT5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "ml6team/keyphrase-generation-t5-small-openkp": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "mrm8488/t5-base-finetuned-e2m-intent": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "nikaashpuri/gpt-expt-sp-v3-K-600-MA-Mac-actions-kmeans-v14": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 1902}, "TheBloke/Marx-3b-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/Dolphin-Llama2-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, 
"oscorrea/scores-falcon40b-sm-merged": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "lmqg/t5-small-squad-qag": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32101}, "ehartford/WizardLM-Uncensored-Falcon-40b": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65025}, "persiannlp/mt5-base-parsinlu-sentiment-analysis": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "VietAI/vit5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 36100}, "thanathorn/mt5-cpe-kmutt-thai-sentence-sum": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "Blackroot/Hermes-Kimiko-13B-f16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "CarperAI/stable-vicuna-13b-delta": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "j5ng/kullm-12.8b-GPTQ-8bit": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 30080}, "TheBloke/ReMM-SLERP-L2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Weni/WeniGPT-L-70": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "valhalla/t5-small-qg-hl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32102}, "retrieva-jp/t5-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "TheBloke/Wizard-Vicuna-30B-Superhot-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "openllmplayground/openalpaca_3b_600bt_preview": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "ArmelR/starcoder-gradio-v0": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "chanind/frame-semantic-transformer-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "akreal/tiny-random-gpt2": {"intermediate_size": 37, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "vocab_size": 99}, "Neko-Institute-of-Science/LLaMA-7B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Writer/palmyra-med-20b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 44, "vocab_size": 50259}, 
"SiberiaSoft/SiberianPersonaFred": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1536, "num_heads": 24, "num_layers": 24, "vocab_size": 50364}, "mrm8488/spanish-gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "unicamp-dl/translation-en-pt-t5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "OFA-Sys/gsm8k-rft-llama7b-u13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "liuhaotian/LLaVA-13b-delta-v0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32003}, "huggingface/falcon-40b-gptq": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "Ravi07bec/llama-qlora-65b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "PKU-Alignment/alpaca-7b-reproduced": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "Unbabel/gec-t5_small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "TheBloke/Speechless-Llama2-Hermes-Orca-Platypus-WizardLM-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "MIIB-NLP/Arabic-question-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 110080}, "google/t5-large-ssm-nq": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "stanford-crfm/arwen-gpt2-medium-x21": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "sentence-transformers/gtr-t5-xxl": {"architectures": ["T5EncoderModel"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "TheBloke/Nous-Hermes-Llama2-70B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32001}, "paust/pko-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50358}, "allenai/tk-instruct-11b-def": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "amphora/FinABSA": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32105}, "TurkuNLP/gpt3-finnish-13B": {"architectures": ["BloomModel"], "hidden_size": 5120, "n_head": 40, "n_layer": 40, "vocab_size": 131072}, "PAIXAI/Astrid-LLama-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Aalaa/opt-125m-wikitext2": {"architectures": 
["OPTForCausalLM"], "hidden_size": 768, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50272}, "hf-internal-testing/tiny-random-GPTNeoXForQuestionAnswering": {"architectures": ["GPTNeoXForQuestionAnswering"], "hidden_size": 32, "intermediate_size": 37, "num_attention_heads": 4, "num_hidden_layers": 5, "vocab_size": 1024}, "cssupport/t5-small-awesome-text-to-sql": {"vocab_size": 32128, "d_model": 512, "d_ff": 2048, "num_layers": 6, "num_heads": 8, "architectures": ["T5ForConditionalGeneration"]}, "TheBloke/MythoMix-L2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "conceptofmind/Hermes-LLongMA-2-13b-8k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "lysandre/arxiv-nlp": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257}, "Pcik/DialoGPT-medium-Kirby": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "PY007/SLM_1-4B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 2048, "intermediate_size": 5632, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50432}, "ceshine/t5-paraphrase-paws-msrp-opinosis": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "microsoft/bloom-deepspeed-inference-int8": {"architectures": ["BloomModel"], "n_layer": 70, "num_attention_heads": 112, "vocab_size": 250880}, "TheBloke/PuddleJumper-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "gorilla-llm/gorilla-falcon-7b-hf-v0": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "TheBloke/starcoder-GPTQ": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "lmsys/longchat-7b-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "DAMO-NLP-MT/polylm-1.7b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 256000}, "Salesforce/xgen-7b-4k-base": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 51200}, "DAMO-NLP-MT/polylm-13b": {"architectures": ["PolyLMHeadModel"], "n_embd": 5120, "n_head": 40, "n_inner": 20480, "n_layer": 40, "vocab_size": 256000}, "dbddv01/gpt2-french-small": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "EleutherAI/pythia-70m-deduped-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "algolet/mt5-base-chinese-qg": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250101}, "hf-internal-testing/tiny-random-BloomForQuestionAnswering": {"architectures": ["BloomForQuestionAnswering"], "hidden_size": 32, "n_head": 4, 
"n_layer": 5, "vocab_size": 1024}, "hf-internal-testing/tiny-random-BloomForTokenClassification": {"architectures": ["BloomForTokenClassification"], "hidden_size": 32, "n_head": 4, "n_layer": 5, "vocab_size": 1024}, "flax-community/t5-base-cnn-dm": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "hf-internal-testing/tiny-random-BloomForSequenceClassification": {"architectures": ["BloomForSequenceClassification"], "hidden_size": 32, "n_head": 4, "n_layer": 5, "vocab_size": 1024}, "tau/t5-v1_1-large-rss": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/airoboros-l2-13b-gpt4-m2.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "hf-internal-testing/tiny-random-GPTNeoXForSequenceClassification": {"architectures": ["GPTNeoXForSequenceClassification"], "hidden_size": 32, "intermediate_size": 37, "num_attention_heads": 4, "num_hidden_layers": 5, "vocab_size": 1024}, "allegro/plt5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 50048}, "TheBloke/stable-vicuna-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "hf-internal-testing/tiny-random-GPTNeoXForTokenClassification": {"architectures": ["GPTNeoXForTokenClassification"], "hidden_size": 32, "intermediate_size": 37, "num_attention_heads": 4, "num_hidden_layers": 5, "vocab_size": 1024}, "TheBloke/WizardLM-7B-V1-0-Uncensored-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "HuggingFaceH4/tiny-random-LlamaForSequenceClassification": {"architectures": ["LlamaForSequenceClassification"], "hidden_size": 16, "intermediate_size": 64, "num_attention_heads": 4, "num_hidden_layers": 2, "vocab_size": 32000}, "hf-internal-testing/tiny-random-GPTNeoXModel": {"architectures": ["GPTNeoXModel"], "hidden_size": 32, "intermediate_size": 37, "num_attention_heads": 4, "num_hidden_layers": 5, "vocab_size": 1024}, "IlyaGusev/rut5_base_headline_gen_telegram": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 30000}, "lgaalves/gpt2_camel_physics-platypus": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lightonai/alfred-40b-0723": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "KETI-AIR/ke-t5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 64128}, "ibm/regen-disambiguation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "vihangd/smartplat-3b-v3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/OpenBuddy-Llama2-13B-v11.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, 
"num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 37632}, "BlinksFly/Harry_Potter-Ai": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "conceptofmind/Yarn-Llama-2-7b-128k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "liujch1998/vera": {"architectures": ["T5EncoderModel"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "kaist-ai/CoT-T5-11B": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "lintang/t5-v1_1-base-flan": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "sentence-transformers/sentence-t5-xxl": {"architectures": ["T5EncoderModel"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "TheBloke/vicuna-7B-v1.5-16K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "retrieva-jp/t5-large-long": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "retrieva-jp/t5-base-long": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "upstage/SOLAR-0-70b-8bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "jerteh/gpt2-vrabac": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 49152}, "Parth/boolean": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "hf-internal-testing/tiny-random-GPTBigCodeForSequenceClassification": {"architectures": ["GPTBigCodeForSequenceClassification"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "hf-internal-testing/tiny-random-GPTBigCodeForTokenClassification": {"architectures": ["GPTBigCodeForTokenClassification"], "n_embd": 32, "n_head": 4, "n_inner": 37, "n_layer": 5, "vocab_size": 1024}, "megagonlabs/t5-base-japanese-web": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32000}, "MisguidedKerbal/DialoGPT-kerbalV3": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "praeclarum/cuneiform": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "uw-hai/polyjuice": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "reciprocate/tiny-llama": {"architectures": ["LlamaForCausalLM"], "hidden_size": 64, "intermediate_size": 64, "num_attention_heads": 1, "num_hidden_layers": 1, "vocab_size": 32000}, "luqh/ClinicalT5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "stanford-crfm/celebrimbor-gpt2-medium-x81": {"architectures": 
["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/CodeLlama-13B-Python-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "laituan245/molt5-large-smiles2caption": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TurkuNLP/gpt3-finnish-8B": {"architectures": ["BloomModel"], "hidden_size": 4096, "n_head": 32, "n_layer": 32, "vocab_size": 131072}, "NeuML/t5-small-txtsql": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "malteos/bloom-6b4-clp-german": {"hidden_size": 4096, "n_head": 32, "n_layer": 30, "vocab_size": 50304}, "GT4SD/multitask-text-and-chemistry-t5-base-augm": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "allenai/open-instruct-stanford-alpaca-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "CarperAI/randomwalks": {"architectures": ["GPT2LMHeadModel"], "n_embd": 144, "n_head": 6, "n_inner": null, "n_layer": 6, "vocab_size": 23}, "unicamp-dl/mt5-13b-mmarco-100k": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 250112}, "lmqg/t5-small-squad-qg-ae": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32101}, "naltukhov/joke-generator-rus-t5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "hf-internal-testing/tiny-random-UMT5Model": {"architectures": ["UMT5Model"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 256300}, "rentcarsAI/falcon-7b-codegenerator-qlora-merged": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "panggi/t5-base-indonesian-summarization-cased": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "hf-internal-testing/tiny-random-UMT5ForQuestionAnswering": {"architectures": ["UMT5ForQuestionAnswering"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 256300}, "UBC-NLP/AraT5-base": {"d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 110080}, "kmewhort/stable-diffusion-prompt-bolster": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 52000}, "TheBloke/Llama-2-13B-GGML": {}, "gaussalgo/T5-LM-Large-text2sql-spider": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "DAMO-NLP-MT/polylm-multialpaca-13b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 5120, "n_head": 40, "n_inner": 20480, "n_layer": 40, "vocab_size": 256000}, "hf-internal-testing/tiny-random-UMT5ForSequenceClassification": {"architectures": ["UMT5ForSequenceClassification"], "d_ff": 37, "d_model": 32, "num_heads": 4, "num_layers": 5, "vocab_size": 256300}, "tinkoff-ai/ruDialoGPT-small": {"n_embd": 768, "n_head": 12, 
"n_inner": null, "n_layer": 12, "vocab_size": 50261}, "indonesian-nlp/gpt2-medium-indonesian": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "Salesforce/mixqg-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "EleutherAI/pythia-1b-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "NinedayWang/PolyCoder-2.7B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50304}, "zanchat/falcon-1b": {"architectures": ["RWForCausalLM"], "hidden_size": 2048, "n_head": 32, "n_layer": 24, "vocab_size": 50304}, "Goodnoway/DialoGPT-nerbalV2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "crumb/llama2-7b-shard-bf16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "sagawa/ReactionT5-retrosynthesis": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 268}, "DKYoon/mt5-large-lm-adapt": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 250112}, "lintang/t5-v1_1-xl-flan": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "castorini/monot5-large-msmarco-10k": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "Ichsan2895/Merak-7B-v3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "stanford-crfm/caprica-gpt2-small-x81": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "vicgalle/gpt2-open-instruct-v1": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50260}, "philschmid/llama-2-7b-instruction-generator": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "aubmindlab/aragpt2-large": {"architectures": ["GPT2LMHeadModel"], "intermediate_size": 5120, "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 64000}, "NonzeroCornet34/DialoGPT-small-philbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Trelis/Llama-2-7b-chat-hf-sharded-bf16-5GB": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "deep-learning-analytics/wikihow-t5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "JDBN/t5-base-fr-qg-fquad": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, 
"stanford-crfm/durin-gpt2-medium-x343": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "abjbpi/Dwight_Schrute": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "Spico/Humback-Myx": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "T-Systems-onsite/mt5-small-sum-de-en-v2": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250100}, "kaiyuy/leandojo-lean3-tacgen-byt5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3584, "d_model": 1472, "num_heads": 6, "num_layers": 12, "vocab_size": 384}, "pinkmanlove/llama-33b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "lintang/t5-v1_1-large-flan": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "Naseej/noon-7b": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "chizhikchi/sci-five-radsum23": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "impyadav/GPT2-FineTuned-Hinglish-Song-Generation": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "elinas/llama-13b-hf-transformers-4.29": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/GodziLLa2-70B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/Llama-2-70B-OASST-1-200-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32016}, "jacobmorrison/tk-instruct-base-lora-experiments": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-3b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "ingen51/DialoGPT-medium-GPT4": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "cointegrated/rut5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 30000}, "PocketDoc/Dans-CreepingSenseOfDoom": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "tsmatz/mt5_summarize_japanese": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "domenicrosati/QA2D-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 
32128}, "gorkemgoknar/gpt2chatbotenglish": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50262}, "DeliveryBoy/DiabloGPT-medium-Kurisu": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "philschmid/instruct-igel-001": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_layer": 30, "vocab_size": 50304}, "xDAN2099/xDAN_13B_Zh_Base": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 60928}, "codeparrot/codeparrot-small": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 32768}, "paust/pko-t5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 50358}, "flozi00/Llama-2-13b-german-assistant-v4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 37632}, "doc2query/msmarco-t5-base-v1": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "microsoft/DialogRPT-depth": {"architectures": ["GPT2ForSequenceClassification"], "n_embd": 1024, "n_head": 16, "n_layer": 24, "vocab_size": 50257}, "nomic-ai/gpt4all-13b-snoozy": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "NousResearch/Yarn-Llama-2-13b-64k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "mrm8488/t5-base-e2e-question-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "postbot/gpt2-medium-emailgen": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "vanilladucky/Friends_chatting_bot_redefined": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/LlongOrca-7B-16K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32003}, "mutamuta/DialoGPT-spongebob-small": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Ar4ikov/gpt2-medium-650k-stable-diffusion-prompt-generator": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/HermesLimaRP-L2-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "clibrain/Llama-2-7b-ft-instruct-es-gptq-4bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Yarn-Llama-2-7B-128K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, 
"lmqg/mt5-small-jaquad-qg-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "allenai/tk-instruct-base-def-pos": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "davidkim205/komt-Llama-2-7b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "tangy0/llama-2-7b-dtlpy_v0.4chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TigerResearch/tigerbot-70b-base": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 60928}, "hadifar/eventextraction": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "TintinMeimei/NousResearch-Llama-2-7b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/airoboros-l2-13b-gpt4-2.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Nekochu/Llama-2-13B-fp16-french": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "minhtoan/t5-translation-vietnamese-nom": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 300, "num_heads": 8, "num_layers": 6, "vocab_size": 30100}, "BELLE-2/BELLE-Llama2-13B-chat-0.4M": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "bigscience/T0": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "andreaskoepf/pythia-1.4b-gpt4all-pretrain": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50288}, "Salesforce/codet5-base-codexglue-clone": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "Chae/scottbot_med": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/LLaMA-7b-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "sagard21/python-code-explainer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "stanfordnlp/SteamSHP-flan-t5-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "MarinHinawa/DialoGPT-medium-Ene": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "aiautomationlab/german-news-title-gen-mt5": {"architectures": 
["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "TheBloke/vicuna-13B-1.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Chronos-Hermes-13B-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "microsoft/DialogRPT-human-vs-machine": {"architectures": ["GPT2ForSequenceClassification"], "n_embd": 1024, "n_head": 16, "n_layer": 24, "vocab_size": 50257}, "uer/gpt2-distil-chinese-cluecorpussmall": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 21128}, "h2oai/h2ogpt-gm-oasst1-en-2048-falcon-40b-v1": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "davidkim205/komt-Llama-2-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ibm/qcpg-questions": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32155}, "gavin124/gpt2-finetuned-cnn-summarization-v2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50260}, "hogru/MolReactGen-GuacaMol-Molecules": {"architectures": ["GPT2LMHeadModel"], "n_embd": 144, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 93}, "stanford-crfm/darkmatter-gpt2-small-x343": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "conceptofmind/Yarn-Llama-2-7b-64k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Radicalkiddo/DialoGPT-small-Radical": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Ninja5000/DialoGPT-medium-HarryPotter": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "theblackcat102/alpaca-title-generator-mt0-large": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 250112}, "transfaeries/Twilight-Sparkle-GPT": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/Vigogne-2-7B-Instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "markofhope/DialoGPT-medium-HarringtonBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "seeksery/DialoGPT-calig3": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "beomi/kcgpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 55000}, "vilm/vietcuna-3b": {"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, 
"IDEA-CCNL/Randeng-T5-784M": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32596}, "vwxyzjn/starcoderbase-triviaqa": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "IDEA-CCNL/Wenzhong2.0-GPT2-3.5B-chinese": {"architectures": ["GPT2LMHeadModel"], "n_embd": 3072, "n_head": 32, "n_inner": 12288, "n_layer": 30, "vocab_size": 50304}, "TheBloke/Llama-2-7b-Chat-GGUF": {}, "MingZhong/unieval-dialog": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "OpenAssistant/falcon-40b-megacode2-oasst": {"architectures": ["FalconForCausalLM"], "hidden_size": 8192, "num_attention_heads": 128, "num_hidden_layers": 60, "vocab_size": 65152}, "axiong/PMC_LLaMA_13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "codeparrot/codeparrot-small-multi": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 12, "vocab_size": 32768}, "EleutherAI/pythia-6.9b-deduped-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "Riiid/sheep-duck-llama-2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "alibaba-pai/pai-bloom-1b1-text2prompt-sd": {"architectures": ["BloomForCausalLM"], "hidden_size": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "TheBloke/Chronos-Beluga-v2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "malmarjeh/t5-arabic-text-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 110080}, "GarfExit/DialogGPT-medium-707": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "audreycl/DialoGPT-RPF": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "florentiino/DialoGPT-small-harrypotter": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "JazzyLucas/DialoGPT-small-TonyStark": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "marblyso/DialoGPT-medium-marina": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "polandball/GPT-Polen": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "GarrisonBot/DialoGPT-medium-herbertgarrison": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "XuYipei/kw-cutegpt-13b-ift": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 49954}, "TheBloke/Pygmalion-7B-SuperHOT-8K-GPTQ": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "timothykim04/DialoGPT-medium-harrypotter": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "allegro/plt5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50048}, "lengoctuong/gpt2-finetuned-wikitext2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "PyaeSoneK/Fine_Tuned_Pythia_smallest_140_legal": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "psyche/KoT5-paraphrase-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "microsoft/DialogRPT-width": {"architectures": ["GPT2ForSequenceClassification"], "n_embd": 1024, "n_head": 16, "n_layer": 24, "vocab_size": 50257}, "Dahoas/pythia-1B-static-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "jerteh/gpt2-orao": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 49152}, "TheBloke/LosslessMegaCoder-Llama2-13B-Mini-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32007}, "Ngao/DialoGPT-small-ngao": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "4i-ai/Llama-2-7b-alpaca-es": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "asifhugs/open_llama_7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "RajuKandasamy/tamillama_tiny_30m": {"architectures": ["LlamaForCausalLM"], "hidden_size": 256, "intermediate_size": 786, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 32000}, "stabilityai/StableBeluga1-Delta": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Linly-AI/Chinese-LLaMA-2-7B-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 40076}, "flax-community/gpt2-base-thai": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "shalomma/llama-7b-embeddings": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/llama2-7b-chat-codeCherryPop-qLoRA-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "KhanAdeeb/model-tony-stark": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 
50257}, "spy24/autonlp-UK-to-US-600416931": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "DKYoon/mt5-small-lm-adapt": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "TheBloke/Llama-2-70B-GGML": {}, "TheBloke/model_007-70B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "abhi-8/DialoGPT-medium-Joshua-twevy": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "camenduru/MiniGPT4-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "paripi/Malishka": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "SiberiaSoft/SiberianPersonaFred_large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 50364}, "Alred/t5-small-finetuned-summarization-cnn": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "Leomas/DialoGPT-medium-Leomas": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TehVenom/Pygmalion-7b-Merged-Safetensors": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "marblyso/DialoGPT-medium-pearl": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "lmqg/mt5-small-dequad-qg-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "TheBloke/WizardLM-Uncensored-Falcon-40B-GPTQ": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65025}, "NlpHUST/t5-small-vi-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "Elucia/Diluc_Bot_1.3": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "h2oai/h2ogpt-16k-codellama-34b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "microsoft/CodeGPT-small-java": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 52000}, "Starry/COUNTNARC": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "OpenMEDLab/PULSE-7bv5": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "marblyso/DialoGPT-medium-aubrey": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "Ashypaws/DialoGPT-medium-Ashybot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, 
"YTTD/DialoGPT-medium-sou": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "marblyso/DialoGPT-medium-hero": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "Trelis/Llama-2-7b-chat-hf-function-calling-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "NousResearch/CodeLlama-7b-hf-flash": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "TheBloke/CodeLlama-34B-Python-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "musabgultekin/functionary-7b-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "amasand/gpt2-imdb-pos-ppo": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "bigscience/bloomz-7b1-p3": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 30, "num_attention_heads": 32, "vocab_size": 250880}, "rirv938/wizard-vicuna-13b-uncensored-awq-4bit-g128": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "marblyso/DialoGPT-medium-marblesbagel": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "vilm/vietcuna-7b-v3": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "stas/t5-very-small-random": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 256, "d_model": 64, "num_heads": 4, "num_layers": 8, "vocab_size": 32128}, "KeLiu/Title-Gen": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "vampiregirl/DialoGPT-medium-lennoxram": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "sharpbai/Llama-2-7b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "sam2ai/openllama_odia_3b_base": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "lmqg/mt5-small-esquad-qg-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "stanfordnlp/SteamSHP-flan-t5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "allenai/tulu-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "JNDankwah/DialoGPT-small-ThorCB": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lmqg/mt5-small-ruquad-qg": {"architectures": 
["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "lmqg/mt5-small-ruquad-qg-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "Dinocroth/DialoGPT-medium-Trevor-PhilipsV2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "Speedemon/jake-peralta-ai": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "chanind/frame-semantic-transformer-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "stanford-crfm/music-medium-800k": {"vocab_size": 55028, "n_embd": 1024, "n_layer": 24, "n_head": 16, "n_inner": null, "architectures": null}, "h2oai/h2ogpt-16k-codellama-7b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "TheBloke/Pygmalion-2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "huggingface-course/codeparrot-ds": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50000}, "KakoSi/AcciGPT-smol": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lmqg/mt5-small-itquad-qg-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "EggsInAJar/DialoGPT-small-MerrickBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "razent/SciFive-large-Pubmed": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "armandnlp/gpt2-TOD_finetuned_SGD": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50271}, "RuterNorway/Llama-2-13b-chat-norwegian": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "AIDC-ai-business/Marcoroni-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "deep-learning-analytics/GrammarCorrector": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "redrussianarmy/gpt2-turkish-cased": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lmqg/mt5-small-frquad-qg-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "psyche/KoT5-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "loitran/DialoGPT-medium-peppapig": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "openchat/openchat": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "saikatc/NatGen": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "Coderhuynin/DialoGPT-large-TonyStark": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "declare-lab/flan-sharegpt-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "TheBloke/Chronos-Hermes-13B-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "usvsnsp/pythia-6.9b-rm-full-hh-rlhf": {"architectures": ["GPTNeoXForSequenceClassification"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50277}, "yujiepan/llama-2-tiny-3layers-random": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8, "intermediate_size": 32, "num_attention_heads": 2, "num_hidden_layers": 3, "vocab_size": 32000}, "allenai/unifiedqa-v2-t5-3b-1363200": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "gsarti/it5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32103}, "simple2312/DialoGPT-Ellie": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "kashif/llama-7b_stack-exchange_RM_peft-adapter-merged": {"architectures": ["LlamaForSequenceClassification"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "larryvrh/mt5-translation-ja_zh": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 250112}, "j5ng/et5-typos-corrector": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 45100}, "vilsonrodrigues/falcon-7b-sharded": {"architectures": ["FalconForCausalLM"], "hidden_size": 4544, "num_attention_heads": 71, "num_hidden_layers": 32, "vocab_size": 65024}, "felinecity/ScaraBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "persiannlp/mt5-base-parsinlu-translation_en_fa": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "Jonesy/HomersNightOut": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "conceptofmind/LLongMA-2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/LoKuS-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "shibing624/mengzi-t5-base-chinese-correction": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 
32128}, "Lamia/DialoGPT-small-Sundrop": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Blizzchor/DialoGPT-medium-gamora": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "jlsalty9999/DialoGPT-medium-Riddle": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "uer/gpt2-chinese-lyric": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 21128}, "LMFlow/Full-Robin-7b-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "llm-book/t5-base-long-livedoor-news-corpus": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "nuggster/DialoGPT-small-ianbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Tristan/gpt2_reward_summarization": {"architectures": ["GPT2ForSequenceClassification"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "MysteriousAmazon/DialoGPT-medium-freddy": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "wdidfau/Pygmalion-13b-Landmark-Attention-Merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "kaiyuy/leandojo-lean3-retriever-byt5-small": {"architectures": ["T5EncoderModel"], "d_ff": 3584, "d_model": 1472, "num_heads": 6, "num_layers": 12, "vocab_size": 384}, "kz919/ntk_scaled_open_llama_3b_32k": {"architectures": ["NTKScaledLlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "abhi-8/DialoGPT-medium-Rick": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "polymath707/llama-2-13b-miniguanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Langboat/bloom-389m-zh": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 42437}, "Techcs002/DialoGPT-medium-AboTalkTest": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "AIDC-ai-business/Marcoroni-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "ybelkada/t5-3b-sharded": {"architectures": ["T5WithLMHeadModel"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "benjamin/gerpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "abhi-8/DialoGPT-medium-Michael": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "cahya/gpt2-small-indonesian-522M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, 
"vocab_size": 50257}, "marianna13/flan-t5-base-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Lakoc/fisher_dec_6_layers": {"architectures": ["GPT2Model"], "n_embd": 512, "n_head": 4, "n_inner": null, "n_layer": 6, "vocab_size": 5000}, "simple2312/DialoGPT-nayeon": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "sjrhuschlee/flan-t5-base-squad2": {"architectures": ["T5ForQuestionAnswering"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "eqhylxx/full-vicuna-160m": {"architectures": ["LlamaForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 32000}, "Ashypaws/DialoGPT-medium-Kitaibot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/Wizard-Vicuna-7B-Uncensored-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "NHStudios/DialoGPT-small-jake": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "ZipperXYZ/DialoGPT-medium-TheWorldMachineExpressive2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "IIC/mt5-spanish-mlsum": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "mattymchen/gense-base-plus": {"architectures": ["T5Model"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "DAMO-NLP/SeqGPT-560M": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "AMHR/T5-for-Adversarial-Paraphrasing": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Blizzchor/DialoGPT-medium-HarryBotter": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "brianveebee/DialoGPT-medium-bender": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "YTTD/DialoGPT-medium-keiji": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "anon8231489123/gpt4-x-alpaca-13b-native-4bit-128g": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "Pcik/DialoGPT-medium-Dante": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "mHossain/bangla-para-v3-500000": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "TheBloke/Llama-2-7B-GGUF": {}, "diwas7777/HarryBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "seduerr/t5-small-pytorch": {"architectures": ["T5WithLMHeadModel"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, 
"felinecity/DioloGPT-small-KaeyaBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "lmsys/vicuna-7b-delta-v0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "inu-ai/dolly-japanese-gpt-1b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 44928}, "TheBloke/Vicuna-33B-1-3-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "Dahoas/pythia-125M-static-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "Blizzchor/DialoGPT-medium-QuillLord": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "beomi/KoAlpaca-llama-1-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "patrickNLP/Graphix-3B": {"architectures": ["Model"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "Starry/HELLORUKAS": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "keans/DialoGPT-small-highjacker": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "DoesNoPro/DialoGPT-small-RaidenG": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "ThatSkyFox/DialoGPT-medium-whatsapp": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "EnterNameBros/Senko-san-medium-scl": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/CodeLlama-7B-Python-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "mrm8488/t5-small-finetuned-quora-for-paraphrasing": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "NonzeroCornet34/DialoGPT-small-hansolo": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "d0rj/rut5-base-summ": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "CAIRE-CedarsSinai/falcon-7b-qlora-chat-support-bot-faq-alzkb-version-2": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "el-profesor/code_t5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "Soumyajit1008/DialoGPT-small-harryPotterssen": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "malteos/bloom-1b5-clp-german": {"architectures": ["BloomForCausalLM"], "hidden_size": 2048, "n_head": 16, "n_layer": 24, "vocab_size": 
50304}, "yesuns/DialoGPT-small-yesun": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Stevo/DiagloGPT-medium-spamton": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Vision-CAIR/vicuna-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "TheBloke/airoboros-33B-gpt4-1-4-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "tanishqvashisht/DialoGPT-small-Joshua": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TurkuNLP/gpt3-finnish-3B": {"architectures": ["BloomModel"], "hidden_size": 2560, "n_head": 32, "n_layer": 32, "vocab_size": 131072}, "lizhuang144/flan-t5-base-VG-factual-sg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TheBloke/Athena-v1-GGUF": {}, "xxyyy123/test-28b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "pastlecry/DialoGPT-small-harrypotter": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "DiscordRequestsAPI/NurDeeps-Bot-2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "channashi/DialoGPT-small-rocket": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "ritog/bangla-gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/Redmond-Puffin-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "Shakerlicious/DialoGPT-small-raquelbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lmqg/mt5-base-jaquad-qag": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250101}, "anon8231489123/vicuna-13b-GPTQ-4bit-128g": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "jacobmorrison/tk-instruct-small-lora-experiments": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32100}, "TheBloke/open-llama-13b-open-instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "cedpsam/chatbot_fr": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "Photolens/llama-2-13b-langchain-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "avinashshrangee/DialoGPT-small-Ricky": 
{"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "YeungNLP/firefly-llama2-7b-pretrain": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 55296}, "efederici/it5-efficient-small-fanpage": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 32, "vocab_size": 32100}, "saikiranmaddukuri/chat_to_sql0.17": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "TheBloke/Llama2-28B-Air03-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 7296, "intermediate_size": 22016, "num_attention_heads": 57, "num_hidden_layers": 40, "vocab_size": 32000}, "crodri/falcon_aguila_meteocat": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 50257}, "Narsil/starcoder-gptq": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "CobraMamba/mamba-gpt-3b-v4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "YeungNLP/firefly-llama2-13b-pretrain": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 55296}, "TheBloke/airoboros-l2-7b-gpt4-1.4.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "DecafNosebleed/DialoGPT-small-ScaraBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "yazdipour/text-to-sparql-t5-small-qald9": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "ClassCat/gpt2-base-french": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50000}, "TheBloke/airoboros-33B-GPT4-m2.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "quantumaikr/KoreanLM-1.5b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 1024, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "toyfreak/DialoGPT-small-addy": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "spursyy/mT5_multilingual_XLSum_rust": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "lengoctuong/gpt2-finetuned-chatbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50261}, "KnutJaegersberg/megatron-gpt2-345m-evol_instruct_v2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": 4096, "n_layer": 24, "vocab_size": 50257}, "zkdtckk/falcon40-instruct-qlora-tta-v1": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "TheBloke/Nous-Hermes-13B-SuperHOT-8K-GPTQ": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "TheBloke/Nous-Hermes-Llama2-GGML": {}, "IkariDev/Athena-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/WizardLM-7B-V1.0-Uncensored-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/llama-2-13B-German-Assistant-v2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "cahya/gpt2-large-indonesian-522M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "VietAI/envit5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50048}, "kam1run/DialoGPT-large-kami": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "uukuguy/speechless-codellama-dolphin-orca-platypus-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "aluserhuggingface/DialoGPT-small-harrypotter": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/gpt4-x-vicuna-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "Pcik/DialoGPT-medium-Ruby": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/LLaMA-30b-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "sdadas/polish-gpt2-small": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 12, "vocab_size": 51200}, "ahxt/llama2_xs_460M_experimental": {"architectures": ["LlamaForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "lemon234071/t5-base-Chinese": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 35364}, "4bit/pyg-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "squarelike/Gugugo-koen-1.3B-V1.0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 30080}, "lvwerra/t5-imdb": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "psymon/KoLlama2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Maxwere/DiabloGPT-medium-maxbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, 
"n_layer": 24, "vocab_size": 50257}, "nafisehNik/mt5-persian-summary": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "nams/nams-bot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lmqg/mt5-small-esquad-qag": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "mattbit/gpt2wb": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "ghazikhanihamed/TooT-PLM-P2S": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 48, "vocab_size": 144}, "lonewanderer27/YoshinoriBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "VinVanGogh/Llama-2-7b-Aixiety-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "GroNLP/gpt2-medium-italian-embeddings": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 30001}, "IDEA-CCNL/Randeng-T5-784M-QA-Chinese": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32601}, "kingbri/airo-llongma-2-13B-16k-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "lvwerra/starcoderbase-gsm8k": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "mofawzy/gpt2-arabic-sentence-generator": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50000}, "lmqg/mt5-small-itquad-qag": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "sharpbai/Llama-2-13b-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "lomahony/eleuther-pythia70m-hh-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "Salesforce/codet5-large-ntp-py": {"architectures": ["T5WithLMHeadModel"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "TheBloke/Samantha-1.11-CodeLlama-34B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "Lenza/DialoGPT-medium-Kobayashi": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "davidviriato/DialoGPT-small-joshua": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Shakerlicious/DialoGPT-small-descentbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TurkuNLP/gpt3-finnish-xl": {"architectures": ["BloomModel"], "hidden_size": 2064, "n_head": 24, 
"n_layer": 24, "vocab_size": 131072}, "TheBloke/starcoderplus-GPTQ": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "TheBloke/Airoboros-L2-7B-2.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Mikivis/gpt2-large-lora-sft1": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "gagan3012/k2t": {"architectures": ["T5WithLMHeadModel"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "MerlynMind/merlyn-education-safety": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50688}, "explosion-testing/refined-web-model-test": {"architectures": ["RWForCausalLM"], "hidden_size": 32, "n_head": 4, "n_layer": 5, "vocab_size": 1024}, "explosion-testing/falcon-no-parallel-attn-test": {"architectures": ["RWForCausalLM"], "hidden_size": 32, "n_head": 4, "n_layer": 5, "vocab_size": 1024}, "Marxav/frpron": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 268}, "AmbricJohnson5888/claura": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/CodeLlama-7B-Instruct-GGUF": {}, "felinecity/DioloGPT-small-LisaBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lmqg/mt5-small-frquad-qag": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "RobiKenobi/DialoGPT-medium-pete": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/Vicuna-13B-CoT-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/airoboros-33B-gpt4-1.4-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "SEBIS/code_trans_t5_base_code_documentation_generation_java_multitask": {"architectures": ["T5Model"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "retrieva-jp/t5-base-medium": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "elinas/chronos-13b-4bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "abhinavkulkarni/meta-llama-Llama-2-7b-chat-hf-w4-g128-awq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Luban-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "uer/t5-base-chinese-cluecorpussmall": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 21228}, 
"ClueAI/ChatYuan-large-v1": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "helenai/gpt2-ov": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "fireballoon/baichuan-vicuna-chinese-7b-gptq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 64000}, "Trelis/Llama-2-7b-chat-hf-hosted-inference-8bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Starry/KARENTRIES": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "umm-maybe/SportsFanGhost": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/airoboros-13B-gpt4-1.4-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TabbyML/StarCoder-1B": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 49152}, "TFLai/Nova-13B-50-step": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "Mikivis/gpt2-large-lora-sft2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "CHIH-HUNG/llama-2-13b-Open_Platypus_and_ccp_2.6w-3_epoch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "BlackSamorez/falcon-40b-tiny-testing": {"architectures": ["RWForCausalLM"], "hidden_size": 256, "n_head": 4, "n_layer": 2, "vocab_size": 65024}, "Rocketknight1/tiny-random-falcon-40b": {"architectures": ["FalconForCausalLM"], "hidden_size": 1024, "num_attention_heads": 128, "num_hidden_layers": 2, "vocab_size": 65024}, "TheBloke/WizardLM-1.0-Uncensored-Llama2-13B-GGML": {}, "TheBloke/Zarafusionex-1.1-L2-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "lmqg/t5-large-squad-qg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32101}, "casperhansen/falcon-7b-awq": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "Azure99/blossom-v2-llama2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "DeepESP/gpt2-spanish-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "StudentLLM/Alpagasus-2-13b-QLoRA-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Weni/WeniGPT": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "niicovila/llama-v2-tst-law": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Undi95/CreativityEngine": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "DB13067/Peterbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "EleutherAI/pythia-12b-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50688}, "allenai/tulu-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "TheBloke/airoboros-l2-13b-gpt4-m2.0-GGML": {}, "TheBloke/Griffin-3B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "imthanhlv/vigpt2medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "m3hrdadfi/gpt2-persian-qa": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50000}, "TheBloke/MythoMax-L2-Kimiko-v2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "CHIH-HUNG/llama-2-13b-FINETUNE1_17w-r16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ppn/DialoGPT-small-harrypotter": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lmqg/mt5-base-ruquad-qag": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250101}, "TheBloke/Firefly-Llama2-13B-v1.2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "persiannlp/mt5-large-parsinlu-opus-translation_fa_en": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 250112}, "simple2312/DialoGPT-Twice": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "declare-lab/flan-alpaca-xxl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "ChanceFocus/finma-7b-nlp": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "osunlp/attrscore-flan-t5-xl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "likenneth/honest_llama2_chat_7B": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Hugherinit/hi": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, 
"vocab_size": 32119}, "vaibhav9/GPT2-qa": {"architectures": ["GPT2ModelForQuestionAnswering"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "st3rl4nce/t5-small-finetuned-pubmed": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "uonlp/okapi-ro-llama": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ThomasNLG/t5-weighter_cnndm-en": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "google/t5-11b-ssm-tqa": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "lizhuang144/flan-t5-small-VG-factual-sg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "hyunjae/skt-kogpt2-kullm-v2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 51200}, "Mirage-Studio/llama-gaan-2-7b-chat-hf-dutch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/LosslessMegaCoder-Llama2-7B-Mini-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32007}, "lmqg/t5-small-squad-qg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32101}, "castorini/doc2query-t5-large-msmarco": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/manticore-13b-chat-pyg-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "22h/open-cabrita3b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 52000}, "alzoubi36/priva_t5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "TheBloke/vicuna-7B-v0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "TheBloke/falcon-7b-instruct-GGML": {}, "Rozi05/QuoteVibes_Model_Trained": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Tidum/DialoGPT-large-Michael": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "valhalla/t5-small-qg-prepend": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32102}, "lmqg/t5-large-squad-qag": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32101}, "abhiramtirumala/DialoGPT-sarcastic": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, 
"vocab_size": 50257}, "mindrage/Manticore-13B-Chat-Pyg-Guanaco-GGML": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Salesforce/dialogstudio-t5-base-v1.0": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32103}, "allenai/unifiedqa-v2-t5-base-1363200": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "kleinay/qanom-seq2seq-model-joint": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32101}, "puugz/DialoGPT-small-spiderman": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "UrukHan/t5-russian-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "HuggingFaceH4/tiny-random-LlamaForSeqClass": {"architectures": ["LlamaForSequenceClassification"], "hidden_size": 16, "intermediate_size": 64, "num_attention_heads": 4, "num_hidden_layers": 2, "vocab_size": 32000}, "JosephusCheung/Qwen-LLaMAfied-7B-Chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 151936}, "Abzu/orca-mini-v3-70b-gptq-q4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "wnic00/t5-small-finetune-bilingual-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "ChukSamuels/DialoGPT-small-Dr.FauciBot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "macavaney/doc2query-t5-base-msmarco": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "nlp-waseda/comet-t5-base-japanese": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32000}, "stjiris/t5-portuguese-legal-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Icaruas/V2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "imxly/t5-pegasus": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50000}, "stefan-it/german-gpt2-larger": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50265}, "noahkim/KoT5_news_summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50358}, "hoskinson-center/proofGPT-v0.1": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 
2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "TheBloke/WizardMath-7B-V1.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "FieldSu/distil_student_24": {"architectures": ["RWForCausalLM"], "hidden_size": 1136, "n_head": 71, "n_layer": 8, "vocab_size": 65024}, "shyamsn97/Mario-GPT2-700-context-length": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 50257}, "dgnk007/eagle": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "sharpbai/Llama-2-7b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "jackyv/DialoGPT-small-pinocchio": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "felinecity/DioloGPT-small-KaeyaBot2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "toyfreak/DialoGPT-small-shy": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "chavinlo/alpaca-13b": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "TehVenom/Pygmalion-7b-4bit-GPTQ-Safetensors": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "georgesung/open_llama_7b_qlora_uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ostorc/rick-sanchez-chatbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "polymath707/llama-2-7b-miniguanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "KBlueLeaf/guanaco-7b-leh-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Chronos-Hermes-13B-v2-GGML": {}, "approach0/mathy-vicuna-13B-FFT-phase2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "gorilla-llm/gorilla-7b-hf-delta-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "j5ng/kullm-5.8b-GPTQ-8bit": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 16, "num_hidden_layers": 28, "vocab_size": 30080}, "bitadin/checkpoint-230167": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "NekoPunchBBB/Llama2-13b-hf-Open-Platypus-QLoRA-att": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, 
"num_hidden_layers": 40, "vocab_size": 32000}, "mrm8488/t5-small-finetuned-wikiSQL": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "ozcangundes/T5-base-for-BioQA": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "AriakimTaiyo/gpt2-chat": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_layer": 36, "vocab_size": 50257}, "TheBloke/WizardLM-13B-V1.2-GGML": {}, "TheBloke/Trurl-2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "ccore/opt-125-smart-test": {"architectures": ["OPTForCausalLM"], "hidden_size": 768, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50272}, "James-WYang/BigTranslate": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 53613}, "Trelis/Llama-2-7b-chat-hf-function-calling": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Wikidepia/IndoT5-base-paraphrase": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "csebuetnlp/mT5_m2m_crossSum": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "seanmor5/tiny-llama-test": {"architectures": ["LlamaForCausalLM"], "hidden_size": 32, "intermediate_size": 64, "num_attention_heads": 2, "num_hidden_layers": 2, "vocab_size": 32000}, "explosion-testing/refined-web-model-new-decoder-test": {"architectures": ["RWModel"], "hidden_size": 256, "n_head": 4, "n_layer": 5, "vocab_size": 1024}, "jondurbin/airocoder-34b-2.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "lmqg/t5-base-squad-qg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "PORTULAN/gervasio-ptpt-base": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "UWB-AIR/barticzech-1.0": {"architectures": ["MBartForConditionalGeneration"], "d_model": 1024, "num_hidden_layers": 12, "vocab_size": 50265}, "TokenBender/llama2-7b-chat-hf-codeCherryPop-qLoRA-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Voicelab/trurl-2-7b-8bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "TheBloke/Llama-2-13B-chat-GGUF": {}, "VietAI/vit5-base-vietnews-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 36096}, "lmqg/t5-small-squad-ae": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32101}, "retrieva-jp/t5-base-short": {"architectures": 
["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "grammarly/coedit-xxl": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32100}, "heack/HeackMT5-ZhSum100k": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "TheBloke/LLaMA-13b-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TFMC/ELYZA-japanese-Llama-2-7b-instruct-GPTQ-4bit-64g": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "mxmax/Chinese_Chat_T5_Base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "elinas/chronos-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "kajdun/iubaris-13b-v3_GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jmeadows17/MathT5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32104}, "TheBloke/Kimiko-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "nlp-waseda/gpt2-small-japanese": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 32000}, "rshrott/description-together-ai": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "noah-ai/mt5-base-question-generation-vi": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "AI4PD/ZymCTRL": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 458}, "bitadin/gpt-4-long-titles-v2-flan-t5-base-llm-12": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "shorthillsai/flan-t5-large-absa": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/CodeLlama-13B-oasst-sft-v10-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "prithivida/active_to_passive_styletransfer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "lcw99/t5-large-korean-text-summary": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 50358}, "EleutherAI/pythia-1.4b-deduped-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 
50304}, "sdadas/polish-gpt2-large": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": 5120, "n_layer": 36, "vocab_size": 51200}, "uonlp/okapi-vi-bloom": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "cenkersisman/gpt2-turkish-900m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "IlyaGusev/rugpt_large_turbo_instructed": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50258}, "Waterhorse/chessgpt-base-v1": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "jondurbin/spicyboros-13b-2.2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "echarlaix/t5-small-openvino": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "mrm8488/santacoder-finetuned-the-stack-bash-shell": {"architectures": ["GPT2LMHeadCustomModel"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 49280}, "ckip-joint/bloom-3b-zh": {"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "Dawnstarhunter/DialoGPT-medium-Eveline": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "lmqg/t5-base-squad-ae": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "mesolitica/finetune-translation-t5-small-standard-bahasa-cased-v2": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "liuhaotian/LLaVA-7b-delta-v0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32004}, "yzhuang/autotree_llama_small_snxor_l1_2_vit": {"architectures": ["LlamaForAutoTree"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 6, "vocab_size": 32000}, "mrm8488/t5-base-finetuned-wikiSQL-sql-to-en": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "aleksickx/llama-7b-hf": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "yongzx/pythia-70m-sft-hh": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "sonoisa/t5-base-english-japanese": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "BramVanroy/Llama-2-13b-chat-dutch": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Bhuvana/t5-base-spellchecker": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, 
"vocab_size": 32128}, "PlanTL-GOB-ES/gpt2-base-bne": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50261}, "lmqg/mt5-small-jaquad-qg": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "Mirage-Studio/llama-gaan-2-7b-chat-hf-dutch-epoch-5": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "microsoft/DialogRPT-human-vs-rand": {"architectures": ["GPT2ForSequenceClassification"], "n_embd": 1024, "n_head": 16, "n_layer": 24, "vocab_size": 50257}, "aubmindlab/aragpt2-mega": {"architectures": ["GPT2LMHeadModel"], "intermediate_size": 6144, "n_embd": 1536, "n_head": 24, "n_inner": null, "n_layer": 48, "vocab_size": 64000}, "liyuesen/druggpt": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 53083}, "conceptofmind/Hermes-LLongMA-2-7b-8k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/scarlett-33B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/EverythingLM-13b-V2-16K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "sartmis1/starcoder-v2-openapi-special-tokens": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "TheBloke/Phind-CodeLlama-34B-v1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "TheBloke/Yarn-Llama-2-7B-64K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Dolphin-Llama-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "kfkas/Legal-Llama-2-ko-7b-Chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 46336}, "Ichsan2895/Merak-7B-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "allenai/unifiedqa-v2-t5-base-1251000": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "sagawa/ReactionT5-product-prediction": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 268}, "lmqg/mt5-small-jaquad-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "Narrativa/mT5-base-finetuned-tydiQA-xqa": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "allenai/macaw-large": {"architectures": ["T5ForConditionalGeneration"], 
"d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "gagan3012/k2t-new": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "google/t5-efficient-tiny-nl2": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 256, "num_heads": 4, "num_layers": 2, "vocab_size": 32128}, "sam2ai/open_llama_3b_odia_gptq_128_4bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "lmqg/mt5-small-dequad-qg": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "mrm8488/mT5-small-finetuned-tydiqa-for-xqa": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "zjunlp/knowlm-13b-zhixi": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "h2oai/h2ogpt-16k-codellama-13b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "mymusise/gpt2-medium-chinese": {"architectures": ["TFGPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 8021}, "ai-forever/mGPT-13B": {"architectures": ["GPT2LMHeadModel"], "n_embd": 5120, "n_head": 40, "n_inner": null, "n_layer": 40, "vocab_size": 100000}, "TinaLiHF/fined-tuned-T5small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "TheBloke/airoboros-l2-7B-gpt4-2.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Mihakram/AraT5-base-question-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 110080}, "fjungstedt/t5-criteria-text-to-json": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "luqh/ClinicalT5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "h2oai/h2ogpt-16k-codellama-13b-python": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "masakhane/afri-mt5-base": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "YeungNLP/bloom-1b4-zh": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 24, "num_attention_heads": 16, "vocab_size": 46145}, "shekharchatterjee/temp-model-174": {}, "TheBloke/Kimiko-v2-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jeffwan/vicuna-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "kz919/ntk_scaled_open_llama_13b_32k": {"architectures": 
["NTKScaledLlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "lmqg/t5-base-squad-qg-ae": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "r3dhummingbird/DialoGPT-small-harrypotter": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "camenduru/MiniGPT4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "TheBloke/open-llama-7b-open-instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "MoinFaisal/Llama-2-7b-chat-finetune": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/CodeLlama-13B-Instruct-GGUF": {}, "fbellame/llama2-pdf-to-quizz-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "fractalego/fact-checking": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "michelecafagna26/gpt2-medium-finetuned-sst2-sentiment": {"architectures": ["GPT2ForSequenceClassification"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/WizardLM-Uncensored-SuperCOT-StoryTelling-30B-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32001}, "TheBloke/Airoboros-7B-GPT4-1-4-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Airoboros-L2-70B-GPT4-m2.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Arc53/docsgpt-7b-falcon": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "alenusch/mt5large-ruparaphraser": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 250112}, "ApoTro/slovak-t5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32100}, "microsoft/dolly-v2-7b-olive-optimized": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50280}, "huggingtweets/gordonramsay": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "prithivida/formal_to_informal_styletransfer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "model-attribution-challenge/gpt2-xl": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_layer": 48, "vocab_size": 50257}, "saiful9379/Bangla_GPT2": {"architectures": ["GPT2LMHeadModel"], 
"n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 33391}, "deepse/CodeUp-Llama-2-7b-chat-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ziqingyang/chinese-llama-2-13b-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 55296}, "ChandlerU11/t5_fine": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "TheBloke/Guanaco-3B-Uncensored-v2-GPTQ": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "mamiksik/T5-commit-message-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32104}, "conceptofmind/Yarn-Llama-2-13b-64k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "mesolitica/llama-13b-hf-16384-fpf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Sao10K/Stheno-1.2-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "gsarti/it5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32103}, "Den4ikAI/FRED-T5-XL-interpreter": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1536, "num_heads": 24, "num_layers": 24, "vocab_size": 50364}, "TheBloke/WizardCoder-Guanaco-15B-V1.1-GPTQ": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49153}, "seonglae/llama-2-7b-chat-hf-gptq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/llama2_7b_chat_uncensored-GGML": {}, "ecosumit/gpt-model": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "allegro/plt5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 50048}, "cointegrated/rut5-small": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 20100}, "it5/it5-large-question-answering": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32103}, "tscholak/1zha5ono": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, "optible/unifiedqa-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "CleverShovel/falcon-7b-instruct-sharded-bf16": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "TheBloke/Pygmalion-13B-SuperHOT-8K-fp16": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "BlackSamorez/llama-2-tiny-testing": {"architectures": ["LlamaForCausalLM"], "hidden_size": 128, "intermediate_size": 11008, "num_attention_heads": 8, "num_hidden_layers": 2, "vocab_size": 2000}, "ianagra/Llama-2-7b-ALLM-virtual-sales-assistant": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "quantumaikr/KoreanLM-3B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 2048, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "quantumaikr/llama-2-70B-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "Deniskin/gpt3_medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50264}, "ozcangundes/mt5-small-turkish-summarization": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "EleutherAI/pythia-1b-deduped-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "flozi00/Llama-2-7b-german-assistant-v3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Mikivis/gpt2-large-lora-stf4": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "AK270802/DialoGPT-small-harrypotter": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "EleutherAI/pythia-12b-deduped-v0": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50688}, "EricPeter/Llama-2-multilingual": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Pygmalion-2-7B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "miguelvictor/python-gpt2-large": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_layer": 36, "vocab_size": 50257}, "h2oai/h2ogpt-16k-codellama-7b-python": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "ammarinjtkrbh/llama-2-7b-food-search": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "GroNLP/gpt2-small-dutch": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 40000}, "pszemraj/opt-350m-email-generation": {"architectures": ["OPTForCausalLM"], "hidden_size": 1024, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50265}, "caffsean/t5-small-finetuned-keyword-to-text-generation": {"architectures": 
["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "lmqg/mt5-small-dequad-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "yuyijiong/T5-large-sentiment-analysis-Chinese-MultiTask": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32596}, "sonoisa/t5-qiita-title-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "YeungNLP/firefly-bloom-1b4": {"architectures": ["BloomForCausalLM"], "hidden_size": 2048, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 46145}, "samwit/koala-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Vicuna-13B-1-3-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Photolens/OpenOrcaxOpenChat-2-13b-langchain-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "Ichsan2895/Merak-7B-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "flozi00/Llama-2-7b-german-assistant-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ss1612/loki-chat": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "OpenBuddy/openbuddy-falcon-7b-v5-fp16": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 70144}, "wellecks/llmstep-mathlib4-pythia2.8b": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50278}, "dariolopez/llama-2-7b-oasst1-es": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/WizardLM-1.0-Uncensored-CodeLlama-34B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "brad1141/gpt2-finetuned-comp2": {"architectures": ["GPT2ForTokenClassification"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/chronos-hermes-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "lizhuang144/flan-t5-large-VG-factual-sg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "nivos/pythia-410m-deduped-finetuned-final-activity-text-10epoch": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "HamidRezaAttar/gpt2-product-description-generator": 
{"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/ORCA_LLaMA_70B_QLoRA-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "lmsys/vicuna-13b-delta-v0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "jacobmorrison/tk-instruct-xl-lora-experiments": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32100}, "GroNLP/gpt2-small-italian": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 30001}, "yihsuan/mt5_chinese_small": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250100}, "YTTD/DialoGPT-medium-souv2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "arubenruben/ptt5-portuguese-cnn-dailymail-azure-pt-pt": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "localmodels/Llama-2-7B-Chat-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "lgaalves/llama-2-13b-chat-platypus": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "it5/it5-large-question-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32103}, "psyche/KoT5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TheBloke/Llama2-70B-OASST-SFT-v10-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32007}, "deepparag/Aeona": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "lmqg/mt5-small-koquad-qg": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "lmqg/mt5-small-esquad-qg": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "NinedayWang/PolyCoder-0.4B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "ConvLab/t5-small-nlu-multiwoz21": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "SIC98/GPT2-python-code-generator": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lmqg/mt5-small-itquad-qg": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "kaiyuy/leandojo-lean4-tacgen-byt5-small": {"architectures": ["T5ForConditionalGeneration"], 
"d_ff": 3584, "d_model": 1472, "num_heads": 6, "num_layers": 12, "vocab_size": 384}, "usvsnsp/pythia-6.9b-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "PlanTL-GOB-ES/gpt2-large-bne": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_layer": 36, "vocab_size": 50261}, "jordiclive/flan-t5-11b-summarizer-filtered": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "Jordine/scpoo": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "behnamsh/gpt2_camel_physics": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "lmqg/mt5-small-esquad-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "MerlynMind/merlyn-education-teacher-assistant": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, "num_attention_heads": 40, "num_hidden_layers": 36, "vocab_size": 50688}, "mesolitica/llama-7b-hf-16384-fpf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "MatthisHoules/rat-t5-qdmr-grounded-with-db": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "persiannlp/mt5-small-parsinlu-qqp-query-paraphrasing": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "lmqg/mt5-small-koquad-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "lmqg/mt5-small-itquad-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "openthaigpt/openthaigpt-gpt2-instructgpt-poc-0.0.4": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50268}, "ChanceFocus/finma-7b-full": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "vivekraina/Llama-2-7b-hf-8bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "dpml/vicuna_mt_450s": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "burberg92/resume_summary": {"architectures": ["BloomForCausalLM"], "hidden_size": 2048, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "Monero/Pygmalion-Metharme-7b-4bit-TopScore": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Icaruas/7bill8k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32002}, "dahara1/ELYZA-japanese-Llama-2-7b-fast-instruct-GPTQ": {"architectures": 
["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 45043}, "TheBloke/Yarn-Llama-2-13B-64K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "prithivida/passive_to_active_styletransfer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "lmqg/mt5-small-frquad-qg": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "JamesStratford/PLord-bot-DialoGPT-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "yizhangliu/prompt-extend": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 52000}, "lmqg/mt5-small-frquad-ae": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250101}, "Beltenebros/DialoGPT-small-PerionOfGaul": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "sominw/rel23_conll": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "mncai/SGPT-5.8B-wiki-mirae-bank_securities-epoch5": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 16, "num_hidden_layers": 28, "vocab_size": 30080}, "MickyMike/VulRepair": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32105}, "ybelkada/t5-11b-sharded": {"architectures": ["T5WithLMHeadModel"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "Einmalumdiewelt/T5-Base_GNAD_MaxSamples": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "digitous/13B-HyperMantis_GPTQ_4bit-128g": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "weqweasdas/hh_rlhf_rm_open_llama_3b": {"architectures": ["LlamaForSequenceClassification"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/WizardMath-13B-V1.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "ziqingyang/chinese-alpaca-2-7b-16k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 55296}, "valhalla/t5-base-squad": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "ELiRF/mt5-base-dacsa-es": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "abhitopia/question-answer-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, 
"TurkuNLP/gpt3-finnish-large": {"architectures": ["BloomModel"], "hidden_size": 1536, "n_head": 16, "n_layer": 24, "vocab_size": 131072}, "Abyss-fyf/DialoGPT-small-discord": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/OpenOrca-Platypus2-13B-GGML": {}, "TheBloke/Airoboros-L2-7B-2.1-GGUF": {}, "huggingtweets/googleai": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "it5/it5-base-question-answering": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32103}, "woodmtaylor/DialoGPT-medium-Heej": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "kimdwan/t5-base-korean-summarize-LOGAN": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50358}, "Narrativa/mT5-base-finetuned-tydiQA-question-generation": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "huggingtweets/normmacdonald": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "r3dhummingbird/DialoGPT-medium-neku": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "yhavinga/t5-v1.1-base-dutch-cnn-test": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32103}, "kennethhendricks/DialoGPT-medium-jared-hendricks-gen1": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "retrieva-jp/t5-small-long": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "TheBloke/Vigogne-2-7B-Chat-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TigerResearch/tigerbot-7b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 60928}, "Fredithefish/Guanaco-13B-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "consciousAI/question-answering-generative-t5-v1-base-s-q-c": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TheBloke/open-llama-7B-v2-open-instruct-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "mosama/Llama-2-Medical-Merged-LoRA": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "bullmount/quanIta_t5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32103}, "YeungNLP/bloomz-396m-zh": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": 
null, "n_layer": 24, "vocab_size": 46145}, "GreenBitAI/LLaMA-7B-2bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "chgk13/decicoder-1b-openvino-int8": {"architectures": ["DeciCoderForCausalLM"], "hidden_size": 2048, "intermediate_size": 5888, "num_attention_heads": 32, "num_hidden_layers": 20, "vocab_size": 49152}, "bigscience/bloomz-mt": {"architectures": ["BloomForCausalLM"], "n_layer": 70, "num_attention_heads": 112, "vocab_size": 250880}, "LarkAI/codet5p-770m_nl2sql_oig": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "Linly-AI/Chinese-Falcon-7B": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 90046}, "ckip-joint/bloom-3b-zh-instruct": {"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "sgr23/llama2-fine-tuned-dolly-15k-dto": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "edbeeching/gpt2-imdb": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "cardiffnlp/flan-t5-small-tweet-emotion": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "TheBloke/airoboros-7B-gpt4-1.4-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/CodeLlama-7B-GGUF": {}, "TheBloke/Airoboros-c34B-2.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "byeongal/Ko-DialoGPT": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 51200}, "ismaelfaro/gpt2-poems.en": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "tuner007/t5_abs_qa": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "kennethhendricks/DialoGPT-medium-PowPowGaming": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "DunnBC22/flan-t5-base-text_summarization_data": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "zarakiquemparte/hermeslimarp-l2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "MagicLEMP/llamavocat_13B_mixed_16K": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "4bit/ELYZA-japanese-Llama-2-7b-instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "EnglishVoice/t5-base-us-to-uk-english": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, 
"num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "devanshipatel/t5-gec-english-125k": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "helloollel/vicuna-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "nferroukhi/WizardLM-Uncensored-Falcon-7b-sharded-bf16": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65025}, "dacorvo/tiny-random-gpt2-neuronx": {"intermediate_size": 37, "n_embd": 32, "n_head": 4, "n_inner": null, "n_layer": 5, "vocab_size": 1000}, "JamesStratford/Pidrow-bot-DialoGPT-Large-Feb2023": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "tsuyuan/Llama-2-7b-unit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 41218}, "OFA-Sys/gsm8k-rft-llama7b2-u13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "uer/gpt2-chinese-ancient": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 25370}, "YTTD/DialoGPT-medium-safv3": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "Neko-Institute-of-Science/LLaMA-65B-HF": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/Spicyboros-13B-2.2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "IDEA-CCNL/Randeng-T5-77M-MultiTask-Chinese": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32596}, "coreml-projects/Llama-2-7b-chat-coreml": {"architectures": ["LlamaForCausalLM"], "vocab_size": 32000}, "oscorrea/scores-lince-sm": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "morzecrew/FRED-T5-RefinedPersonaChat": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1536, "num_heads": 24, "num_layers": 24, "vocab_size": 50364}, "anjakuzev/harry_7": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Mythalion-13B-GGUF": {}, "Kryptone/monikAI": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "TheBloke/Luna-AI-Llama2-Uncensored-GGML": {}, "mlabonne/llama-2-7b-miniguanaco": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Hermes-LLongMA-2-7B-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "zlsl/l_erotic_kink_chat": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 
50259}, "Sao10K/Stheno-Inverted-1.2-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "castorini/duot5-base-msmarco": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "mrm8488/t5-base-finetuned-qasc": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "entropy/gpt2_zinc_87m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 2707}, "MarkyMarx/DialoGPT-medium-jimmybot2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "stefan-it/secret-gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Narrativa/byt5-base-tweet-hate-detection": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3968, "d_model": 1536, "num_heads": 12, "num_layers": 18, "vocab_size": 384}, "nicholasKluge/Aira-2-124M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50261}, "TheBloke/Samantha-1.11-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "castorini/monot5-large-msmarco": {"architectures": ["T5Model"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "PoloHuggingface/French_grammar_error_corrector": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32102}, "cambridgeltl/magic_mscoco": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50259}, "Gatozu35/tortoise-tts": {"architectures": ["GPT2InferenceModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 30, "vocab_size": 604}, "abacusai/Giraffe-v1-delta-13b-scaled-16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "flozi00/Llama-2-13B-german-assistant-v3-4bit-autogptq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "HAERAE-HUB/tulu_13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32001}, "doc2query/msmarco-14langs-mt5-base-v1": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "Maciel/T5Corrector-base-v2": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "vilm/vietcuna-3b-v2": {"architectures": ["BloomForCausalLM"], "hidden_size": 2560, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "TitanML/ct2-int8-falcon-7b-instruct": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "ybelkada/llama-7b-GPTQ-test": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, 
"num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "h2oai/h2ogpt-16k-codellama-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32016}, "TigerResearch/tigerbot-70b-chat-v1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 60928}, "Supiri/t5-base-conversation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "msterbentz/t5-base-break-high": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "igorktech/rut5-small-chit-chat-intelligent": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 20100}, "kuleshov/llama-7b-4bit": {"architectures": ["LLaMAForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "hipnologo/gpt2-imdb-finetune": {"architectures": ["GPT2ForSequenceClassification"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "qwopqwop/danbooru-llama-gptq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "t-dai-con/gpt-fine-tuned-v2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Platypus2-70B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "KETI-AIR/ke-t5-base-ko": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 64128}, "doc2query/all-t5-base-v1": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "GT4SD/multitask-text-and-chemistry-t5-base-standard": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "uer/gpt2-medium-chinese-cluecorpussmall": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 21128}, "UBC-NLP/AraT5-base-title-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 110080}, "dsivakumar/text2sql": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "power-greg/super-fast-llm": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": 2048, "n_layer": 4, "vocab_size": 2048}, "AlexWortega/instruct_rugptMedium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50263}, "hiyouga/Llama-2-Chinese-13b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "frank098/llama2-13b-8k-vnf-virtualization": {"architectures": ["LlamaForCausalLM"], 
"hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "OFA-Sys/gsm8k-rft-llama7b-sample100": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "EnterNameBros/Senko-ai-medium": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "PeanutJar/LLaMa-2-PeanutButter_v19_R8-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Sao10K/Medusa-1.1-L2-7B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "ChrisVCB/DialoGPT-medium-cmjs": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "indonesian-nlp/gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "persiannlp/mt5-small-parsinlu-squad-reading-comprehension": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "stmnk/codet5-small-code-summarization-python": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32100}, "emozilla/LLongMA-2-13b-16k-flash": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "bigscience/bloom-petals": {"architectures": ["BloomForCausalLM"], "hidden_size": 14336, "n_head": 112, "n_layer": 70, "vocab_size": 250880}, "procesaur/gpt2-srlat": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 52000}, "ashwinR/CodeExplainer": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32100}, "Chirayu/nl2pandas": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "OpenBuddy/openbuddy-falcon-7b-v6-bf16": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 70144}, "swbaek/tulu_65b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32001}, "huggingtweets/wallstreetbets": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Sultannn/gpt2-ft-id-puisi": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 35000}, "sonoisa/sentence-t5-base-ja-mean-tokens": {"architectures": ["T5Model"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "sdadas/polish-gpt2-xl": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": 6400, "n_layer": 48, "vocab_size": 51200}, "sjrhuschlee/flan-t5-large-squad2": {"architectures": ["T5ForQuestionAnswering"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "Hnabil/t5-address-standardizer": {"architectures": 
["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Gryphe/MythoLogic-Mini-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Athena-v1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Undi95/MythoMax-L2-Kimiko-v2-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "magnifi/llama-augmented-contextual-2-epoch-6-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "doc2query/msmarco-chinese-mt5-base-v1": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "Sakuna/t5_grammar_checker": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Dahoas/pythia-1B-response-full-static-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "localmodels/Vicuna-7B-v1.3-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Sao10K/Stheno-1.1-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "mlabonne/drllama-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "IlyaGusev/rugpt3medium_sum_gazeta": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "describeai/gemini": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "mojians/E2E-QA-Mining": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32102}, "TheBloke/Wizard-Vicuna-13B-Uncensored-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "dnagpt/human_gpt2-v1": {"architectures": ["GPT2Model"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 25000}, "heegyu/WizardVicuna-Uncensored-pythia-160m-deduped": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "maximuslee07/llama-2-7b-rockwell": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "DylanJHJ/fidt5-base-nq": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "laituan245/molt5-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 
2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "DancingIguana/music-generation": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 25000}, "Qiliang/flan-t5-large-summarization-finetuned-xsum": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "TheBloke/Vicuna-7B-CoT-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "hpcaitech/openmoe-base": {"architectures": ["OpenMoeForCausalLM"], "hidden_size": 768, "intermediate_size": 2048, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 256384}, "CalderaAI/13B-Thorns-l2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "CHIH-HUNG/llama-2-13b-FINETUNE1_17w-r4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "IlyaGusev/rugpt_medium_turbo_instructed": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50258}, "pankajmathur/orca_alpaca_3b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 3200, "intermediate_size": 8640, "num_attention_heads": 32, "num_hidden_layers": 26, "vocab_size": 32000}, "TheBloke/Wizard-Vicuna-7B-Uncensored-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "abacusai/Giraffe-v1-delta-13b-scaled-4": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Huginn-v3-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "bloom-testing/test-bloomd-350m-main": {"architectures": ["BloomModel"], "n_inner": null, "n_layer": 24, "num_attention_heads": 16, "vocab_size": 250880}, "AI-Sweden/gpt-sw3-356m": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": 4096, "n_layer": 24, "vocab_size": 64000}, "raymondho/DialoGPT-small-harry": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "TheBloke/airochronos-33B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "TheBloke/OpenChat_v3.2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "ahnyeonchan/OpenOrca-AYT-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "stanford-crfm/expanse-gpt2-small-x777": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "doc2query/msmarco-german-mt5-base-v1": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, 
"vocab_size": 250112}, "ku-nlp/gpt2-medium-japanese-char": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 6000}, "llm-blender/gen_fuser_3b": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32100}, "lomahony/eleuther-pythia2.8b-hh-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50304}, "TheBloke/Llama2-22B-GPLATTY-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "grammarly/coedit-xl-composite": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32100}, "imuncomfortable/DiabloGPT-small-CocoAtarashi": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "kaiyuy/leandojo-lean3-retriever-tacgen-byt5-small": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3584, "d_model": 1472, "num_heads": 6, "num_layers": 12, "vocab_size": 384}, "michaelwzhu/Chinese-LlaMA2-13B-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 55296}, "Xenova/llama2.c-stories110M": {"architectures": ["LlamaForCausalLM"], "hidden_size": 768, "intermediate_size": 2048, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 32000}, "Youngwoo9/T5_Pyeongsan": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50358}, "literallywood/DialoGPT-small-ekansh": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "jondurbin/spicyboros-7b-2.2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "indobenchmark/indogpt": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 40005}, "it5/it5-efficient-small-el32-news-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 32, "vocab_size": 32100}, "mesolitica/finetune-translation-t5-base-standard-bahasa-cased-v2": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "Den4ikAI/FRED-T5-XL_instructor": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1536, "num_heads": 24, "num_layers": 24, "vocab_size": 50365}, "mlabonne/gpt2-GPTQ-4bit": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "steerapi/Llama-2-7b-chat-hf-onnx": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Langboat/bloom-1b4-zh": {"architectures": ["BloomForCausalLM"], "hidden_size": 2048, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 46145}, "neulab/docprompting-codet5-python-doc-retriever": {"architectures": ["BERTScorerForCL"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, 
"vocab_size": 32100}, "AI-Sweden/gpt-sw3-20b": {"architectures": ["GPT2LMHeadModel"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 44, "vocab_size": 64000}, "syndi-models/article-title-generator": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "vgaraujov/Dummy5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TFLai/Orca-Nova-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32002}, "allenai/tk-instruct-11b-def-pos": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "aspis/gpt2-genre-story-generation": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50267}, "lcw99/t5-base-korean-paraphrase": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 50358}, "Celestinian/TopicGPT": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50257}, "TheBloke/Redmond-Hermes-Coder-GPTQ": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49153}, "TheBloke/h2ogpt-gm-oasst1-en-2048-falcon-7b-v3-GPTQ": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "abhinavkulkarni/meta-llama-Llama-2-13b-chat-hf-w4-g128-awq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "jypppp/llama-2-7b-manual_GPT_ver2": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Llama-2-7B-32K-Instruct-GGML": {}, "TheBloke/Yarn-Llama-2-7B-128K-GGML": {}, "quantumaikr/KoreanLM-llama-2-7B-finetuned": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 46336}, "google/t5-xl-ssm-nq": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "nikokons/gpt2-greek": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 22000}, "NYTK/PULI-GPT-3SX": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50048}, "Futyn-Maker/rugpt3small_based_on_gpt2-finetuned_teachers_quotes_small": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50264}, "localmodels/Llama-2-13B-Chat-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "SebastianSchramm/UniNER-7B-all-GPTQ-4bit-128g-actorder_True": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Airoboros-L2-70B-2.1-Creative-GPTQ": 
{"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "casperhansen/vicuna-7b-v1.5-awq-gemv": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "IDEA-CCNL/Wenzhong-GPT2-3.5B": {"architectures": ["GPT2LMHeadModel"], "n_embd": 3072, "n_head": 32, "n_inner": 12288, "n_layer": 30, "vocab_size": 50304}, "antoinelouis/belgpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257}, "atkh6673/DialoGPT-small-trump": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "huggingface-course/mt5-small-finetuned-amazon-en-es": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "malteos/gpt2-xl-wechsel-german": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": 6400, "n_layer": 48, "vocab_size": 50304}, "KES/caribe-capitalise": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "pszemraj/flan-t5-large-instruct-dolly_hhrlhf": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "Tanmay09516/StableBeluga-7B-sharded-bf16-5GB": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "abhinavkulkarni/codellama-CodeLlama-7b-Python-hf-w4-g128-awq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Spicyboros-7B-2.2-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "huggingtweets/elonmusk": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "BelleGroup/BELLE-7B-2M": {"architectures": ["BloomModel"], "n_inner": null, "n_layer": 30, "num_attention_heads": 32, "vocab_size": 250880}, "snoop2head/Gomoku-GPT2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 256, "n_head": 4, "n_inner": null, "n_layer": 4, "vocab_size": 404}, "AnimusOG/pygmalion-7b-4bit-128g-cuda-2048Token": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/airoboros-l2-7B-gpt4-m2.0-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Juniplayground/Mist_LLaMA-2-7B-1024_V3": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "DataLinguistic/DataLinguistic-34B-V1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32001}, "erikycd/chatbot_hadita": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50262}, 
"medicalai/ClinicalGPT-base-zh": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "TheBloke/orca_mini_v2_13b-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "NIRVANA/T5_academic_paraphraser": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "josmunpen/mt5-small-spanish-summarization": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "shahp7575/gpt2-horoscopes": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50260}, "yihsuan/best_model_0427_small_long": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250100}, "malteos/bloom-6b4-clp-german-oasst-v0.1": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_layer": 30, "vocab_size": 50272}, "openllmplayground/openalpaca_7b_700bt_preview": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Gaivoronsky/ruGPT-3.5-13B-fp16": {"architectures": ["GPT2LMHeadModel"], "n_embd": 5120, "n_head": 40, "n_inner": null, "n_layer": 40, "vocab_size": 50272}, "universeTBD/astrollama": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "gorkemgoknar/gpt2-small-turkish": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "huggingtweets/joejoinerr": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Prarabdha/T5-Transformer-RickBot": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "beomi/kollama-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 52000}, "mohammadtaghizadeh/flan-t5-base-imdb-text-classification": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "nicholasKluge/Aira-Instruct-774M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1280, "n_head": 20, "n_inner": null, "n_layer": 36, "vocab_size": 50259}, "bhenrym14/airoboros-7b-gpt4-1.4.1-lxctx-PI-16384-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Alireza1044/michael_bert_lm": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 50257}, "shibing624/gpt2-dialogbot-base-chinese": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 10, "vocab_size": 13317}, "mesolitica/finetune-summarization-ms-t5-base-standard-bahasa-cased": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "lmqg/flan-t5-large-squad-qg": 
{"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32101}, "circulus/alpaca-7b": {"architectures": ["LlaMAForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "reeducator/vicuna-13b-free": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "flozi00/Llama-2-13b-german-assistant-v6-4bit-autogptq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 37632}, "SasnayaLetovka/tinkoff-zhientaev-model": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50262}, "mesolitica/t5-base-standard-bahasa-cased": {"architectures": ["T5Model"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "EllyPony/flutterbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "pszemraj/flan-t5-xl-grammar-synthesis": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 5120, "d_model": 2048, "num_heads": 32, "num_layers": 24, "vocab_size": 32128}, "jinxuewen/vicuna-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "fireballoon/baichuan-llama-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 64000}, "TheBloke/Vicuna-7B-v1-3-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "scural/arxiv_model": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "Undi95/CodeEngine": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "Codexister/DialoGPT-medium-KafkaBotV1": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "google/t5-xxl-ssm-nq": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 10240, "d_model": 4096, "num_heads": 64, "num_layers": 24, "vocab_size": 32128}, "uer/gpt2-chinese-couplet": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 21128}, "nicholasKluge/Aira-Instruct-355M": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50259}, "HIT-SCIR/huozi-7b-sft": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250880}, "NousResearch/CodeLlama-13b-Instruct-hf-flash": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32016}, "Enno-Ai/vigogne2-enno-13b-sft-lora-4bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, 
"sonoisa/t5-base-japanese-article-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "Kyrmasch/t5-kazakh-qa": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 18947}, "TheBloke/airoboros-13b-gpt4-1.4-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TheBloke/Kimiko-13B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "arya555/vicuna-7b-v1.5-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Geo/gpt2_custom_c_q_and_a": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "laituan245/molt5-small-smiles2caption": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "cloudqi/cqi_brain_memory_summarizer_large_pt_v0": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "ybelkada/bloom-1b7-8bit": {"architectures": ["BloomForCausalLM"], "hidden_size": 2048, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "snipaid/snip-igel-500-v2-adapter-merged": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_layer": 30, "vocab_size": 50304}, "TabbyML/SantaCoder-1B": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 2048, "n_head": 16, "n_inner": 8192, "n_layer": 24, "vocab_size": 49280}, "TheBloke/Guanaco-33B-SuperHOT-8K-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 60, "vocab_size": 32000}, "hanseokhyeon/kullm-polyglot-5.8b-v2-GPTQ": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 4096, "intermediate_size": 16384, "num_attention_heads": 16, "num_hidden_layers": 28, "vocab_size": 30080}, "CAIRE-CedarsSinai/falcon-7b-qlora-chat-support-bot-faq-alzkb-version-1": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "pranavpsv/genre-story-generator-v2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50266}, "nandakishormpai/t5-small-machine-articles-tag-generation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "ITG/DialoGPT-medium-spanish-chitchat": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "4bit/falcon-7b-instruct-GPTQ": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65024}, "OpenBuddy/openbuddy-openllama-7b-v5-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 38449}, "papahawk/keya-560m": {"architectures": ["BloomForCausalLM"], "n_inner": null, "n_layer": 24, "num_attention_heads": 16, "vocab_size": 250880}, 
"abhinavkulkarni/tiiuae-falcon-40b-instruct-w4-g128-awq": {"architectures": ["RWForCausalLM"], "hidden_size": 8192, "n_head": 128, "n_layer": 60, "vocab_size": 65024}, "funstoryai/immersiveL-exp": {"architectures": ["BloomForCausalLM"], "hidden_size": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "Benson/llama-2-7b-miniguanaco-hf": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "clancystudios/DialoGPT-medium-Morty": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "huggingtweets/realdonaldtrump": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "charanhu/text_to_sql_2": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32102}, "beomi/kollama-13b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 52000}, "IDEA-CCNL/Ziya-LLaMA-13B-v1.1": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 39424}, "nicholasKluge/Aira-Instruct-PT-1B7": {"architectures": ["BloomForCausalLM"], "hidden_size": 2048, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250683}, "TheBloke/Llama2-22B-Daydreamer-v3-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 6656, "intermediate_size": 17920, "num_attention_heads": 52, "num_hidden_layers": 40, "vocab_size": 32000}, "yongzx/pythia-160m-sft-hh": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 768, "intermediate_size": 3072, "num_attention_heads": 12, "num_hidden_layers": 12, "vocab_size": 50304}, "h2oai/h2ogpt-16k-codellama-34b-python": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "nedima68/author_articles_GPT2_textgen_TR": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 52001}, "IronChef/MascotAI_Open_LLaMA_FINAL": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "unionai/pythia-1B-deduped-wikipedia-8bit": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2560, "intermediate_size": 10240, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 50432}, "Chirayu/nl2cql": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32100}, "TheBloke/Nous-Puffin-70B-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/Llama-2-70B-Orca-200k-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 28672, "num_attention_heads": 64, "num_hidden_layers": 80, "vocab_size": 32000}, "TheBloke/Llama-2-70B-chat-GGUF": {}, "sartmis1/CodeLlama-34b-instruct-openapi": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, 
"flax-community/bengali-t5-base": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32103}, "csebuetnlp/mT5_m2o_hindi_crossSum": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "huggingtweets/fabrizioromano": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "yshen99/ZhiGuoLiZheng-GPT2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 21128}, "malalejandra/putinspeaks": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "Intel/fid_flan_t5_base_nq": {"architectures": ["FusionInDecoderForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "sjrhuschlee/flan-t5-base-mnli": {"architectures": ["T5ForSequenceClassification"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "TheBloke/Codegen25-7B-mono-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 51200}, "frank098/starcoder-vyatta": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49152}, "Xenova/llama2.c-stories42M": {"architectures": ["LlamaForCausalLM"], "hidden_size": 512, "intermediate_size": 1376, "num_attention_heads": 8, "num_hidden_layers": 8, "vocab_size": 32000}, "flozi00/Llama-2-13b-german-assistant-v5": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 37632}, "Andrei-Alex/Fine-Tuned-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/vicuna-7B-1.1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "sharpbai/alpaca-7b-merged": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}, "Clakmann/t5-base-Clakmann-thesis": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "RuterNorway/Llama-2-13b-chat-norwegian-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "osieosie/bloom-560m-4bit": {"architectures": ["BloomForCausalLM"], "hidden_size": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 250880}, "paulowoicho/t5-podcast-summarisation": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "liujch1998/rainier-large": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "gsdas/qct5": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, 
"nicholasKluge/Aira-Instruct-1B5": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1600, "n_head": 25, "n_inner": null, "n_layer": 48, "vocab_size": 50259}, "kajdun/iubaris-13b-v3_GGML": {}, "csebuetnlp/mT5_m2o_english_crossSum": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "dehio/german-qg-t5-quad": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "emil2000/dialogpt-for-french-language": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "khalidsaifullaah/bengali-lyricist-gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "thinhda/chatbot": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50257}, "Finnish-NLP/llama-7b-finnish": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 64256}, "ehartford/WizardLM-7B-V1.0-Uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/WizardCoder-Guanaco-15B-V1.0-GPTQ": {"architectures": ["GPTBigCodeForCausalLM"], "n_embd": 6144, "n_head": 48, "n_inner": 24576, "n_layer": 40, "vocab_size": 49153}, "DUOMO-Lab/TransGPT-v0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 49954}, "TheBloke/Platypus2-70B-Instruct-GGUF": {}, "lmqg/t5-large-squad-qg-ae": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32101}, "rubentito/hivt5-base-mpdocvqa": {"architectures": ["HiVT5"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "cosimoiaia/Loquace-70m": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "metamyth/jennyNew": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "AlexWortega/LLama2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "totally-not-an-llm/AlpacaCielo2-7b-8k": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/orca_mini_v3_7B-GGML": {}, "zjunlp/knowlm-13b-base-v1.0": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ticoAg/gpt2-tigerbot-pt-zh": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "akshat3492/mT5": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "TheBloke/Falcon-180B-Chat-GGUF": {}, "unicamp-dl/mt5-base-mmarco-v2": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, 
"num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "malteos/gpt2-wechsel-german-ds-meg": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": 3072, "n_layer": 12, "vocab_size": 50304}, "phpaiola/ptt5-base-summ-temario": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "mesolitica/finetune-translation-t5-super-tiny-standard-bahasa-cased": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 256, "num_heads": 6, "num_layers": 2, "vocab_size": 32100}, "ademfatnassi/bonjourGPT-small": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "pr1me/llama2_13b_eros_instruct": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "Xenova/llama2.c-stories15M": {"architectures": ["LlamaForCausalLM"], "hidden_size": 288, "intermediate_size": 768, "num_attention_heads": 6, "num_hidden_layers": 6, "vocab_size": 32000}, "sekarmulyani/gpt2-ulasan-beauty-products-gen": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "akhooli/gpt2-small-arabic-poetry": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257}, "mrm8488/spanish-t5-small-sqac-for-qa": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32103}, "flozi00/falcon-7b-german-assistant-v2": {"architectures": ["RWForCausalLM"], "hidden_size": 4544, "n_head": 71, "n_layer": 32, "vocab_size": 65040}, "TheBloke/llama-2-13B-chat-limarp-v2-merged-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ticoAg/gpt2-tiger-sft-zh": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "thiagomf/Llama-2-7b-hf-sharded-bf16-1GB": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "THUMT/mGPT": {"architectures": ["GPT2LMHeadModel"], "vocab_size": 250100, "n_embd": 1024, "n_layer": 24, "n_head": 16, "n_inner": 4096}, "lmqg/flan-t5-base-squad-qg": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32101}, "h2oai/h2ogpt-gm-oasst1-en-2048-open-llama-7b-preview-700bt": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "TheBloke/Phind-CodeLlama-34B-Python-v1-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 8192, "intermediate_size": 22016, "num_attention_heads": 64, "num_hidden_layers": 48, "vocab_size": 32000}, "arogov/llama2_13b_chat_uncensored": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "ai-forever/mGPT-1.3B-bulgarian": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2048, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 100000}, "davesoma/SageBeluga13": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, 
"num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "pssubitha/llama-2-7b-sales-force-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "PyaeSoneK/pythia_70m_legalQA": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 512, "intermediate_size": 2048, "num_attention_heads": 8, "num_hidden_layers": 6, "vocab_size": 50304}, "hidude562/OpenMusenet-2.1-L": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1024, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50259}, "abeiler/huggingface-goatLora-goatV9-testData-morePushes": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "abinayam/gpt-2-tamil": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "persiannlp/mt5-base-parsinlu-squad-reading-comprehension": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 250112}, "pierreguillou/t5-base-qa-squad-v1.1-portuguese": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "lchaloupsky/czech-gpt2-oscar": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_layer": 12, "vocab_size": 50257}, "OpenHust/viet-gpt2": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "tiansz/ChatYuan-7B-merge": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "voidful/llama-v2-unit-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 8195}, "taaredikahan23/Llama-2-7b-chat-finetune": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "deutsche-telekom/mt5-small-sum-de-en-v1": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250100}, "hetpandya/t5-small-tapaco": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "sunhao666/chi-sum2": {"architectures": ["T5Model"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 21228}, "smartik/mt5-small-finetuned-gec-0.2": {"architectures": ["MT5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 250112}, "PORTULAN/gervasio-ptbr-base": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "clibrain/Llama-2-13b-ft-instruct-es-gptq-4bit": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "soketlabs/bhasha-7b-2k-hi": {"architectures": ["MPTForCausalLM"], "d_model": 4096, "vocab_size": 61772}, "codefuse-ai/CodeFuse-13B": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 5120, "intermediate_size": 20480, 
"num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 100831}, "Sentdex/GPyT": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 52000}, "it5/it5-large-news-summarization": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2816, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32103}, "FredZhang7/distilgpt2-stable-diffusion": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 6, "vocab_size": 50257}, "Rostlab/ProstT5_fp16": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 16384, "d_model": 1024, "num_heads": 32, "num_layers": 24, "vocab_size": 150}, "approach0/mathy-vicuna-13B-FFT": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "lighteternal/gpt2-finetuned-greek": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "stanford-crfm/battlestar-gpt2-small-x49": {"architectures": ["GPT2LMHeadModel"], "n_embd": 768, "n_head": 12, "n_inner": null, "n_layer": 12, "vocab_size": 50257}, "stacked-summaries/flan-t5-small-stacked-samsum-1024": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 1024, "d_model": 512, "num_heads": 6, "num_layers": 8, "vocab_size": 32128}, "TigerResearch/tigerbot-7b-base-v1": {"architectures": ["BloomForCausalLM"], "hidden_size": 4096, "n_head": 32, "n_inner": null, "n_layer": 30, "vocab_size": 250680}, "Chang-Su/llama-2-13b-chat-ko": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 39478}, "Clakmann/t5-base-Clakmann-thesis-epoch10": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "yekaraoglann/results": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 512, "num_heads": 8, "num_layers": 6, "vocab_size": 32128}, "bitadin/gpt-4-medium-titles-v2-flan-t5-base-llm-6": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 2048, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "google/t5_11b_trueteacher_and_anli": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 65536, "d_model": 1024, "num_heads": 128, "num_layers": 24, "vocab_size": 32128}, "TaylorAI/Flash-Llama-30M": {"architectures": ["LlamaForCausalLM"], "hidden_size": 384, "intermediate_size": 1024, "num_attention_heads": 12, "num_hidden_layers": 4, "vocab_size": 32000}, "flax-community/t5-base-wikisplit": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 3072, "d_model": 768, "num_heads": 12, "num_layers": 12, "vocab_size": 32128}, "razent/SciFive-large-Pubmed_PMC": {"architectures": ["T5ForConditionalGeneration"], "d_ff": 4096, "d_model": 1024, "num_heads": 16, "num_layers": 24, "vocab_size": 32128}, "inkoziev/rugpt_chitchat": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50258}, "lomahony/eleuther-pythia410m-hh-sft": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 1024, "intermediate_size": 4096, "num_attention_heads": 16, "num_hidden_layers": 24, "vocab_size": 50304}, "TheBloke/Vicuna-13B-v1.3-German-GPTQ": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, 
"num_hidden_layers": 40, "vocab_size": 32000}, "emozilla/LLongMA-2-13b-storysummarizer": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32032}, "yongzx/pythia-1b-sft-hh": {"architectures": ["GPTNeoXForCausalLM"], "hidden_size": 2048, "intermediate_size": 8192, "num_attention_heads": 8, "num_hidden_layers": 16, "vocab_size": 50304}, "TheBloke/airoboros-13b-gpt4-1.4-SuperHOT-8K-fp16": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "heegyu/llama-2-ko-7b-chat": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 46336}, "flozi00/Llama-2-7b-german-assistant-v3-4bit-autogptq": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "zarakiquemparte/zararp-l2-7b": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32000}, "Sao10K/Stheno-1.3-L2-13B": {"architectures": ["LlamaForCausalLM"], "hidden_size": 5120, "intermediate_size": 13824, "num_attention_heads": 40, "num_hidden_layers": 40, "vocab_size": 32000}, "TsinghuaAI/CPM-Generate": {"architectures": ["GPT2LMHeadModel"], "n_embd": 2560, "n_head": 32, "n_inner": null, "n_layer": 32, "vocab_size": 30000}, "AlexWortega/instruct_rugptlarge": {"architectures": ["GPT2LMHeadModel"], "n_embd": 1536, "n_head": 16, "n_inner": null, "n_layer": 24, "vocab_size": 50263}, "tatsu-lab/alpaca-7b-wdiff": {"architectures": ["LlamaForCausalLM"], "hidden_size": 4096, "intermediate_size": 11008, "num_attention_heads": 32, "num_hidden_layers": 32, "vocab_size": 32001}} \ No newline at end of file diff --git a/public/favicon.ico b/public/favicon.ico index a11777cc471a4344702741ab1c8a588998b1311a..f5e820a695166ce68172f80ebe9485953b8bbbf1 100644 GIT binary patch literal 15406 zcmeHuXLMFqmZfIR{OFk(_Dh>Q!`fbfCSx9TzTK;JAoOmoDKlhq+$9Y_9t4#`PPxa^(t6 zocO^!f9={eT)NtcGZ!!8_^C5^W5+)1K6n&2u3bmx<*T^ZaSZ`sZEb#Vp=6UqG!N+Y9^-7D8#PqJMr;fw&LwK{)8u&K7rZIv#@pZ z7Tmgd)5!d%x8FhHv_!;4#~>yu8gVhPNKHyXRe2?qWJrmwuKFCbVz?@mLk)DzUze#@Zo#=;} zifWuWbH>R3>Z`B9eY`s+jB|sBr$65lgfh3WALF4T%24nB_s^Tj~z4m`}C7f5JQ~> z1qQ;)(;L$>r<=Xb;J#_@h$&O?>{HJe9r$wp#*Q9?nFU2S_SIKr|99=&i5b*)cu0uR zUu{JtnriD&QCf~%>b;4!vta%L6!c89BL_o}GijqGD85R%6cWxmdh#37&fLX>53Z18&lOW&fAyQ+xO9!QQ=larDR$ z9OJsSfBG5z`t5i8?FzQ<`V3zk`vy1YFE?)7FhkF3X#cNq=zHrs>z==N)&2Jj3je^D ziZiwxejJKV_xq=HN>{p`{_*r3d2iji1r5a=#gwyW&zf=m{Q2(qqZrgRE?(@8Mbwvc zrlI)L^QstS4Lzs#{`~XL=BmFn4(>m6*Yo;La!Fs(rzJ;w`#BQ}^;t3TBIDYnibFBvX(UPJrUYdG0)8OP3a;J}Y(vE$nxvEhpoXnAuBUj67Ze8@PV^>lJv=e-(N zFJ9p~J8=G72QHky$iIJy@9k&Xar@@aW`C`9YR-d)kD{}C-s{W_$~A?r?LpOBdl2=^ zW`r$z69M(B;F140oKjlh7*&b6aL`#hdv4ti|iVk~Z3NMEl;!Sr0rWK1Y#Oev+WS1|S~ zhV1!lHzcd<%9{TQ^jpl=wu%GZrgv#WF4`Hk!ZrSur}1abG!1ai_eb%*@}y zSX86@Ro{p?EwfB+(|qN{WhiI7sw}H8-_9QeHhy_ zQ`6B{*U+U4?nf)-o1U2se_wwSy9e~`kJN-D=KER9(N*~R_}3;bYW=docKT#Wa;nj9 z)YNE9@|pyH<|E}MS0@)lFlWX@MpL2G0hClPt z7}trEC4hQ~M=AAPOj{Z}a4@EZg=54}y*n7gUB@AZIYqWOyQLM%aRWHN4{^%CeghF2 znTYU+6nMIOV!_-t9HFn>zJ2>n?7d2SR>B+=GC2$nKh%fo_rYj;d*;3=uygdqkioXJ z^>nlnuViOs!Y45dnfm%OpL3VqnPq16WjO6FZ(b@i(J?E7r*l8i|Sgd;>Ae zb|^8|IHM~suF-?*${-Gy$i1j2uOOb73^&*D#4&sy^(^ca795In-V;S%6*h1p){(F1 
zw+3Q{)Wj6H`G&xLtP2u|llJY|d)NG0e@k-<0w#y>{Xu4Le7cN|cgyaI+^l+m?txZFC zJDPg68#avZE`#fMH)GqSMJ0T9qmeT%HUZ&b5eVfz2%m-0ZY7WA*E=Mq;)&knPTad= z+xENWmkhI7S}=5|9r`}n*W72#A59GD?c+<^N``z~dUbMeH1W6Vo+iH2jW!U1h^aBu zOFs8F-eC2LvU0P2?UC(3TeFvnZQDNn_*e7G{_KX2Fgna+UXjkE|0vGy?Bq zo6}5Ah&KDw$a@-U8xS24jnTxfk&$u4p+&^P&cx0m4W^dOXiq0Io+&2Dr#{-c^;h%H zoim3x+rh*Z`RR1#mjc?-B%g^GPTP_U4SbjEU%oxWb_n8#?L#MrGFQaHi8V|lW2bCH zHN_Op6UDU(d%L)}@;!dWSLGLf*|OzV^S8~Lhq1Jc-#*gU+#AcBGj_~a&OMy_843^j zz?|k*6H`VEw_{8UWlb`fwl>bh$?(uANMj6FjEZI5_RQ1IB9lJpG}aj-hDjGe=*PWP z{@TpiNcqRIJ^9~)wl>y8?igx23{O7sBqmMrW&9tHQOy0Z%so-`w}z@Zq$j1&mZsnZ z;tD56XA}3dUd3b8cWra$WATDT*s^I09JnWneTttGJ$+1lqZqz%!v|)5&1;QkpLqsR z;Ztcl(-;ey&DsM40x@LBa7>*VfvQsF_a@Y_#{3=StKnWqw~7U-U4#dee_sFNYsP;h zlk_=-dKpZgRZNh6B;)()crUF1_V&|iKygr)%2wX`k$~>Ip2}ad=x_)5jK!cmG{=JT4S(L zHtR9PW^)gJ*XNn^i`vQ>=HOx+J$(4i{1y&-?T>#%`OIQur)OZgMn)!bv$KhtG-nl> z>BF;|n$fJd*c}@cBbDn_$E%K8&G~0c&oOu>kC;dPQ(Q39)ZoHuTKmGb1qR#7{eoUf7?MzL@v*HSXG6v}SiKIazpD$@MCuaH=@kUH2(&O~!t^DFmBhK(CGpo#OBG7lH#<`J{bB)%-A&)0B` zRR-fNU$FuSj3tTmfh3N^#AGDXrV@$EvKaSr^9nJ4{vz(>a;*6M3aop1J>GczO}zWg zd-#w(_sJ)pn)*k%r?>x|C2z8pHn@J>I%4&g@XGr2ctajCnp1yU>$m)0*Q=}>uX8`F z->iGBXRi~ppJ7e%6YH%@w# z_O0i4|H$8Zw`xDhpmCi;&;KLyONQRVlI>pK_mofb+>aOdukyWk5>m7_Ssec!p2X6v zcv97)Mn1kUZj9 zEF05RHh1jUah(0>EVK)Kk0g$!v-3K2(zRxBOP9#u^p;Kf(07{4nz!dYhNnMAo{Y~*b`5K}b%_Q(2ct?L&(b*)FgpFVvWN51|VN5B3nj(zhz{`&pj_}y8<_jHo``T5rE zuG)e%<>d?1E%)OB^=XFqDD9K%Pk(dGJ+Jy({HZu9t^I!TA2@IbKc15OU9n5@TR7^@ z^}4VX>wD_-8uNs{SA5s?0(E!j>+kXQUp~RowQpk0n;Y=aN89kuyYF?yb;j~r*KgwI z8?-|{f0e#w_JUlN_$&>JQxmskxGrAX$#+S1+4ePYY~4JQrFToehmRb^$y2BA@(W9D zabFiH?|H86#Ko)l`Yd^@AJ1Xi_ouM&_zApz_*<;n^A(=naTu)|K11doHektyT{v~- zCwzC}`>s6|A4fZq+#32@LowqFWz#VHptTOluJyFjZqA)KYxYeXnI*qvAJ+BA7hh8T zpC~``R<}+if7kjtQTDF~Q1`_;B-9tX}&vHnC@PnR>ZF?r_V8#2N3skCMeJP}sH% zQwo|fDX|P*G5K(tnuT$}NpPCPTFafa{P;lFkMqMQmx)M8V?TrSjd0hO)%+)8f#>Y z?c!`Hr{ooHDgHE`S8;f%e{+cM=d{l5;`59SOUdKZ8?LCXTW;x7x>wI>4%elAWixAQ zt*MSfT!eUF!^QG%$uAC8HIkk=`8V#t%`O{~%+?yD|2N-$7a1ATksO~yS!2l$B$Gpk zLiE%qQ%_0f`YirRYlveF7atdAcm(l);$Pe5%%|;D8#xMS50&LrsH&($J>`_Fh6AQd zx@sM|&L%Hw>Nl=UTz3igKzu|6Yni#+r`87k&Ue?b<`PGCFZmT?UU~IRQ-4UW3FLmJ zk&}_kk*t$dn~F16U8r8CuCgD=FPj%X*G$ez_9K2Kh5i>vPQ#a6vG`W;XX1$EM@`hX z_^rmedh=OzUw%%W;hFSq)w>Tr*ax2UuR7M2t&QZD%d3rE#IfB=e)V(K{9!%&nqB;0 zLY#Wkv8<274R7ktzNWY;@qoVM%q;t#7R&y1atdvxl(krc;osFKknIPP_Y(gy(Tlt- zxj=E>;vK~Y%x;<8eI+ll^~g(s#`@59N_4H;yMKwm%8*R5v{q_t_b;n22yyzLI2 zwCqP$$-jEdAB+#iL`AW`o5;TFRJgggQT`yq$@eGEBu>p zM#ZsSZ7_4F|83z1$!!hU&+64{Xn%>OHysrog&_lnQ2z0zk8fvZXLxnV;5?q34P}gq zjO6_Zj5l39UU3NGoyBFUzb$T5{F=DMQRI-t*HqFs#Sh3o6`K@q^!uZI9x?mm$o&#W zpTrm#9G#5`6Z}n&T>Pch{ow~2DgSN5(_6CZD$Mip%j?K*q+?q2H1bcLhF6{F?!zIR`Hc8R>(E9(d63iPE3^P4PXQJnlfwKi+jbCXlCk z^x;R$n#D8qp?x^h#&u3XoY?$sK2B@XnE`Qt!Cb4jQ1$R!T-^=7`ADBW+>id$Lm2s; z7HlSeC;iKYEZME=TW`I^K1dez9fSw|`2qGU{=0S3lLl6)U zf|2B)6@$bdk98Ph&Nd`5Mv4O!m#;H4vIWT??p1!Leuw0@w;O44MiKoZEPN_^CcdUW zsJ*u5`xJ}i=fWh)E7RFan#ewidMpWv@x(N#w7UQ-U9uG0dG_aacm1NAW$9km9XocA z*D7ScBLMDvR{amfGWCs=19T?EnflV167i2g?;fG33sDiS?x8M8+uf(Sms%NOOprY^<+0y^)5xIuo}YM!T?A zITQov---qQ^2#fB$=`DicJgdYIpdp$w;#_{#hHG%bg0~^n5Wn+Y$ksk(0?HNDvU{< zJ|^}k7ps3JURpLI{AGGgja~f?AMW8mKJy5DdH{7P9#{ESy*}Bf@`-RpQ@8xGN%ctj z@_zL$<=@Ji;t|Co+~iQqm;BZmcJ11Q2FB!o$x-Z^4aW@jPPL!PO_D`JpA{dKW8~w5 zDU*5!>fiM5JAl2y0E0DzBh+7NrW}g%X~eSfkI>){)63HNqssDfTfl0J*IcW zXM`{0AL7!7(x>}A@>_E6o_KlfT7x}>KYPpn39&_EV+)cqO4ti^Fuhv!wu~IqqjaL) zm-bqC$d$dDM;YHT(=#C79K?P^f5tlXj+K8c{p*^MmWioRw2|q#<~%^~yf7(W7o!p*re^cguysy$PW zP3PeBKCPj?rq=ki7yC0YjGgi9>*)M|>`K^L{b~8JeAZgGaA{V42g;+W-2T; zcH}w1-T1wFhhEI})vO(4qpD$q6%})&M?ITN|5@?ki}#oR^UpbNK6`Vc%^3t?TIG~< 
z*1f{)>OpEBr5Bw$&^Zm^6V)bVjJxtLVSU+`MhRn3Z8dQP^&Cr`E5-~~tO<%R@n5bJUce%&*A)W3= zrw?<#qd60)GlGhZ!qYmZ*`N487+Jk2oyQ!{eNq2SHv1s)Q7HE&Bq)sfbwdbaymG5> zm4}ogn`tYYLgAC44jr78^&)rx4{rmQz zsIZXuYaIGAUpz?s;Lf=wP4s76^f&!sVFBU)OxEXv1`HrB2{84ad|(vi)^kH_hr!c3 zkmn(D&Dw<{gkjaE6t>j4TlMvnPZU?fxu1`d^I5*^MaI36#J*kzGusXw#d;(W{Tcg( z!IX=hefpU@@>{Xry6)ejjAu6e)6eb$!}qZg9i^9`K6y>Lq`}JTE=(RG5(CA z|4bnEl>W!Lc_W2bW)k;3k2-E`m}PhxbH&r%j#ZNM9E2)fpdG*9qoKjqJIuf}BD%<4=7vio`(?U+mcViLtNo z^o`KqFk`!lXF87}8(Ki@KcDz{^TthP9~9>m({yf0b)9N>^~EK>Fqmov^=V~4OBOA@ zul&b&7I(q?`8+EZYwWs!v8b$6e(A#6$j`)QoyW1aA7g5!!Wjja)6{Bsk=e9maUn^J zAEm7I9BAj#z3MRGCB@<)3)eZ*jCtx@+%^1e{Fy_hJ++5bZZipm~ zDJw2BXCu`=7q=(=shWANl50p~ER-*XG1sbQP(39Xl|SUSviq0Gb%^J*&J<`LgrmgK zD2IsOQ%$FPgdbJEtMC88`|tlse$`&5PMtFNXJK0#it`JM{?n6^nNxX2$Ip*7p%?2f zKb~DkGdyKEqP#*AZxo;Es%p*W1@qgmn>p6vpXIlb-Qsige;giLycqS=r_RXh zyn^;&CS&Cc)*D)vbX`wbC2uovLkq`zatVe5Nyzi*@bA8goY zY(lwQwk^AsyoG$f)~YzEtA4X|ug|Mitu#Ht`^n$VKH?KgmzwhqC45&Fdksfl&*$kc2QVbIZs`DAbsuyvv9!z)Nvp5|3-u{vWeR@GUS`Iw^ZiXBx3ATzOT5X z9C_4@Vw+?y)o1EN&xxBTqMlX5Du(HtZ&d~NkoLHq{SEb(EZys>yxY@;^nVk6zkIpT zzwDz}cEhta#YNJ`Oj9qZW@@OdHF-j{ULDV&tX;njrCe_!eKd(9xm)HG{wuogL7Umk6&fOfKj=V8vNm(Sjf-oJzWq8Hb$LrwEsOwTPrN_sZd z@NA3Do7}u}rlso)i{*>GuaZ}G(p&$Q{Mv`BSJ($;9R2p2Z}7tpKbV-LzOH)f>iPEE zE4@c^ocf9V#UFpf@e?O-od56Oz!%4Hp8Z|TW!a^k`-*3)4;=myhmU+^MhE-2y=DBj zK3nqZ9%~Jfy{G)<+yK|0`Fh@G-G9IL>zw^%t^Epnqnm)=g#XRdSZo}d4bzb$>~ z8B3nt@^?~>YaG4L8(H`Fe8;`!w)Cyvdg}jP_x@+Tv*fYlx32PEBY!VDv+n?=l zvfpbx)^FCm_qzVQdn|cWv-jj6B)`r(>3Qp(-q(B0*ZVtb{{QH;r=RtHw?11k@p#aG N|NCzZ{D(F0{{ghx{#yV5 literal 3870 zcma);c{J4h9>;%nil|2-o+rCuEF-(I%-F}ijC~o(k~HKAkr0)!FCj~d>`RtpD?8b; zXOC1OD!V*IsqUwzbMF1)-gEDD=A573Z-&G7^LoAC9|WO7Xc0Cx1g^Zu0u_SjAPB3vGa^W|sj)80f#V0@M_CAZTIO(t--xg= z!sii`1giyH7EKL_+Wi0ab<)&E_0KD!3Rp2^HNB*K2@PHCs4PWSA32*-^7d{9nH2_E zmC{C*N*)(vEF1_aMamw2A{ZH5aIDqiabnFdJ|y0%aS|64E$`s2ccV~3lR!u<){eS` z#^Mx6o(iP1Ix%4dv`t@!&Za-K@mTm#vadc{0aWDV*_%EiGK7qMC_(`exc>-$Gb9~W!w_^{*pYRm~G zBN{nA;cm^w$VWg1O^^<6vY`1XCD|s_zv*g*5&V#wv&s#h$xlUilPe4U@I&UXZbL z0)%9Uj&@yd03n;!7do+bfixH^FeZ-Ema}s;DQX2gY+7g0s(9;`8GyvPY1*vxiF&|w z>!vA~GA<~JUqH}d;DfBSi^IT*#lrzXl$fNpq0_T1tA+`A$1?(gLb?e#0>UELvljtQ zK+*74m0jn&)5yk8mLBv;=@}c{t0ztT<v;Avck$S6D`Z)^c0(jiwKhQsn|LDRY&w(Fmi91I7H6S;b0XM{e zXp0~(T@k_r-!jkLwd1_Vre^v$G4|kh4}=Gi?$AaJ)3I+^m|Zyj#*?Kp@w(lQdJZf4 z#|IJW5z+S^e9@(6hW6N~{pj8|NO*>1)E=%?nNUAkmv~OY&ZV;m-%?pQ_11)hAr0oAwILrlsGawpxx4D43J&K=n+p3WLnlDsQ$b(9+4 z?mO^hmV^F8MV{4Lx>(Q=aHhQ1){0d*(e&s%G=i5rq3;t{JC zmgbn5Nkl)t@fPH$v;af26lyhH!k+#}_&aBK4baYPbZy$5aFx4}ka&qxl z$=Rh$W;U)>-=S-0=?7FH9dUAd2(q#4TCAHky!$^~;Dz^j|8_wuKc*YzfdAht@Q&ror?91Dm!N03=4=O!a)I*0q~p0g$Fm$pmr$ zb;wD;STDIi$@M%y1>p&_>%?UP($15gou_ue1u0!4(%81;qcIW8NyxFEvXpiJ|H4wz z*mFT(qVx1FKufG11hByuX%lPk4t#WZ{>8ka2efjY`~;AL6vWyQKpJun2nRiZYDij$ zP>4jQXPaP$UC$yIVgGa)jDV;F0l^n(V=HMRB5)20V7&r$jmk{UUIe zVjKroK}JAbD>B`2cwNQ&GDLx8{pg`7hbA~grk|W6LgiZ`8y`{Iq0i>t!3p2}MS6S+ zO_ruKyAElt)rdS>CtF7j{&6rP-#c=7evGMt7B6`7HG|-(WL`bDUAjyn+k$mx$CH;q2Dz4x;cPP$hW=`pFfLO)!jaCL@V2+F)So3}vg|%O*^T1j>C2lx zsURO-zIJC$^$g2byVbRIo^w>UxK}74^TqUiRR#7s_X$e)$6iYG1(PcW7un-va-S&u zHk9-6Zn&>T==A)lM^D~bk{&rFzCi35>UR!ZjQkdSiNX*-;l4z9j*7|q`TBl~Au`5& z+c)*8?#-tgUR$Zd%Q3bs96w6k7q@#tUn`5rj+r@_sAVVLqco|6O{ILX&U-&-cbVa3 zY?ngHR@%l{;`ri%H*0EhBWrGjv!LE4db?HEWb5mu*t@{kv|XwK8?npOshmzf=vZA@ zVSN9sL~!sn?r(AK)Q7Jk2(|M67Uy3I{eRy z_l&Y@A>;vjkWN5I2xvFFTLX0i+`{qz7C_@bo`ZUzDugfq4+>a3?1v%)O+YTd6@Ul7 zAfLfm=nhZ`)P~&v90$&UcF+yXm9sq!qCx3^9gzIcO|Y(js^Fj)Rvq>nQAHI92ap=P z10A4@prk+AGWCb`2)dQYFuR$|H6iDE8p}9a?#nV2}LBCoCf(Xi2@szia7#gY>b|l!-U`c}@ zLdhvQjc!BdLJvYvzzzngnw51yRYCqh4}$oRCy-z|v3Hc*d|?^Wj=l~18*E~*cR_kU 
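Aside: the entries in all_configs.json above are the raw inputs the calculator works from. As a rough illustration of what one entry encodes, the sketch below derives a parameter count and fp16 weight footprint from a LlamaForCausalLM-style config. It is a minimal sketch under stated assumptions: the formula, the llamaParamCount helper, and the file path are illustrative, not the logic this patch adds to src/App.js.

// Sketch only: estimate parameters for a decoder-only LlamaForCausalLM entry.
// Field names match all_configs.json; the formula assumes plain multi-head
// attention, a SwiGLU MLP, and untied embeddings, and ignores small terms
// such as layer norms.
const fs = require("fs");

function llamaParamCount({ hidden_size: h, intermediate_size: ff,
                           num_hidden_layers: L, vocab_size: v }) {
  const attn = 4 * h * h;   // q, k, v, o projections
  const mlp = 3 * h * ff;   // gate, up, down projections
  const embed = 2 * v * h;  // input embeddings + LM head
  return L * (attn + mlp) + embed;
}

const configs = JSON.parse(fs.readFileSync("public/all_configs.json", "utf8"));
const p = llamaParamCount(configs["NousResearch/Llama-2-13b-hf"]);
console.log((p / 1e9).toFixed(1) + "B params, ~" +
            (2 * p / 2 ** 30).toFixed(1) + " GiB of fp16 weights");

For NousResearch/Llama-2-13b-hf (hidden_size 5120, intermediate_size 13824, 40 layers) this prints roughly 13.0B parameters and ~24 GiB of fp16 weights, consistent with the model's published size.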
diff --git a/public/index.html b/public/index.html
index aa069f2..b4fcf9e 100644
--- a/public/index.html
+++ b/public/index.html
@@ -9,7 +9,7 @@
       name="description"
       content="Web site created using create-react-app"
     />
-    [tag content lost in extraction]
+    [tag content lost in extraction]
-    <title>React App</title>
+    <title>LLM memory check</title>
+    [tag content lost in extraction]
+    [tag content lost in extraction]
diff --git a/public/itsovermeme.png b/public/itsovermeme.png
new file mode 100644
index 0000000000000000000000000000000000000000..d1212672b353d743986744a7cfeb928988590b31
GIT binary patch
literal 113209
[base85-encoded binary data omitted]
zCEfdBLzcL-t~W`yNL@ZvT?}u_WA9YW39^8pyZh;mUVqY_hJi0vQ%iFM2`gLG17gO9 z{~#khRKEl+OnZvpE8W?g1SZHWm#Webw03&<)@L@te{y;kuPtz7YQJJSML0N?M;w2! z{x2xIOwtH8BluAMsd{jeRC@WxuGDP-xESQt$>7xhHLFVBo-_GvGS| zW$cbOYK7{Lyf;og^rVqOmRiJg=S| zv5}vo!8JMnrlgj%)xV8(O6l1)T}sr_C(c_tqf51nAXPSmHI6r4Sa12V_&QnqpK*AF zE?@dKh7)0c6OUWBZXQWsoasLa>yg^+EeR~Adeecl5m`SvlEATc=7gY9Dw%n?X3_1K zUy*OLKx?p<_*qZKPxEa`v64Cr-Y5VE`1oR!eWV}lK zWO@NYdYd#{*ifdGII?}t0p`1>CxlH}Py!VuiAorMH+Gp##)6GPg zzeI`1`|H3pPmOb{L9V4B-*cr2-xPbcwY?pqj}G75-E)O}PZcQN>U#TlSF?{J2I$y< zIdu8kGF~Js4>2uO-z$4A6&5E$X)0#kE00CuiYTbrp~3N=r4+2M^v5GGxM#LNV&t3ZnHn#@7w^`!Gu3vUrDeAsoel(Q z6lBclC1qgsHC1Y{&n&Y^o(X_r@I^{MZowacO2iohV)AhAo8*kD2KvE=caZ}N|Hf{| zhCnsLyBD1QANJnzE6VTv(3E2l>8*-U z7EYGbf2HaI=Wmq|av1or88J2>_3n9UqRO8zu}1g*_b_v0dK2{x5r(&BH~l84bYHO^ z_e!8t-7|X9aa-O`K$-dQaxaCIA-<0islmeB$-)Ev_OFC$(T;mPdjE_)ugT?sW-(Ko z)Z~&dUYu%x(C;N9M~a5t@09XYebY7b-IMhB{xqKRE{-UA-LZS$hT+$_tdfO@U;NLz zZ6UYirk+H<+RHWEktu>r3;eW-D`b;1B zzIfz%eEn3x?HY35@17zyDH3(4CINf zt8h-+WevaMxP5?K%saPG7Tu#SorJnM>WN^5KkKt1!1FhYVf>SExyD*ABaSa%3*i^A z5Maw340C6VH;u;AHA}U^4~upsC1JKZX3tL>wZn2X6022IW(Yfx`1(WpM;a6o6wHA9 zqH%ahLb`(|8Ivk(-}ozHdin-UrF?3Si9M(eWYkS)?0?2%?=}EFx&d3bR<>n{i~i5 z^-M2iIIHeUW3;5bz&oIAP#K6Dl5VEQ#h5&9Gj6*qJ?XlF$2*pb%s(HR{kAdpG35?% zTgeS3Nx19hC&#k)!$yH6>D`;MoYd68X}6^Bknu0RoxyD{YfS0B6}A(I{r;lZ#Fee` zo8J7~fvl`M@{1LPx9NTYgZMf}@|bU)i7lB)_)|1tRYZ(oLJ|_nD>PZ@gWW=*bn8G4 zT{)5GTv*k^BUkXeu!=-}dq1~`2qxQ2=2qMuC2`oNcwW#qrlr~v+{L*csug;sU>vz+ z^!2FXr@L+z6J4mWMtU+6gI!vXmu^J)n&8ZJ#({oFFN%*qAaoasGQs>rrsHX?3iXwe z*5H%*i-+)yr=@Y|uC1apdag)sGi=ieQ82e>EI~-_|MSZ;+@iCYr4QDojl#-vWR800 z+-snNQ^POj^2>3M)kmpqV?xo=vcVI9niTKP_rXYmtJz=#Sxe*(K`J@8UB{zUk5E<9 z>;=I)IZy#h{}{+aQ@jZKCYPXy8GtYYr-FCRuX;0oEjP%h}Fr$L-&a! z?v^&r?Xp=%x#&@7fh8R0MF|^)ewrtG#PKn3PN;#5M{npPq&UHh>(0cdub+o337yFW z8k@U=We7Pp+9(AFu>DyQBm81;A|td2DLvsqShE3M9zDrzs}|_9lzr|rlBZW1lw;_g z^oUpT11s;h3-eLmf8~AAV62|G((q>10H3$cVp1)Y!dIZm*JBcs9Y7=lG9a8Lz74JCj!CtX^BrtRyu@Q+Dx^8$EDKaHk zto~os%XAqg34)@CGmA?+v!#+0eoch#{OtK^0@K}ze1+^m7PWXaW6f!tuDO==PkNu{ zyyxD$-Vh0NpH#oBoQ-HZH|6h_;4EVO{rswhG7+IxMXA6xV|z40Vg20eMV>N8sdT3k z=w@`-@!urgrNYe7{)bf+@v(t@o51o*@}H=R>K6vwvi<*&+zOw4GEVfMDzr;0B@G%H zx?en!cXa$I8_cu0rY3zJ5-o!_MCY@w%8>YP&mSf=8sb z*6X&CUHa*FL|dC%v@jc#cs0VyHCacLT{`~*wn&UfVsPC^jyN)U3QWT?NVai^L-ej%=hdJ>=F15T{Cvs9fXuiyYHOH zfo{t90SghtmJcdbF{bsn`{Raw4w+bKC%THFyIG6c$?F@nFe5*{|8R^^NtYM&_?bQt z_H)qOTX!f_7U?256GLoQ)IX%hZy%s`t!w>*P_-v=<(Cv!2$>$`WJspqmy_hRhxw&i zyxebkQoK|Hw54ai(x>5(E2zm53KZ>A3iPgp*2WR$K8YGh?_p8QN#OgQfE!>%NiH?j z<^8pRaUGi(VmXhuFvc|GR{G{1F56u->Fo#hcU(0Y>MD;XOcRWp%Tk)LA2Y7bcAX~A zMXdj@iJXfNS^Yl!!x`?mCGqTT&5V$fa4PSZQ${{MqPF)#;b&{CH&o!zkC)I*;^h(( ziR25}_e!LU)D|+lDA0eon!~$O@YQt+^>9#wMP~X=bktBHzSMrW*N*Lwf}2*4S30Xx zBrBPgm;Xz!c=CoZ{9*Trzny%g|4eiH5T7{rSB%`lnX~Gy`<1d5`~`Qwv1h5j+x>N| zv&KHx73c6|#v?8wA*c^hA3@2U>FsFl)DOaMlg6q7){My6NYvn`4Z#>D|SAl>Zty+<-Olw7;Ra=?Io9ckNu!+z9NKED8c1( zqD;nJu3=%dh!zUb)c_(vC5NtpTIzkB)PQly)WuYlLf!~%_&3MA%n!AdPgu=@MF$$x z#?lv@%Ysv^t{LM<91l{CK6Ao<-)EW>qG+E~7_m*^FKM;|v%M#}K0&F(x;RW@Up*l4 zlWfA8*ugf98djr{T0hv!WVJI6B5a<>goud_fi`BDfl_1_8@1W8Y#BUxYE0opX@9N) z^gER&!a?7v^P})nu6d8gZN^`p@Z6T~*yl>z`I$j*3qnyoUp6A&rB?1U?Jmc$Ses)^Jinubn=#fqY zGn0#1pOs%^eI1dx)$@IzbK;0+l$kH28E8(3U!owPHc(volKu5{H5j2S9P5^u$SroT^;J?Dv)>(1|Zxmck#T(dj#@0S1TJkB-7(?;K5_0Z z5`x}B&1GKD`mz7Cz&!G(WMe%jmVHg^j{o{@ zvhfg`&a`}<4zIhTU`>TJa%etVL61Ud-4E10{Hs-4Hp`0hP6KNm^zkoRR2CmSW@(Ep z&D`)3l44%N)wJkkx`yrwMSV|tk;)cNLWM)1XZZPC*`cIhRE$j==y2vx2 zQxbbp?sT=oRU;Y2V&5cQWbAlg##lS5=F>blKW*Ke3pb&m;u`$Xa2R8L;6%*2`$LYU z*IyZRR@$7JYCk7CM9cml(HmCsN383Z0*#l-blatiLqs@*9|4S~FWD*;% 
zS=)RebmH!$vc<$X>Q*>0;*GggTvay(>_dTjSdYmIu6tv6KQcV4pHj1W^n-rx!D;as zg}SW23_{EX>ap_(pdZwb;P8f!H%fJg^)5cqW+)> zk23qb+as9B^ti|MGU^13_+HHy6Y}r0Zew?{=50kLaBC=_uP04~ z0v*-ryzvJd5g)(7B}y%~UriLKVMqThRABx^A8%1#kfS@Q+*kisINnIBW;TU2!^fuZWHaDb`Pr#xW%64*JF3Xwg*y9}cP}TSa7Gb}&n722K;=+Q> z5s%#~;nlrgj)vq*3OUiYO<`R~p?qA!=&s>!K@(S6+Q`N`{!R>6B5Q`rd4$$nl{`OG3#Bn}I z%5uD?SpmL5E0E|wQKD1*5v`Y#P+EzzVrJDMN#7#!Kx`cxe)= z+~bL;Y9udIGae`9&fCyf;rLU+?iWVKD0Mq2w*MU)=*%rzr{EfitzteMj)Kw3gc9J* za1g1CJYtv0M!7_LKS3HSo3EON;$gjiJmRtVj0&_pK>*2bdBwM9wM_gwA66=0zob+m4Soev!?<^tdLGt}%5-GnoM3i925RvMg-2`is z94qAjQX_x$tXyGwRA*I-uvJH@n0_O+_!oEN<$s}k`c=(GCzW|lvvyK)XTBZ^cu7h1 z3JN9>4d@-=s^0-uuC*e^$DIJ;=I70fLBSS^K4?-u6RVwKj<^RTs&(FSK3%-7LPP#) zFvan|Zi&TINhG}dW>;Umr^Mb8Tx}!^d?9(^1YMVE7-auX_Th2A1a|bbQW=*qnNG7F z%A3czyM>aE8ZRvTFN7(QdXy){K65$uHslSezjb6FeH-4|K63C*YHHKK3O3SfNiW4XoRlszAC*OU~6hlZ2F7 zg!Sgxq3e_8r1hTLX0WOTrp*{7sc7N`pU*iON7R^d|E zNrdgoz5ZOod$|z^$7J?adVVdsNaGcJu9rHCG8-lG=ptzq)(c{b~qak>g6} z&`BHbpu?lq*Yw5FeK%3^Puf*E2ZKcD$`e|@5*q5%>IES!xNN0)%}JawJ^Gg+ z30#4OY^LEOCA~~*S>#UCcGu9?ZmWDLkpGAY5h3~(_$E|Tri$x!*^p4vWR~}Dk&y(| z5rYGcC@Z;u#~0JRb)k-*M*P)`pAe~_fruHzR^dawNto{%tewDP!&PlQqG?Rzg~;Wlo|a7wi`(gP)WO5Q03!5N8-mldJ1-xc*j z67PnHGuleNd5#)kH|q%DsM3#+Wobp3ES>9{kas7DJ^gCDb`>kT&%NcmYJ*N~uf9c9 z?k83x#9LxrLiv;dp_sf->Z%!;Yq%m$>a+c*``TYPpNT zRb2Z+1IPy2>ppV+{rJ4}#-S0q>p=7*9NWKKGcok?7{N0_v#h|;ct+%nj$|@?+pxB) zQP>y4T(8uqRm?y^J=fEYFsUKowwmY+l7BzhS8_;}kvtR=NNby-$2C**%~Zq#AurIw zwcSZsfau9qzjX+CMRk@VphI$kqX+8n)%xWV`w8!HgVD&cnGe$+7;bcN<-1R`F4#$n zWgm;&K2ta@C|VgQ)f5)p5Q|$rgWyt6`7H?D*jVo0d>6E&`I#Yf>{pv$XRX6NqjX5s z{40f!?|E?9J5uAD$Mf;tZqY*b7A{_`5@w+t9E=^Sx~)|yj;?ze(2FJQSLvmE!d?#o z__dTBE4m89yz3e>g{GgNoKoLB##^Jx>b_-~={>`7|BW-KSw1j(x%%B(*-u!(p^r;% z%DUS)URg1TSOFgAXiYemMURVn5Qh9S!Eg}nz(TS0Jo#3$FOMOQT2NoE@RnMbk-tnG z=ej<5Jj=Z_KF+x z^*W*rXECZ_NeWQBWElB4uZtiyt3Y3RHUppW)vmL-+{A(Obv<5@dO-JcZiP%6?ZaIn z;t{-CnY}B`+t1j#y|ty78bgwVM~KLB=uk5H z|f^3W28N`1F73rq%Ac?!*VF8$sxdT3j=}vSYzG8}&D|yCwEVEj`QsS+8!}pkJdN z5LBCwSaR`FgDhUklY3kul~3=jeg6av+VIFe{cE>b*SA4MAw5*vsKG;#`@rvW67`tP zU#UE{tP}H&8l1Clc zs{!BA{%)N8{#$$`P24n-|G)EK{ulIiW4c2l5%*TCEOQC2GH=jQ zi_w+qKpCk=ERg!FO4LPi@abgz?bTs1(6aHkac3dF5j7Ll6!l)cwQTayl8&U~3(4Yl zU&S^vx?aIzrtf)l2p#nOooz?y*EA!dUMWLnZD36z%d-);=oxQiAC zz2EzSOHx=$5MC8A;+XtnU(Pa#=z!+p%HS%9V(Wg7{>>kA`acnBlz7jX9ryzx*j@Ke zTJB;?-BIxS;Yoc_x>3GkY+ol%^bAn}!SYQZ zE&qecD7aN@i9&d6iGKL|o^{ub?DBHLp;;RGu!+lEG?dMZ@=6_Z3JIS8FrDVf~I}9lEcYxvN z|6hy$|1%H*id#4PH&^ZHj*N;|a|PTON`gKPLwrYFPZ@UF5|B&3dTxJ`V&?1M{qEF1wA>iU#i$#MFJMC@VECButYld!>~?o zP~KR7q_0<{jX7lvnGP`jwHf?JU&kTziP+N3T>F&?8lzKOdIdh)&E9--U&_XFiW)<7 z7z~T)f3g1Yt@bU|i6eL>&AAy-%el9Z^wd&p`_1LZjlxiRoPF<2n*{re4dU;3|% zz0dUp_=$>q7i%?Tb5{XY7<9y71o(10Hd&EzTshz3pr?9sv}c?r<>ReBoEB)!n`2FX z{AIb@L=BL4Df?F%M@P{<$2sY_BL=ye7uV?aB{;fyX|L^S&)IY7e6rrSc)$H@$l2rh z#q3lQ5bx!bo@%>X=k>(Mo6o|5yz}n=H25z!rI%9n+kudN9s2KJ`{m$iV_Ao!mcT;! z1wkPOf{_SXoI4V=*UF=%7`PyYr8mY) zRP51vOnz6V7^;BxD)rKII+(xtr$jz!4qi@O=E&342$VIiw-LINysg6f`Y3a$_19hA z-{!~J*GJx9EH1e+ddJk9bxton&} z?%6H~W}baNHNM$|$LdT)9mC`#?~LSScxI;E2fwu=8+6^^Ypc9yKk;2+jeuqZ9h2g$ zSuB2rj(ReL|M`fTXu4IrwZ)?hF%FoXdTf<;Coe;Y`Q*Nb$b zo2FXJ*nP&OnPOH9+LcCQp6v;hzf-plVUYcckt7fYqf$*6weE%R=a(y!gw2q^5je_? 
z`pkPkCu0=*%RbRpOoO90X2ADqneO`Kzez~%KDo!Ek}E{fzaO6zD7j-^TJwFWKYk0C zx$yr9`U5F>cMMFx6R5h^H-pa{Tfm5B{}cL0H}p!*E}?vHF6wVi7JZMEe8x6t}>6dOp(%&Xe<6G_lvHGOPQ@yJ5GKm3-}+ zOH&Sdy_nk}mka?IC_(dTl(>_P_sXNpX($76$Pb236NK{e1j}-lPu2)2TYwt=Bt~?2 z>!J8=1qy5tM+~f>4DO4m-(hsjoBbb(70fxeUp2u`XY%Jfm*8{gi~V-ri|fhyHh0fr zlZ7IHs&I62OGQ2OY>@MQKD9g>tK#$qJu-w!ez$o3U*)(oayJNyF)-n>eQUyU&)@An z>Q%90uO%|d;)C#nFcSN7K|WzaM7WX!&R?>wQz;=%i)o}rR89{#f2>m&)FH2SFgD6?GU`ju6+!VSP;+qV7634 zzzd^>+zjm3;=*yB$expHeCk^a@}Ze};kV+#zgBv%|C~Jc?4LI`Pu7-Am;tuIF3>l( ze`^42nF&y+FPk(wDKgkAb43)tHE26TH2F!n-Ap7`e%SyvOr==li?;#ftV*Qi%z8!Dzm)k-qCY%u(8T3BC0QtY#} zXk)ghmiu0e*PV!n33Zw1e|_N9x~CHnwy4~`fx{i*l8sBb<3T!ID6_3P1oV$wd5j&9j>Y(F{zlJe#5 z?T4K0qqEMJJ482Zhg&smZ$xJ0~5r~b}00;_WRc=1M-yh*FsgM;O8r;WWR zkp|6xJoy&cVR$JpP6M@>TjvU$}_RYj&oH;4lJ^{>$rqM8ASxU zwLjsWXc6POYqJQD3&w?zgfZrRR)(;XA7aKcp#)PdOOD?i2iVQBxlIMC={zwznS4JoWy?^LIN>K_=+KHh)=XL+^raIm!iB(2attKd+e97osckW zbD~zeK04wMJV=BBZn*sQ3h%WA`a%1j2c=h|yjP>cQ}i*{`lZ)pXQ@-9jmKxeFq&-$ zB-x@4ThWWnlB^>}Rho_MCq&uh^09_GGpGdX+$z&waM^#}^%y8qrrhykgD6+74CKkG zILSO_u6?89CTXxCP#fC`73C5UBtIfjqWOsp=``SZ6+rM)=DZ<+-Ii1?Yc zi^#|#XlM?-Ibc=T2WZRXH6RLuNSYyG3jYQ7EupBYgvWh=lLlvHm%l#8|5>a*kUh!j ze*(|sv!nKXF0ZrGu5r`^L6jl3w|rt>jdTbeekiOa{y_L{pu7u`n`vxoNzWxii7e77 z+%HV%ks%t483Zok3u7=3=thrku@!^6_DR)b1@A7gU#=6olHAtHr@Z&duM5?zaUmx$ z#%w4;WqCV}jEWFCPk~V-{@vyXB8D&q1U_XhO(N{~1&j=iq=AG$6N2~><3ji#9~8D@ zS|C1s^NO&#*k338CRc4s+d^{kHyaajv1o0c~k99P8Q9G6=odMQtM(G~MP=-5#gPbjydoW@C);A(M6NSa)fE3iyg zP8zM8vLBA6zw@X{dzR@&JbI&PKF1r4Cm}vEE~v!O9lV{YGG2-Eg+=u>OiVeb<@@{8 z`;>o$i3N@>ggAqt)*;py--r~bFNt=2S8kAjFinL->V8S3PTet{&mI*qod{937IYKU z`N|eGuE@c7*uZ}asYxi(6e-darK-0p(q6u?2?=R~ASMzKe}-}z(J&6lF&GEY`!)0D zTjDS8C#bB)oVy?dcCD8=wgg6MnZ!sdz?A<<6&2K=(E0I{dR4E zNC#VGehb=IrCg$yyUjW5QP2nOM1Jg@)2zYpPLc7L&Pd#B;p1krT82-x3&xTUM#kOt z<5g6U6}Mdw1Gu5Zx*WMY zL_RO?3Ev)|o1w?nj^Sc1+Kyw_fCyMg!aEz z&Hz@JlHvjqN5XQqpR``zA+1?Q9jbJyT~=Ti!sPzo9r(J#3Dy{f*uk0O;I^Lz=MT^7 z8NRUX7=&tDm`$vc+_%5896;X{XRMs%(MecKRjHd`&K58|&}LlNrS-fNFAL-EoP*!= zl5ZO+e>wpTqPs;%poAb{W%V|hq3iyJK)8rSc!}HT6ZJzP73k$4N$q+!B!#)U+nO$= zRjbUZTQ>bVA;?ua=3QL}8)h2= zM*}w-+bUKG=}YkrE6y9XQRm7RCBN3D`@`6yJl@92QQSvsG^hdEbq8k#N+Aa^sNNC` zb7e=Ya1g1c2Km@BgzBqEG9Y?s+|*fOg_=R88_C=+V`aRxnoi6cT1jqG}Lplq7n< z1SG6l=M-T^m5A{H4w`6F{RI_Ed6tGzZH=jS)88IX64xh?$Khy{>Qeq!S>xjJfl+8a zFVC073kuG9$0_i0aKDr*y0XGPTFn*_^~DU1`|Ox3&CbtQEM|Z^wI+LQcw%GLqYA&f ze`XptMkm+mN`G{S@RstNIevdlBjNK<-6Er{KE;Dt5=RxJ;B0!u>Rqiz(a-V}@Vr z$CzeEmN@3mX(cDzYWmyFyBLz25)dM1I&XUwqy^)=SPx8V<#{kq0n@;(d{P&|Z&_My z2#h4whym8fyn2FkZ8Y(|WB`O$Z7yxt8OA@?uZNkI|25BrofY{Iy|t9U5(Jl?JMh}peo zkv8Qs2c(WifG&;4^vk^j+T>qR&LloB$NY7Db^(_w`H=_HjuvOPE(C!wn|9!rhRT#K zlaa7)k;GwUfrsb^^WYs1%zs!+K9s)vlZf2odgTN~ns+rT=l;rD8&$lm5{dUtjk=yv zyeBpi2c}$@`(AaL-J;NG9T4Eg5^;Fy;_P@Rzk5jnA#M}0r|5cOeJFCafY{yP^FKqQ z|A--86|bjuer{#hYEYnOWi+jn>sw-~HccvITmcfC(8u5!lPGK}SQnr%-9Vpc!O6Ub zU0cyF`uCUr+*9P2Bv}8|70@_qW!`2h<9+LYF_%}Ut3qAa&<3oSKA0$14TCWk3Wv$M zIg1Y|b`I%e944ga&%EI;qTYIv8q-&!Pzym2w>a22ICbsewiaL(20-c7(Wrb0nHj_wUiRz$}o!JLTo?BjijFO-|q^S|yb zfb%l*s-<{bQ=n#yY{KCMp^Bw;iBs%A8+cmyxVDlIax^bwpiJL7RwaKM3jU!sEHfAr zs&I+OvLxY!SXsZ(d7V~UnvshYgbBe*U)MbC2Vd30t*G8YCB()g;P|t&CsV(xiWuxv z{#HAEs^ zxXUAB{V_`NblM4Io*eUM*f_Wq<9BD)WvuQ6lDW@pG?P!$3KU=QHKAcqLlfGk`kEn(HXZ90Likoq@YvXB?=Ax*> zZVgwJ(oyWY$H}w*f4{-6vZN`Fn7Aueo0%2??Z{H$pn&^5F#Xa<*sa`Cn_jnDYP!@@ z?5Za#$Z;_z`fIA8N61*2p;u@Tpz3UsmvmK@Y3CO=K3P3cwq-~FyM8b*rk`03aVPHM z*Y3Ss>dH70>Y`l1xQXMOKe}maF$Wr|Ca3y#NGn=?L|lJEJMI1V-}!sr0>oT=69_hI z5#P=MWNwT^C5!G#cGJZhBx$%FgKZb&>cY8EVv1|M0m5!`@uFduP+|%S^X=gnKk$jQ zilw^wu+#b+sz?_bU%Gc|v3uXo{;~9S%+XCKz@L1>8OuCJU0Ac_DyxezfE^ZK+gTON 
z#zkq6@Sf(0v=&-7bL4m87kJR2>GfRu`0|mZ#yMq-2`j$L2`+HzO_k>h8^ZNlpufOd zs2+_WbLGK&wskV?@;9~iiBxd^8P%tl*VV9ZRqnb42iPnpp}!e_0$6KGrhvQip8Fuk zgb%e>o5MNeb17=B-HQ<*mOnD*z!F&3@OhQpZf?)hEE#voN4dn9JMV+4no63#Saug5s;&*fn7xI-cxSJ z^px{LT`y|30N4FiwA0MuN^p9HrIQx>#LK;2*kr$GC=zVW&jgsm)n_%4^Z_Ji7BO%_ z?pNOn8B_|=ZZ0AN80u<%ofZehD344F`IilLx3wmJ@w+>k3#27-Y0g~$Is za2)&&Ox7{Id%)RyZmzDnq47M8hFBoEZh=aceJqv3S#BMw*nKe+o;ttAviGzH)1F-# zF^SA4I#kYJlyB2ezO5uPC>B{CWzKt((#}XA9t6{n9UKiPBm_<&5G{ z&eznqJHuh@iB%fIEO_oHY)N;7jd%Hjja98F*&Q%Xrx4*J+#AA=VP2QL(d;^}ti zn%{`;_0a+*ae|r|@$ncmZ9OS#*w_xZrS*@bx_n$_Fo{J6p;IeY)wb9OdiIxn z4S7h%{5t-`Lrkhgb=UhiowmzlhvoK;G#gkqIaAUW~$k${mpO{JGbWkOQZIMTS2svZq8rI{P2 zQXq$FOv4OD4fS0x581H>Ud-)~)ohjPp`}Z8G=M0UQc5B4Dq(M%}&_J7(zQK*}^{2 zz?;fwxP#{adwz|V6|x>vU#MTBQFeAGv9yC>99S@;_u$?tQme6xTKP8Mc=j5@m3dGY zuI?U%Lh}5)Cn+yx1?qNQMNNAFD&3~NOM*e;fSxhUItFd2PrHoan~EQq*P$^*0uMWl zFiaXdA{spLCzD93LxP(>1Pd6|jy#pNRoeakrOXMur@u8}NSF)e-g^^q&P)`#EmnP2 z#jtq~vaB9Yl>4}QULN$;AF|B5{mV06PJy&92vYYA)sNSGF7~G8 z7cJs4LGsDH95{K#T;zu+|8 z$E3sE{BuV)2>w}AA2P81|M@cs4;Ipx+DA-*g2~wYTr|kJH|0c$6-^|Pa5e&8w`O;| zOEnJ}IrW{A0Rpc_|6|}`(yKb`L-k6M+uC~$&diM@Vo6_)0BkqQ@i>8a<$6N_8izU9 zBOv6s^+iQM4N!3jnlXi@q1K_0!eGzc%3cr5)P`}o1VOvM6`fGBIi`;I?scjedF&~<_V%y;I& zfkj~w#I-P%2%03*5U_*ZrZxs>;nEVRDqo2#HMC72pWN@}QvI}gUH8))QxZrR7`W7j zcLZr|cn+UG{Pdz3tj@z{OqA4-%UO1BQW4%Jr**E(2}E}X8kZ=Z@|!cn+S*n-3fqSu z!*HtF5G%4A&L9_D!<2pjXxu|rSD1p3_)bZ66DT6JiVLqF%ivu`5i#kn0^p{`Vb_8Q#0cpvKtlJu_%N zSyeyL;x?+@Sa0pDgiupW^{w_3L>DBexQfbbV$g}jnzm753QD333L}U|E9ch$@I0aM z(aCOM&;a}O~=6dDu=s~X$VkvD)Qo!!_PuBD%c z**P8{nx)ZF78$;|+BRsP6_5kO;3UuJci%0IM!gEBARQ8w57G>N?( zxsp3EB3BwBe{*x*PIn$rv-h)07jRlP9Z~xj)bxTns6!;4UHu7`ZkfoJIQlL)*K$1u zK;((0tUSO@!!U6y3OhON4h2aOi1d%rQ5X<?WL>(!(H=X#CVlVyR#k5a(S zHb-QH&@cB8@;hb!LYfh$?Y)_#+jEiYXsMaM*At@#J4?rvjmpTkF&|xJhtH+fV&wjz zj2A#{vy z20Y5@njvcc=XMscGEOSBV$4q*9aMN3?zg=M+0mO1C)L+KK14%FDrxNC0x5MqrNR_F zL6tfRBRj7ZD-$p_)mRolnbA%9(xPyv?{k5exTjM>_c zBdBD)&+q=@X|q+Tsn;^sxNNLjuXcuxI8I`-lpcq(z0Zqd3m||xH2qfKZK`%Lc#qPc zJe*kS?ta%=ew}Zw8d}sIYKx@zc6J(pRJff6tETS8thIm5f9_Af?c? 
zc=PM49H91YxW6In$d|s_(vK$2%GF}wXT|>H4({^^E%`VOZFlC0{8e1HrXU#ON?F?5 zZ}be$uz$uJs8Bm8FsUlb@5V@S@P1-@Da%sEc*La9*XKSO>dHHD$#vGh6hQ5jZk#z0 zb7l9>*!Q%?@#C6$`)-q17ze~#1iprRhP#Tsy|OKyi;z6hqnop$Pncf}e*FHmc)tqG zy2;{UqWs^YOSk7aUo#M`GpqH1_vt-)ecz z%`MgmUDW~Jb&U?}bk@FHQJ!cBk}P~5%JQkuoDF8=QXz+qt!pa9Mjja8B8Nh^@kj8^%3j(}PKvWA|>9 zLYWw&avAKQ)PX;a~GWcHC*?|?TXvcJw-~8 zjD1ztHkFd2zS4$9l;Z?A1xo)q2N3v00ckNHo}L8VBZ8kG>!3U0W}E~k-%5+Ep)KZ- z-4yVR_Fj2$=vQHP$8@M%C?9<|cXAwwqUv9dkca%%l`mSY?0q~(48a5}Z#8|-$L6Yi z0mytjE;FtAGR&YQ8(#kBi9he7{z(j-lTrBrD_=`0MDUbD2Tkf z-X6gk>Ph@3;r<1tX(C-&6!>^MA7lC{{VHQRt}x9Kg>jgBQCvu_(sF>3v~*hqCM?V3 z0cwF&^Rs=)pP`j@*kYt{u26S*EJ_!f%)v51vs#+PYcl?bfuWj=@eV6_a+wRclwzv4 zP2fT_Tfl(B^EhllJU-X#HY}Ro8V#9{B^xI@zUBqxHtL_c0RUF3I}Ey1I@CMfs;~TR z=xOXGQZ0KE#;ibESpqqsyi+`PoQ}@`+|p+VP?L3@?_^=`TnNLRY_a|SKbf<6}YusuA zX|$;Xs0zb6VaL(;*l*GBUCVzB8z|5~V8M852Nw$rkwH(4s5CrX8C_~Wtu3!D_aV#p zV*VwwP{kCH(mCD+xwmLSNs*gy9`OPu(LJaw6b;B&c^dtTN?hGzF2Oo^L=P(w^EvDH zls+#wCu~nRoDXe$Rg}SFwl>D&NfH6tukx2Q2Waq48{Y9{gN#bWHZbQ9io211UnJ<6 z68N0-oPJ{ReUZ=vj!+60mxq+dz4@2|*!vhxrZ}oXCW87?YaX}c#ADfI1*UG{_ighN zDpgO>H2oI?25qC*45NLmY(9uvN!Wh77aTGJ`&M%t0_y6gVpqMiTh6{WXy2uk1t!=# zI?#Yp4qgDk?8T1#8gXT=1B}mXuowfchY+grc=EU$BwBDcb};{Ws@=WO`MIKlkLSFW zJN(6@8&1C|NefA}5-m70}IFyXYa22LSeU~Ro z3pk98{*c=!(%9W?3oFP3X-uKo!&?Mi&lnT2E3qDPBwn~FHXkak-!5M(kxItPGDW?* z+!p6((!$fb%QhxU@9>^2p{fPbht=Y!?+OF%XEA7*V$a@Ifsoq`G6hR&lk5;K=p7ZWbQ76MF1GVJQ#m5f_5tLluqkO1$I))GY8%G)r<~Zl6eCqP~4y z1|q4TralILZknv6^R6mmqT$F~H_M~8MY={V#3Rv)cpwkKN@qn4|k@EVokK%8Yz}-jRN-4Gkwy7h%uT16L zEQ{OE9OM%@255N{qc}}lm=y+Wp?%DO+n8pZfBie1Ey9c_r zes|?dofLBGA7VOF#6$1$Kf8BcHpn1XgBpUP^`L8gIKt3J%)zQApKxUrern5KezuIV zEs7eIu=5xw+E7&d5hFBWPGd#6CWT{)^+5_2It^(sT{hh>o1loD_J{UyDRB&gR;k2i zx!26=q=gB0HNYR#yElG1T!pp5%ptgUrvyuel%eZ~HIRgH>ntj8-*E-OU{Uz7>iPT; zT2!n(``Nut;4;<+9w@U`4k4wL-^*VfdJpAG<@jAzmJ8Izg~N!2Y?~?JXp^Cy2{ zkvIolBN8lI2_L?^>ciXyB|uv{9}DZAa?Hnrf8OnL_SYd_LC@P@qG1*fWJSJkI!ZoH zd#|~g=nEuv{$@oP@m>3aDGrh`TB544W+F17D_1|CiQ10okomE;NA0FLWo5*hc5W(n zS}2mgD%Q}|KgptBJ(?ywXSFHm{~_tTK2R(XM*8NU1dzVc0&tv_=yEiN6=uYa(T~_jf<$+G7Kspe0bOx}N^#p_=Hm?dbpo zx|Rue>_S6{7$=<9_msT9j=nuO1n1aBR{N$&_3XGj0|$1&T_j>rglHl@R;LQ>JBgc4 z{Y_ZM_L-<-{I9G;q7*ymu9f_RZjLLt!uR{HXv57c`lLg#4HA8|XhgR}6%mIw#yL5K z>CCI%VAUxi@UM!d((sn}8;ZBQBw!sz>H3>>bk}#U?saX9X1#DAN!Ba8hC-BP zY6oksP9vi^5J?zlX(e=@IfO$#tHZDVGciK^5tVN?PPzR`oE8%FLHXpYswpeO;Dn2c# zhI!{bh~>D&AX=M!U1ukvHn-(S&e_#WT@Ry!Ew%PZ5e9K1DFfmpw4ej{!@d+|px zm#L5%rUP43ylH}aC~gt+$13Q2WbPlUydMio8V{S=oHgoezHS~&$h~;w?=^X9vnJ4W znNgWHX_@1a+n(bp2k@B>1iV{ zoU60NXfSfQ7C1Q{W@Ve&pGDw0H1V?ZkSH@D7Yve~BYi2^@RkVVTI7uyR6)G;1A$y7 z+-D1XU16+ILo*h5?Tn&!otO`9PO-IF4JwjORy|?WS*^QhH6!x)#lauf{e#1fe|!J- z1&%fXG`=le6rZaJ;*!f$heV1f^jO8lyk&Xl$})}m^ap$loPY>KtLj-woKHq4?`7n4 zNj%GCGQ;)uN%_}k75Ab&SIBP$o0@FEz2qWvFEE2jX z!GTfyLAu6O65O7w#2p8`^^R>tOc!U_&&{=u{sPpM6W zc(F_y{EBOT@aUgchj8g9aR_zdd&ZJC}OdkIjic{pgVI3JFRbRzoEwh(<+m|Tos0WbQ{zs+R48)H$-A!z zANcpnmha3rZT!!PjiGPtohbI|9TmoLzKbS&DZ6-MsJVuN+bzD`FE6g2TC>87?48bE z`^rk!{b#us&NuB|59$fINL^1hu3eew*tguBzxuce`&G*crO8)e7B$4VL4~xrq_N%{ z%tLkMWX`w@N4|A;M>lb0DLI@qT(k0~poVZ$5_FjZyGe>fY=JjzfJNt*vSmuS#x%XfMC1if7C*WcJ;NSR>Eo6icyE=M3i z@LU(L5TQJ7(OJ8NkAJ=b{f}*v>IWu94|q(6Sb7FTFS86x_aGJZf zIaI!fox6Ha{{6&|)Rh0g>V1RAvFH-(B{>j%kruaL4l6Nb9C^!)E~?8%Bq87Ra#rEp zit3GC2>`cqLn}q5my-Dc`mFyiwKu)pyqe^;%(N19^rhozewA}6PX6e?RA>C)_{_3I zx>5tqL$}Ii+slC-?78Qw|0u6?J!fMN5Yv5W86KFeG~{gIR&i}ZuaBMDh6FgkCi=1y zvWZNFYjV|KRcN~iDRaSrQKGYatdAH+U zCw&8ZK_ufm$OgDYj@7ZdSR*28*_guZEU0V3Adtsllch-2LxnNvSrAtJLXPNfGK4WY zpJu8z5AO7$J>6#=VtOB%w7#tDdE1`t@c+Uls$~|jCskg={rB(T==NY zZHu%m{V#QtBshy9F7h`EYQ|z 
zx4Z9(=LrMKy*`T#2PwCcw(bzpBM>p6{xAv%f;b@hBW_-V4EM;NtW> z?qD~hS%G50DT04#Fi=n?0npo6iNf<+BDC)}C|OH?E|}B}qxB3RvRX_`1GGV7%=OYsAI;-F$RePF<5>%XouGBTD?XRrjgO6dv z1c6g;-r54~*;QoWo=MuWQ3&sCyI;-bDay~=-(ET_$?~?X3>7i0HrBH}vW_$?pPKv@ zxAx2O(ZM6M%hG~P$**>Bfb{Yv8Dj@klA`9Retl~HRd2uXc0TTW04KIKiBH}QjZEF1 zJMUm);X8ho)6+m!bVU4(9N4_sz{96KQn(V|*w?1tMo<=1$W=;;w+b=)*PAVLszxet zN6H~a8AimmiH7QtIQ|It>!`;BDptM2o_}VbNwU@--=HkT!MDpiLXP@3uKxiB_dIWd zULp}wS#O~s)@SSUQEa|anWd@8kppu}&V%&SNNl}T_*1ip&=qwTs+U||{qr4NxK(&+ z!4G>Io&UTT)Vp@FP&@4Qr`}|YU?5N#CyfWG{{G7PLA5g%#_efIw8Ri~S>$SCQXD=S z9cQZn=T($K*LcL4Q7s9T%W?^9y#pH>*3YssWIEke zg;%Y-@0W-fZVk)J>Wy3@(IyPy#LIchL(P_XK+4IcJg7Cnx1mZWx=G`NV@tNfP%^R( z9@*=x_qXirsknO^rmVDk;1%?=Gi+3dXGppF>_$Oz6;-YE5Hd?{iMte^Y5c5B|HbX; zq0+OwcoQ#>^TdJb!<#4%`ao7l!TnRDhia`(2iV5slQ*xajXHb@zm+fN6-&4#B4rRF z6V(PxC;z*;QzBN}jpn|DKFk_}~Kvjs;Ejy;txZS}wLYq>Es)aUiS z|Euk0XF?#>(N%^M({%5B?+#={L9~Dgao1Uz`^`F;u}Wn7*1B%8 zc-US7@4ACU;y65!26fiH7g@{PMtgE!j<*@73m3=Ke@dr`%V+XG?GW+DN+FWR>0_f2 zvJY7vLrGfq&HFQNig>A&w3Th98unN58z1HBzpFdZa>`jtM(?5u0v7~=ztrsQ$Ii;I zW3zE_Vzm-aQGY*mow+!#&y9PavqBuxQ9hBI?E-o;mx1Mw<(YkJ@=xhqeE|@*i5c5dF7D-GUy!~Zs?0bEZ9X|80#WoT**><)n8M4Uklqn~cni&A@ zEd<)Z~Y~BE>6v`^D#wjSq9P)7$3kZ=SvVbh2yWR`i5fTzPOx zi=NQ3Pus`q-)0ipvWTGTtdxz#6+Lcly8fn=*rjmUGeOAv^`p-%w%`@+@rw%e6B}d zey~V~uZU7e37k=pOc{zTECAG8eo7 zPBdntyCC~5Z|=!R$Vt;Uos2|3jP++~U)*2pT|aBeaEBnhGjKPZ73GY~9#EGs#a)+= zyHic=bq}m zrIk>ybuI1WUV|m8JLiT|A8zQEmRR2zvG~I7@aJ)2fu0fqkN)m_%`i%|m-^fN?dFZ& zYyPez=TmoJf%D$4WPR`))ZByW%~> zdQPpSqb?tO_qo7!dXlLkYBJnP+Ojs8)3CUas&eY#$?k$TIWS@0(8YT`ed30+5^IoC zj&d+-nq#&ap~B~165okIF$`)6ycXUr2~+to0<-p$PfZK9g~vt$@iV*36wJfk*wgP- zM@)KgJeqmNVu8n0D-~jsUZ>{W-o%@Y62?=TW{Qjx2Xd$f5#q^0F2>b=ZkXzClAKDMP~3ZY{n*-jOw}g;Zm&OESrSe9r?mkdQVe5i1L& zJii(MD_n003VmoIL+^ZYl}UP~-y|D{W;|#*`|S5GstQZ-#WEqcC4*k^$X0_|`%+`I z^}1Yms`71#(V36CT*Ww-FPGdjqq7R-9<-l!HHc$3IWT3ua)|*kd4AnOv~JuCc||2c zFoO$>Hh3Io8jT+_-2Yn2yHX=DJdm;x-08cP?Srrv;}xzhF_uB=_ZbSKdC$ckB}b5G$GQuFAdlC`b z<@}T)knl%~JAieVMpHD1)7@^^AyLNj#vDI1v%g3$X-NoYv$`LuRmFOzV053Ki! z&_8d@QKchZh_?$L9uBk75~Js0aCnX`-U#;XP?0-wpEsSL!{1jvXT7j#FpJLYe61_o zunQM_vFW53rBmF5Qr(->n!Zi5(gxasX$Sou*}&_{(s(_yJ{gDP;n==3Gq+ON+++)^ z9QC?4oxDU-YMY!M`NWt^%D~{uwD~AuWSR|_gllUlD>EpHX(lvCE~bEc=`#l~ge232xY|?w&*R1?y?y{Q;oE9s3dtN$^ z11l>>-MpLhs-AOf86yUTX1oK=s9i=j%4RyEu_TFu#_&#>-IjZZ1p|0_E;*;ek=>if zf!UTG6lNy4W4iv1>-HM$_;`BCZkZySYrid46$~(!Jw67mghf5V{eO%wZo>U<3Al+A zewk^t=jbnSXd0-~msw^!<-TKCt(tXDQ*)nWmYA1R2DgT<^Nb|{yLdX=T*^y2l{#zV z^{&5{eOq6O&%d?c zi$Qxoxq=!X=RE4uh1!>+gyYde^ZpA7$rV2wlCBlE9D}%D`qhJDgIYa`irRB*so&!= zPY3wiT=ZKyVz4>ng1WN}^GK$Xr!w>WLQ=y0WSa2dxq`#U#5%v%fC*3c|NB#MB82=Y zd+D#U&NIS&m_ZKXpyb*#%A0ru;hUdJ)zv0hB#3Xs{TcWN&2zoX__%Yr59o%D#4I6w zoFs<#6SmO6d3<4yKiO^n53956k@A(nvxaivCNT}*VtfZCJbFZ*YIK-uUoKk~4QtFi3d@{m3! z$>8jjJ8Z{e=`Y5I;yDOoAvb4G!Y!{5o2g}Cx<2xfg*%`5w4$}a>GKrGcbG|RzwfxL z8e4Dq&>s5-B~$g)?tMjo$O6L#gaYo~enXeP{B)bNN zSH7aXFoN0g)rti;DV(29bA+6%ja9T6X35zx*ocm|Z2SpM*yCDIFo9+%{z}0RzMS|R zHi~t4-295R-e_B5a`d;!QXAyh>2p)%O~{-v5SXvftW-2(`N ze>DrfXvgQC&#$E#xa1mr+gvLT29A#fYA*n?*?UpZm9Qex*gTH)rQ8&Y*_c<(c6+`Z z`_R;6ML(d`Xs&eRDbwT>#Fb|9{L3Q(dKm)r_bAu?`3Y1M$nkbXbHmDe! zEjCEfi*mgGGD!gG^f(!}`CmpnO}6FV@t=VI9M?JhPOy}!sAUIQ(sin#ZrXLs5a;v| zO59%RfX&FvBY8PRHh-V)YV8WCf^M{Pt`t@>DRYjlnn|I**BR@X})H1&8f`Q6? 
zkRz|^!{Xr0?R|_sOqkYu$Js&dcH4>4LT7{T&AmCSJ+h{nPk6JOpeYF#dbFw0PvT0q z_1<*=&nq4h!`ej6h%ZO);E8M%;t6i6a|-f{iNUfimFG+ry0mooi+cHewan=c?JvBSQ=tNOAL zni30lP`g!sCH;Qkuf|!tF1n=M|*RBnKuJ0_5tHgDwWjc`lrrY3DLM;l1+RqQ^56W_)kU5*bsUp=nw>^Olh*nlQ1jb+11M?Dp+f^f zQ8rM;`?}T|%1FL=XdcUEwC&aQu-fbi1ljg3(X)Qf-FAB|G5)yVJgjE`0~ei=(iNNh0E}Az$geShyyRmp>Ahc+Is4r)po)94X4lDQ@xoN&%vW z@AI1a3e^8Fkr}I9pv!~^U6?LPooBXG``;<7uW9@`WQgvv*Q2WL4Eykv2 z3=8!Q&Xh7~Fdet^750`6qDUkx#!v@`gdOM>?OXE<#}SNakl^U2FB)z_hI|&Y$44^! zx%|-sjrcZ$D$}_OqdZOXeU8{1C@{svo;jhmcJ`U_?C|eR7XbAgrg|JC>E5?4KkBu& zRAI%g`Ic{zr?R(X-t-0d`0ix^@-2q;nj^J_yFHhTVG>ypp~K!~ju$)iSQ&64&bjkW z`?;@BN^)oIk({C*H)c(4+rrCbT1clM4wxCT{$+YJUjXO9$Nr=8_Lv&fF00m>VWaFWg&4l^hiZpUw1cK{Xt@oTG9Y9=*SViZeE8mMVD!gx zn_3DKhcyVNG#JRx&7_0&6!^8AnH5(a zl1>vtU_I}{U)%vZIHaa1`Lf;S1|{!R-~&T}UUTEVStpcGXwP*tBJV<*Q7Us+MkT14>JPHorGYXS*gTw&?1kk=068 ztmpkIUq=(`k`{fVLlJRPoT;2W z;x}q#u!?K$MkdG8>gH=^Es3;0le$Gv|`bG^)1lU(|}uM?H%W z{DrJI)li(eHCG%DHo}qkFms%ER7dP!qwv6qfU7X|0O0YJZNP~pOoJp}1+Vp5J9Ond zGFz@F=3;1|(o$YbPx1ZoL4_5`KCN#JQ#O@a!j}>Z__u@L(+zFu|1Y^LQwCpH4Xsc@ zo}9Mv_F*d5)h!EiR^yfzJsC9=D+rA{eq{Dteobfc3NbDgr-)?4`WSFroeF#)XT`Nc(4s7Lp+;Pn$t^DATAOs^BqUcYDx#UPJYH!C&ugei$r`7)#M$+ z`hZyrVVcPp)YwO8oeAA15IOVr_3|yjr;u^Yp76P&VlZSaAqFQwA;~$)Wn!ae?%BP+xZ4 zZ?IpQy5a!W`<8roq$s9Xjibfy>styTeC%NnVTZY$y|KXZMstZTKqOFs7U-4B}m6TH^?c_ zuz6K{P-^cwh|kwdIueKa*Vjrl;u}Bs?&z$*zWFz`H3;uSiy?P5D!m@M86hN(hgQCf z_MPRj|7Htn+Q+(Lkal_};zjQuC-VYh+OELM z0bpiKOFc`btxV~JXG2h(uuyWhj%Y@x*L(5y{`RXXEvT!QGl0mrczP(E>c0Wb(YiPyp!N=iNoI&6Mx?Nmf9-nu*OJeGt4??Equ( zMW*zm(ePfoG#MRVKTD+f0BqLGJD^Wq#VPx- z;JjwOhHy&^h1uE&9K zJ97j~b$;Vn#!~n8^w13UcPn?4&emv1dE5!Yxrg@2&oRek&u_k~n52~z%X#h~X;7Qs z(tTc#Ax7Pj?<44tW1jmv5?iC!EHX}K`tnlq0A87|H_^~!Kok~QmuHozA=0PW1y$u4 z25jCqbGzufiJbhHuybv-?&$2u$Q=U-9+o&bbg&^2oT^OQ^at4nwIL^)cae>oYb0n( zCLM{W|-(ldP4gIyxm3X-J$aeiW>B}k=Sz}?+OdvHzQqi|CT`Th%_=XfrjU%qDI z1wNE^kP6OBRIEgTwS20)1#u+;*bC-j$$jJ7^H53Q!5w>)7miYSIO~|C+)CCHNrC-Knp4 zuK3N1CR=xK;cVGnU|VZ6s(-2BMGIS-yFUKD*h{L#U_M|~K0S-DOdy&UDVhb`$zQ^1 z2=KB26LP_jfw7v8MV_%C?=xfb%&qNO;;Z4f>5%0u+?iJqGV|`U**48+(QhQjsx@hv7|1SUyVaZ;7w6^OHje>UDn1y{Z9;_7gv$0wuSz)i8`}lBaCn>;vCdHmxa&5EY ztmF?c&RCF_RZ5(e2tf+4TNEiMbWUY<(tM%t7@H$hsYW(aE2H^46Qg8x!QrF8O5i{^ zwLwl4IJn{!qu`!xt9{ARX!y6`6g%Hd>9lU!(yVbkp&O}#O`S(y|2sIJbyE{|(|#gK znTnnkGMsOVBlsQ+EozC1f2IRJf2`X>O|!O9{W+^l9-hqLd=P5Ad80fp40D0U@{7D% zk(P{r{Urk0JA-EDk{?B2B{@qCN7Mcn6T38d!@V~EvY|d|E1grF&<$YKomnR8YVzYe9PfXnPlff>nhK@ zEwm=@LeeGn&Ok!EHJO4xI{hehQiOP%XZT@sqpASKIG_#vFOwMTyIB28j6svbY-YW- zaUt*V@l$^`+VYcb$+DUjSGMnX-mUU9fx^ek&mEF$I-SQ`yRy|t_0F+NTBvXLOOzVS zz477JpB9&9lWCt)C7PiW%o;tWKw|RlRbvcqd~9J(GmO2D@wZJS2KC+^ORC8Et$@w> zNBQDCWQ$~QH3KGy01wZi*C1S?r$^SeT#HFiZFnIhDmzBuo;Myp{jGt@@0nn?xDJk5 zb9Zkr)?85-Hx1h#6%6pA_au6R+wI$p?uw3Q{0yPO-_nDN6PEaH?9xHHlRcv_KtTxEVBeON3 z1S7h>npg0tSUYwcNtv9(tE`^vR1H^MiKA1Gkb(0*y6H{y=ptJ zeNDlrOw9@vw~nrTT;y7D@J-C{@!qox{I@#I@H;G&f#rGJT>{&=Y%_Z?#8bVTng3h=h9QYxW&Qt zpJlC`hu6kBa$c-nWo;>Kf3kM|8&V2_H&$WC8|xSm&o*(Y>mf)6DCzwW*2M?Tz`&p~ zY_+r0enGBN-9PEZ;itwln!7n?p_h=_%y4^(x0O&b#9d4d=bnlaw4OBy`O9!KP#s1YKkXC?|J$67gI+&_ zJ$0(fPt*H*!o2MO_2v%W5)zwa&$jhmoIJ60^5&W9C=@@uc7U8nX>g?CyWI5pFlEk$ zD4cbZUP)qggT3XJEi3IDpEyQXG89cbZD!99ka!`)>7MwJMk0q1Iy1Ga(m3hm-tpkM zb(&ndce?_MPt*_BFFk6ZL?vBH)f=V1XNbu#WExYyNpa*z*22hUeRFzj#L;`rkdx=s zdP4h?x(C!yH5d2i-V2&B1baBKd+4&kRn|EuV9}^OfAIW;iGAML!k~ZCuWmGY=sG#W z66EqYg^i><%gBD3;ki?cF*PS2QiI+P0S?T|EpNYeb41_m2y|$I9o7fPQRfWRGm=H( z3*JL-05iIrY_b?$xeSTd#nB;pr)SEQEc_5(hbRH>@1KPE3`idQrm`Kieu>6%7?u_6 zz04sc!m0VD%Gl>d$g5&B6U(MettJ5s>Ls_uA%mTXT?XWWTv~pmQ%~7PXgV0FRkWsNnu@2ZN`? 
zlB3>+$2Kzvn_cF=yE>6*^e8KRhDYS8}c?`_cWYo_X~V{yMI2cJqc`%hfs zV1DvRaW{GQQ+Xaido3OEo0?KXR%{P87%5JG1PYRA96c2EhLOM=zOP>CyZ-e#L)x7Z z+G(K}{xQgIX^QoNF@{XUArqp_G!`tXD{0o-d93F+b@T|s57?**1krI!{f-)H+R@Q> z$zYr61Jn|5@+$8@Go;4$T%T9Ly$z3hJmMpyx!s)0OB1I`NtNk9&|nG-w@=;uG4V!5 zuFnEz)=^bkuL4Z@SEpLmvdVZdB;X_l(Tz(S`Smu)eZ-CdWL{OLAy$v3yCM%)#vS~;Gk2<5kfgH_X-&b*a{8MCQ z*gYR^lL~mPyqkGF8#Zr{^X9F4m9-r$x$v=jov&|jZ(%xq$kP(D1Ya283{^%c-*Oql z?T)(s*s%}S*;g8@=<*E90=C~a4R^EZ>o1e=pGX)r?R1|?k+b}2L7S>wxcOOsjt@lr zPl#&@yOksv7=CiRSKcG6kRyittW|Tb&1YtQCCX7cPPU@e)sOB#najABdq+dG?*>CL zx6a^xA{@ueLl3-cCC5dbN}I!O$ZBRa>=9t%(B4eg*C*yDxW6i5jLBEU&a*2afzMNL zFnB0-$6qj4-Q8WQQe{1HaF+U5YGfS8Xw27`Yt)CuBC=auc2MxXi*cCx;Ez(pSg32N zns+okue##7X6N1~(yDd@ySTEgXJt6Pr#|^YiQ5=!_FeRd_s%;%_UVvSJ2G9olb-yC zC5$=zE7Mlg+2Uyo-zU%=2Q8{XFVleEuiOkm9wZgI?6)Z4T!%`bawsDst4!`_;K z!mTgm$sk;6YtCJ^4G$Ns(b`@7>KV|cC5j~4i(Y6EQ#&(Lk$BZB=B(=^f!c#638v7K z`o_Zt_Fp@JEsac~<_s!RlMdZdF@y51ibIk9bdqKYyLW|t3UG~UN|x=2 zNiu!DxyImCns#ix`)B2>d6*nc8Cu&>{W*<IUr(!wt)!?p3TVw2@$L5iT`JtIU*hz;O(_++pcu>P4L83PT)RRQ5H0K} zpwT0013_}2ZPp)Csk-ur-A{#2z~_s3+eLYm_iOK&GV)$4si)L@&`F*JloTwQf|c@F zvV~cB=^jU_EIC>Gz`4<^w3={bHvjAZ{AOaq+c%&UJ38ve`a2;&nEFTVf>~(uH0C0U zrRo16fH`3$>Xug#3+Ad+lj(htybzy1N+x6K;t|p{;NH4NLy41o*=AeOe%j?dv~k8# z53cY0=DdPj(>8zdUBD46#*tfy_wbMOOb~%cT`g)4BhB1W=I%~ivXw$hGXb^!m_J4I z#;0kACm!)7FMUsb)*bI0y1vxE+VNNUtgE)&OkkJC)ABl`a}~I;bv}Bj1`M^G8e7 z`z-S_@=tw+kRDeOQ8zxEJa9lpvo)(B42%VcG@`EMRSGQ!O&J-+)_1~t zDVAhxlqfUU;b8`X+@0+ga!TnC8mgrFPf|EFaLSc^e22nwd*Rud5W%`v-Ob$*QS&`H zP;Y7d`psX;Q~2+@?Px7!s-HBg6rSbBmYJF%%4Ybb*{!L@e__TP2vIr`l9~0{jnA!E zbJp%rX=Oz2(Hv64o#l|Rl-zN9#MUQNnInbX+Z+5_#vvx?Xch#IhPuUt&J9?aRcHuX?eB`qz z+@2xt27XUA_+6nhnI4pa$-C>}o4C@Am{xe!^$ssLy7Ud@8wgPz5Uqd;$*CAy3ekT_rnwW``J-XzMXCs4Xy`b0DCB0f zo9L&F%WG{+a#@!#t@9>0LXgIf=l#C-i~cJ_%~D&e)FI_xSd_oN+#Y~EaUWEG_`27| z>k3B$cfSSrOO7f7))p+%IwCpuaq*{crSs~Td}EdfS5@f`KbMaw7Xm@9tyEM7i7Fl- zXEiEAyYDo&=7cSXq)bwRXxP>&4S(u>dJ?IT%PaC&i1;5%`QgDr(*?=$Q8LV>Wn(=lg|xvND*=`W>zC%Dn&9 z0F#a;SOX1z8lU+(G0A)!eb?MfIR5L$IXNJ|NG;4nY%T0 z2IYy)W=CmF?L`xYg<4`kVY-&0B>I(=#+UKs5=|+m<*5&~GTf|Z_Pt2T3Ib6Zn?u%> zI+c0;duP^~7&??O}4l#|v)vq5o)drF=vYt*i&6a%z!}j!F z*1Yq7c*Ax?2cy>gkP6K6q~fR?aav7>ZA8vnd)xqgm3E}(xBt%q_TLs6u50<}%d@ZT z1XR?3E!7vYD{4CSnPo-zYd;k#-#BgcLo#j86fiQ)FHH~Nj>~=|xqDJg+?QfUD%l%* zeUyCI)Urk79(Xh_=20h{AA%xZJ!n8Erat9f1!3sCcTAhRA~5@VXVXlDIOi!Zcz9r7}kNI{c~LBmndk*N6D8c-*g+HN)dLb$3w4|!WX zAOtIYB+JNfVAp9F_@l$%bRywrD>QxXmLcWi^rL9YBkRgsGy0yu?|;P@Vh%+GvdhhB zSOS`4@la7u9Djkd(#=l}VudgVw>rsC>-=3RhTG}B5wbF!`k_{0BzC(-Y%hXM**P&P z1w-yG5D_RHeQKh;3x?)dmrH*Bgz6h2(bqmdaMKx`Ti{5DhN>QNsi8O~&P}{wmOCBm zJsy)P(3c?#{O+24Tl?Sq<)Zibub+8iFmHXm^MRJ;%MLl7bGf%FHLM3GdGPYsQx?7e z@)%Ixnz+&;uJIuEo`>8V=NpW)$xWi$pJ@5^+W~di3l)e}^D7?BE$fwpew}_mo7j7) zK1hyAB2<_o+B{{)sK7|sCb_1w>URliD`|nSY(sa1#!xKX>(hLZxCcKZPbCWC?!$FK zH*m>q^IZCcru@*7Uw2-Vy!e$p1=V9d@JFQX^C$Yf>Av#gukyt=$%@|&I37g8hSma~ zvsmaPUwSiT7#T;hif>LOVDe>^MRI>D50Y;*+w^0;t=H{K+Tl6V^ z?sw9+?0BAv3dt5RmU#nk6%yASs@22_ah?}|27Sqm@KIbYTgl=Jxiu-t*!Z%w%BPf| zM2#uuFMq6kU)?^7E#8?HJrk*`cv`zJo(gaJ`e5WDc{*^w9DE`Lz)OizZ1xhxp=r6n za%UYlbN!Q=$AM%axayK+b9hUS&zP{gp!uHk0t>ibU_AaPI_?TcRT-V|+Xxt@iyik)!ZDfVvbtm!iXu1ia z%u92bS_^pknZp9+i=3Jl4d3;?geu7~=wE@9sa`i^dUE*Xll(ICp%wcX;hL9p>YYk* zMjh{&&|LT`E$r<%XlOw?*|2HZnh?yJ;Ywnyf=|SMnMUXd!nh6I(vOQ`I~L$ z_F!9JQ`&pkJpf0%EEv{vAviKiM#s_~GzMZKS5uAj@5WzCFP*{xvD!w8gFwb^8cE4* z#!8rP7DkG_viBLG@NVkfKAnCJj9o}W3I5!A7Qx~=L#f3*QQRyweQZ%0A^rV}i%J_o zw2TRNcTX)!yt_VWH6MiC8PrmZZ`Vb|mfyPvI6EC+TElkZ-Zi20+9%h2$tA?`$^GBD z?6RSROAK=JgJ@n8fTSVk0no-su>BR7{=}iITeDQ2#^wy@iZ7e2Q=ypwz{J(m29)Zix?u)b5LFg{3nZ zxmu%UO}5|uGE9j6(wKfO_DQH(Xm2Y1t9|T#UMPy5fOg!WTJ8b>61k&o$fRoQ)@Q8Q 
z>`WEFk2Njb0t4W;szABmKLvdL6 z@@@%wjHmgL+HVtN`CzMwE1nBwtnn7twZB>I4K)|*@4clzg$tMpdHa!moeIX^+_eQ@rQHA&)T{uu1cq(-jP#b0eHHlZsBXB7;e&!$hj=1t`Xu?4VrLi`Jb>BkmL;g&u9%B}nYntLu};RZWg-Y;L7 zKsM~y=@P9J6+-FZ(39X81oubI%IejGfO8JsWOkL%jhnzhp@Ua@}BRguQh9>9d5E zGJv29!uqmTjFmj%1Gom(X$s9Y8CEn-JN$5G=4q#nXlSWICJZE1uV?McyTatE*Dn4|~NSzq8=tUh-A^8jEb+t_s1_=FDJDH)La@CL&vY!|1I%Q0vE@Y6$vLr2`5L`Fz$ z37hOrE9rwhd&1gCtupCgLZv}}mYL^NyWIS2DzKH;__&FzTM`T0suQW`wf4(qz*E_9 z!sv@iIE4{WdoLaT)su?2c z#L0~}7OPF5#c}bfZ$gPELhqStz;4JT3Sy7&7DvBXOLsQn?K= zhLiDD$?seeC}>SlGpE0TLEUuCk|S)0$K#Ou_iYFm)NcBt-|20(hefuow$xc*9e=(q z9e>>he7$?H#T6rv;E*+NejOa^ROBO+y4X42&~>ZK8s|`6tAr8RLG2*6F6mFaH z&1abv{frlvuTD7Wt1(RGvB>taIWeTX-qIabivd+ZE_k~2C*n`nq09J6>8pEb;OhOC ze1rs8x=^}~rlvpF#0X)2(Kp@#vI=UNL&W6{MSf*pRAdDnj|TSSTs=t*RyLJ2t+B`O z3pEeNFAl|bzupvK^nIq_Lo%{U9&!Woy!0~HgiBs}XL8?NIV{GVS6XB`$$ntUJHlLd z7Vq6_aOsbEfy4~T&|>72V?V#z+decjaR3he{r^!Q|4juE2SZTwiq;Na3}{M?rQX6` z+V$*lw#oF(O`KNs6_GOr<;7g-NtD5>+XVtM~=5Vi^j+Nvy4=7EALG@PR=W6gyDQ#<87;cDySqBW}ii=kTd&6&;m zEgr7#Mrm3CFSjO`P=>9C4r^ta_k}bQdx@eKm&@rAB*UNSZLPsSa>@bEBMj0AlPJ96I~r7NL#^~r0rV-%n&)aSQnZE zMh^@ol6U1F?a8+>0eaWh-+`XpETx@?^!cd_Pu&0T_uk<||CCrh7SFc2b@R=DLOs`A znbs(8W9cd!qU@gTF5S|#bhq@<(v84U z0t-?~=OQ73bazR2E#0s*NQ0C}E?o)&iiCjRx4yscKX~Sud*@D^IcGtpM0~x-L}Je8 zYz$Q?Cm~Wb@aalN{Df?LkWjTnHsuAhiH(U79g;>51KldkHV69x2YO)-WV zrZ>=O0tUsX9wwn2ZI?=KuI9SLs%4`MIXoILgKxcRovQYgte0bpLQ{ItD;H|kMV%`y z?m`^rSE2U|j&&~-+}!`hksCI_CZOqts#L|$rJwt2PjYW5%-9vPMID|91XFze8a&VD9(yH5}ZUU5{hfX><~fw za=_aDSA1KF9bbBJz<4P_+7$ui(n85>Os}RNf3BFr${A@av2>qJygg`F-6dA5xA#myH8r0-XbJ5v)RQIT zEe;B5H7aV-D8}htoxPB|)N|dHfK)7b#)WhEV8&jCmFo&C~10wL7)Y6GLf6c?xQZLVa zLOl}(^wHtV;^5KP1Oj14+o@05%;a$=l16lAqI)`V7;j%rR^Ar7|5LE4n9DU+loQ>N z9|{}xhUgmIA!HZ5TqHftyAl&loxyR_MqG|chhZ2*o484 zk#7^D?enQU;ev57=^iaK#kCymq;Nw;oWYxE;&;dVC8w*9j!U?G10(dKuxy&C!WJk1 zho!{fqr&N)oGJ&RLrX)gi)3apypY5Hm!&w#X#Z^E<1Lh#s0W-L2x5axzq<(7Gu_QS z@zMFW*FAuoU{e}-bGp*dG9y)!;q3`!fXKV1`dO3M%fp3nFfW8ZsYtI#CCB#7mkXdk zhl~aubO&mtDAJFIS`M~Q&TpzV@e(h>)#>=q%#_{6>yGU)l022o0yDGJ%2G>A+0D6D z#2FRYs$3WK*isc^^kSE5QPjcs6pA5Cdf6@x$wqi5&4^yvFt0ZddxR9+VE2I7o2QhG z?o~+S_@15;7k3URELQJWL^)THyA~kyGuv>UMf>DA7m0*`^L}qZ$9IR=GCcq&ODfk~ zh8;7I@@J~p*_AZJy_U6^T-|nAhIGiYQhEcPq5IjsM|w#^vpWE#46c4sZ{y*GD5EQf;(OyB3wE_i=#NM|+YQNz*>Rq(|5X1f|b{S80i7Jd^;p9)FztY+h zG*OG@W@{=IV;D({2oH!T91=V3gF;jbK2c7*L?fzyjwc#ySoY>jaK+9H7U>P-N?+>4Ye4_9qyB8b1;_&o=1ZxgN?mW7u0* zY@SWPDBY}~ko5$Ez0mtYVTU@>9A<)n&YI4JPornXJF*GxAkeR=*Bb9v{&3~0W>7x; z7J%zW8rnI8=(Onni|VZZ=WhPBbV})W((m!h>BT?JjPR4Fg{mSHN3_hK%ceA@6eylQ zjAN=mb%jImip-b;ShEg&ntDdMWUc!bal=Qoi%>#EcxwDl;~l9g#dyxrI1ETC6#tcu zOSa97p}K$@9t!>KO+8#w_=QiJwpf{(O^Atfvjuq`$KX5elY+m@Q9qA0Eeokkek~@G zqUcnbHExlCUw!DFLz`i!gE@1xOz7_Skur(%>}r@y_mAXxu+DIE@^{A}YEjC2p@D)H zA!fdhCO_B5+vMaWv<3?fTfV-GlC%lZP)wJ3o#v{&o^{J(31KwlOn4s~=o*UZTTzA} zk~QK4K2B$_Nv{jMgqHbfsQB_{;%KmM>E(7Za9hQ%NR+W=2~bxH`#_O*>ONvOTqt0M z#w}S*^!E66MbWpHuZ*hRL!${8ZR5EToZ}OsAf7#pABujfX~HGI#iY=aagx+B4)%r1 zfc~3+1ie^zSzfg{9H-S!SKw3ZXO&F2uDWwVuS#-Nm+n4gjDk0kHFYnC-7Xrtd`MG9 z3dK&jY&f<7Px~@?z#q8)Q|Myn(|7%(GMh}wLl7(+tC!dfeDV6lhgzYtCo!d$`WOkU zd+So{xdoNOIflnqt%YH?U$!K&p4Nv{yO+BD8VImOH`Fj#(ibY|0U!AM&*_RB{9fq{ z${5AOqKTqB?4{Fj`QlWdWkPoOLpMa{!&VoZxdvxHiA3cGoSC3L8N)7~2Tu2d<(nO? 
zP~83Zmf0EV7d)ayq#C7M!tc_mxJJ=!Eisk@gf9$5@kqgSC11&9>J`FC=gYSr&dwj* zoX4ryDQIlV5J@!d;iQhGkcDV_-YMY&T%3G0V|3&j@RP*)&kjM7L&s5|pL8l*+a_SKKS>0}M z=SQtUJj$eFr81q`k_GN@f)V9E3?|&o6*0MGcSZz!R2_e<1v` z%(=}ebiPeK#{Sz@1y|5Dr(_?wEiSF;wA20KUIQ#Xb#e0$|4Gopv zsDL@2s{*;%BLf>=jc_N9kN5xR|H2E_%<$Mh+*Fl8l=!Aj>;4jJ@?`0eKOFf-Q;Bje zYXCogxLbOw;wB@O5Cg;oy}jDc^!))3R)+Wv6x1dT;6{NGzJHq(^sC!q z5XSqaGx1N_Jg*yt=TXUFH6_(C1ELWeSNt8^pu**f!$D6o{HP<5F1^w8SOtojG zj?TUoSt*QgRYrCkbqoDb*wRH8!|EyYJfKbX`89H+aMqkW~XUjHIws8J7VSBDSff2BIoN+uYT$XJV2|&F@?6QTQZQR%qQW2Zzjs@K(G8d zHN{XeViUy2_6@J9oWlfbg2a}HWy4dgD?C`!@v7H51;=fqQQ{v^M9-_YQ%JXPjgcS5DJwv5TeEK*vLaG7e4y;oiGq_qdE_)?apRPjubhac=4TAzuQ&CI;_d zP@Fq|*Zpi>^6bjjwSDCAj?bxT7_{?jHznn54EUiMXS}Y-fp6ML4ihC15SyJgv33hf zF|EaHS*~BVfydKe1Q>-eoq-bPsE=t5m3HRP?_-?6&ljxbYEOPG| z%Wy#A{m9{$l4tPfZ7#f&ASFmoY^q{!*&4iuvMsKeaTcnMprx(xvWgcnL-n)V zCwHpPbkXJd?-qX5ddWw3H*qp79S)dY4hart;iK_}%ZL?Y$+4>KJ7=+o=`WA=Vb?p$f(O zB~zPtm9pF9z*_gG#w`4WXKqY{WGCr2EzJsrNpvDkUa_1*MM**k{=K zOZ|7IxE!8o1#q#LIMDp^lRJ0XAJa~ItSO_T*rt3ap6X^Y_o{;>%@dC1DUt1~$EW|A z#Z$rAsTy%&VPk?2?_mZZRiBmk*4=JO9lQ@-oRpf~FzGYL)5oxH!8=N+owr;;hEh9t zHU#jrbalIUDUDMDC)EY~Fs_QE-4ge>;OU%qF0 zbmeGA>g={|wRls4#@r|5ylyX>oT2@4=-k{8Mc>3?Fly9C*VJpKX$e-& zR(@9++@GR}D57kRkXEb1iTq_h^tn=mkIQzqqX{omWJ^oFZ8^fFyTfKZdgYRu**tM@ z6-n(TAtNLzKl2yW%OHfB%K}1UH__?$+h^qiEHmQrHay(UCpd2an^Pk5nvbtJ(pLA}{*yOVyVIb*gO z##KTPiIOBU!5{7~pIUa4%kDO8<(cb|b+a~R@9a?^3Yb3yS@ z6~6yGvrZs0xkP1^)K~35kx!O2)?mb>3oD^oIYqgigqm$IGWJ+4WB0i+2fpc>{8Ytc zdb}#czFbe#%_U!;3WwIWG7nr%5~ z?wq}*us(YV3YSVXMz@F7p=#V~3}U}QF+nssOs8vSN+*TbdQ&rwnxq0gWUI>VKuQ3H z6;&H|!l2Sqf(Jzs1r!~fk-T}Y5(r~IC171Y61p|{g~>z?R>d&ihL=|&KdH`zX#7(qtGJLLYoZ?sSd9E~HeWUXB57Ey{ zDW^*3)M@K_4ZhtaUtLXXZgf#io5cQQ(@k;wv)*H`u9rVs$n|I2)n|lxUynBaG+{r^unC_kZ@*w^DfRQi1k3N4TTwoh2B|L#W=GM>LIZ^GRtk>j-kC zzFWOqIiDqi_Dja(Q7iTE>IwCG)Gi&PZ62C#g31dL=~*1>Iby2*ju=I@2wWzj&7)Cp zpQix{cQEo;82*Xu{A7(y_V$^;3pOHd)fXvRC^*a6#G!Ui;SUl#&bG9ngNJHwJ@!Kv zAB@s|e;pD{p)8{hX{nQ9&OreMGWZ#GZMDjkOB}YvyND|iv4zNG6ZY~;gA}_n4_k+Y z03H_89jby4N2-DC)`BK7CVLYViv0$2DM-lm)QBrXBnrFJNRWk_-TLCgPs*3}P5Z(& zR{WXqV(1f{t1$*Za^?DX4gA2y9;<<5g~`~*@-F6Ba(XwZpBCg4s`^lyd>6Zq1JrAR z)4AH2BsT~Z9t&yP8Fqdj5=q#Tba}QI$V8Pib5x01hJiqorm1H}G<#brX&fNaOv6$6 zv4`s7D3DusRBAVNn>pVysuOa5jfBl!(;&ZZrkVGq2D8~PQ60t~Y3Zo!PSR@~EG9GU zXT_G^=FS=%j$ZwKzx*|DB#b9%U%4nqb~3@u{h0x020?Ynp`rQmG~tOULM=Q#t}xY} z%oMNu$|_qu$ysp}TI#GaOyWK}fd$Z4OGNa!hL42Po;Uexja0`A`n>0jP z^%;qL(14U-kqErB5 z^D?H)|LS`sv$I9*a?}%jOS#hVZ?fJ_EQj(pCs{}vVZ+JsO(uBBc_J;o=jBwYHwqFA zXL(kVLs(K|*kNp6nKBnO*;<4L=vAWsP-Gt}s_pkMX2gU#GjJtZ;&ClYwh>F#Wf|Bd z;YC;R{Mri~afSq0q}>|)%plhq&gkMnjT{WRWC9p~{mYyb4cd}EPYq29)yfUcNllgc zrXG5QDoA(A^jM1P>uk8Rij`j@BDwp;36}tYy)JeOQl(q|N-k{GY{~YKD>3h{0yMjp zX?W98!LtJ`KUNo2X2)ADR_|)Zd}OaxIpCx|b3O|` zzbPGsj0u=J7;XraxOfd^*x&Hd#MVRzC*qinum}$r*^FSJAudIXB6u8OJ6G{LEIVt? zd5{=l#u(!I6z3M?7BbFImZxMzUHQ8>O2}xI?Sv@^haD0LU~#}=Zaph>zA#r6rNEW;>)u zJ7E~!TAgO@5?&=SJpU&0PHx2Lx}8+F+}O>KhdbR!ckzwar*1}Aj&R*<0=jbD;iWL# zVYEQVfsZ9_iPIW;i7O$LPkSJXV)iRT#STCG(}z#wPvgr#xqQQnDXjv=$XX%Akam)y ztYy_&@~u|}0bxuloBv%db5RK8T16JLO(6|=+d z^>wIa>ZQZA54l;R?T%lLnlpkxamL2)+MRvrqtOm<;ogDi#-p29V^>;q$72#+I;JK*fi;@{PssnOX}w#cJbW6F>F8`&QZET!}%8T3ggJ}MmT+*2Q@k&gu#MX4I-2YTDIP)&M0(L7AERK(GbN|Z2PQ6*>s?e-UfJ? 
zxwMv&p@TZiqFjX(zxD0h%e#boA~<QEJ&Ng`Say=-rej6X^fS54GuC(&oWLXph)9Z1_~$KR zse0S?658#j4O0D;FU8MXRruoQR^V+=Heq@NsSjA_Nxe%$zLJAy9J54cjvz9 z;r&lVQo_4Fu#Ck^KwLhu-hXhu{Of!!xBZ=K>N;s1RK+FMJC7a`PAR2+PXA3MS)hms zrw@;diAm?XsD8SQ61j`F0uV@{U}$JrZrW7z+wAe~&(Xch*~d<|A&ym@cI5Jrh-B^t z>~MNoQsh@aeYecTw{S&CwZ1Jp=sp171WiN__ctKl1Ds{JJ#>Kn3eRGn-RmhLSe}$C zRQel}*OT(d@yaw4K6jBB<{#L9!dH!Q@Y`URtae0zh;^gf0QW{GX_iF>$vmW{GX@Ig}Nc%k=}M z1_4PT1_96Bfj6cDJ0kieRopI^Fz9d+LN&j1EJy%onb6a%NJP;zn6Uc7qptk+p?JLb z_5le$5LjQVlxpb1ge1pe1v}p<7REZ|X<6y`k-rEv(OV^IL)VaBEue)Ymj3{+{F3^C zV@3W0pqaEZG5DP*y`6%1swN{# ze?VUfJ$k?7t{>PmyB2KoyyMHs&`!IYK$SP{r%eh~4cTo?TJqo85~EmzJX02or5gU7 zsqL~En&qgD4IPI?nMJTgazvb=s+SQGfZ)Lcw4!^pkehk^OeF4Do=E~}^LzneMtkH0 zm!Bg*vK=wGn19}FwfWCe#!PjNP?{SLp!;l-sH^^rt#%BiI(?G|vYJN}E3_3PSWyI1 zdrQ7F%nCCefCm#M2b;`c5kzQ)#0ZLUrkbw`tO5PEiT%)25~&?9L|^~iN##b1X#{sn zUweXFYm&iM`+81{7%c0|?w>`ypy;U-2NLJyZ60f#@3E4$oq`5aW`*Rm)&YwW$%<=3 za~rUCvi8ppWCyCwV;?dZ>sPR?;ud}*70Qst-+*4Gf!J$5`01Xg*YP$ap=N7L)w9+M z5z7%PI>{|1iVG2Gdo1?Cc~oE(jjz@0ti$G9h2T% z9T?gqZ0)V*>;CT5mWAnJ3NV!(BYuJ7T61P3JCl-H2?yXH{k-dyB`95_`sLNaS3}CL zJotdcS6Ed5MZmWQ`2tN3{7#JS*noW_G%ahl8MaQ0Y^Z@-#j*;~tM%e|I?W{cY$aSS z$@Hf8d4}@b5bLcq$%S7^z;vue`A_ZRj^xZgaaa(UkB5>lo(94jGU`c02Ct(>AWHkq zJ9xtrJF`B?iye{b#T$6m{>rVL8Rh0X^FXXLPir52@hRV zwlLu~95gVU%B%@4gPVO15yliS0ZxsmBRNaNw97G7%jnR1=8(F(i%fC^o{k~+S!e2Y zTjB>pW}AC|=g<1Pwa-eu3))__Qko3KbMv4#x}@Bmu`sS0&ps>AD$p#4pqrx16hJA% zyZN|z9TF_5s*I}+5@sz5uIhXqhvGmJQ;{wIObyewV!haXR~Xo!Q0#Qhn&@W`fmvTL z=^D)5Q>L^^$!7e>Ie>2t!)_pLos*z`0Xi6$?Oyf)pmjT` z__aEm;+*U9=ceK^QiI8ng!G8*4&)~)lGLM?l&7PI6CBY!=wayBVLug<+=1!9vj(ie ziE#Zi^=`$}MFgQUXQh=I;2gXfB)xnE=v9)U8t{S2Q&5YPgOxX$61B z2X4X*dBl>x*bA;Qgs7z=KshPxJSID!Bt`#=$?#(&#$aVi2k14A*b7YBH5JN%Io}5q z*iazXwcQ`x)xgP*8VVF*;ta1nO%f%(4X0B@jLu_?N{WeOdG`6~4$vTSNwBCWwja6i znk`4%=oE^7`0%pI5+ag$dR(9A#s2^|VhjX59a~^MT7U&}`U+QAeP=*9&Fap4pe0iY z*^$MrrWt$*2%A7ew1;rr+a;4QyA?J#7tS(Ow32Tg(oSWF@>hsr7kd=ZXYBjCYNLE* zf$~+b-jJ01e!YXL!2aP7_p|Ym%ABo<59GUUCfr0Lxf)J>EkYrm49P_-+yWaq2dajz$#z&cZn{~QMXQY)f>?v` zU+?cvyu94BqBQhkjA)`b2B=VMdoesrn_`41BIQM@M-+tg;yrwQv&io#(Xi8pcI8_w zlJ}$lL4%or)J}8zunPn=U6#qZJzu|3p#=bUpODMYqgxK|JqX|-_%S2z%MH`NP#SxO z!LfnSiSHKfW~GQ!UGW-4HbS{R;ZKKn?vbOB<9YvjLpDR~>m)e!sUVD;*=_4RoDL%G zv*}++%YcCp1 zK)<;cszHGqGEQ56cshfRE+$WE?GXlsbIll?AHAbMJIay-Q$zwl0_-@(@ue$R%6Rr+ z3X&+cr_L10FW=t2zD^fewcOX`St2<*bi;eV|HV6Q5OLN8TjB#i>Z@_wMLFaqFzI4UhSCvN4L=kXFqy=GR zErhA9W_Um5N*mo+ZU6EOR;qks}XW!urEt8~ab3))WS3XC94z(*ic;J2G<>ir;#!{XNS-?8^Li*;KCpopGg zm*HGm9xtL7?>_+3^^VT*he)@1(6>8p~5+OO1L$Y>2ObZQJ@Glql z@ON*-v?I6!A`MpC1v>aHn}W2|r?2V^%PRlmkaXfYcqb^(iGrF63nQJv zQH$&63e}OeUoTcbe*l_BtHF@ydjxX4fbz?R)#7Dpuy7*bGLat70{U9;tbB&SvN?8> zwEA0FFe#fk=h2E`1XjEJ=GFKeAx=;GQ@eYYu#>!5D!O~i@dS*dob)ga|FkH2pBMD+ zy5){_@!p*}|GV}aC|g1YJ=iTU5Hv!23B=j@621$6OGz63@PTJ=EKr^^0us3WfyzFo zW!fHcZY%XI&9)Q$OL;6_8t+JmxH>#tV$cHd|5#d{SwW8b;NXWbJs|A-_yWv{idS_dqbJz#L6x zNjXW0yQALq7;O9SjPR{R1GHl(|GQRJhim)Cw;>2)ZLHPU+|Hf_#$d^+{qeipQ(#A* z7EPN5>Qi<89WpdB)<)^|%~f0Iwig_|T?^ zGj#JaDW)VaeaSYk!Xn*#a$*lso#;yle4j!|3~-C-nZ{ds4|lR>MU!VlssU4_556^MR_2KAGddtzMw4ntTHz{?PzFSltf$N(W#@mW@*xs}~QTaRFE) z)8t8^Ja#~v0W1j*cFBA6PZZnOMLV$xoNI5sZh^DOu* zIwO(+rOD}Wa`viE+r|UDe(jKU>K#T)i@i7qc+zMiB#6d>!7Q2<<5-RR`$pvC<{<>dCNll9 zevF+esk9PDU5MCsQEW^Ma~xDgyU5eLLkvR3rh70LsiTYOlc~r;t*z{zdsrV_izZ&r zA4zpwn3@&Nnutb<9Cv#T;c_lIvK?rKYM>KP{H@5t>?t_Op=JK_KL_z=#e=Hx3x<)bu?WI8^{;g4YHsfsF6w1={`tSP_LNth6mSWY5kyT;;@ z#SVU(zsqpaBqz{d>L1>F)gWe`WJ8NcS6t1rJPiVr4DxPcByVr*+UvoiIDXtn-4k2C zDztwYNR&u1w%A1|Dqn{S(yoL57_9{ZR*RJS%637HPwG<=-CeZ)cXh=e(yC z^e8N$&;!I@g@eTBtQW2&eM1smedX{;U>m_Xh~9{wKt`~5h4uG|^6$7TwlQ>?vl;KX 
zzq{V4Byt`ZL{sPC)1ZsGvEGn0hL|sd&|i>{SbHW}kwWj*|84x@6}DTfYy!YT>w#A^ zzY~KM-M9x1LY;iQQ461o>DcHAD^J4Qtz6`70-&scoEK8rs8g5w8lPk1H&Tz~W$H;) z=$B81(dJxHBHwHpI?8erei($i_sC_?y5MD9&|~4rLt?R`v^=B#C}eaYB?U(^44d$o z_J?tC)ktWAac|&HnO|Gt@&PqRl&+&dINRD3I8|*=7A{^tTrD@arnEkgv|~j=C$sn~ zIkUd^1?%cflrP5Ol!d+-Gg?`H6D5ter~q=nNRlJNU%8ZS{MI7;`+xu)ESZ^7xbmSe zw_oGa>!e3nvL)xg1kIqqdI57eVu6r?E>xK{ zkSLg#F*yAxE|*cA7bf}Uc#GGsZ#8*ksFZUOC0Vs}1E$kp;=a+Z7hKlJ`oKHpiI#S; z+nG8Ol@zpJdeHUM!o;H5qY#b7md3@4?|YOX)iUtD;vvB~goW+?Q#s~hiWa;POOL1Z z8^5-h&7F$%F9$}pCtrko^GSffbdpJ|_U#4xj|rkhZ|HYf?vc=>;IdiL#hWX!2^3Q> zMjaM07HNG#-;*`&5Dk%SYHyhLV0{V8Rt>eslhJc!RD9da;m`6e7R~V`^K43gPaXkQ zT=!y+a6?*IEE*ZTJ+qo1)GF_q`qV2Mow|-yVB?M_i6y9Fi zPlC^Xzm|PT{_2Hp9(LH(I|oLxj7_N6J$55KmE%)+-2$c`pWR`ruRheFq$waoum1lD zs#^B%b>ybE&)Bb*+%_T)|2UR!IL+cts_gZLk?k%70O+FEP*yP9SM|aTk+IsNMce5k za*u+t4my>|mG^ z|66#+)BqcU=Q`E920K?f2M$$)#i$Pgwq__WaOWAud@W9Bt+kk^b;MW8C{1x3(W3F6 zy|qT{oPg7@`~J%N|7jYVDt!E1`Bs2{U9ur08>OkxOOei{DO|Nb0Jm9khut20c*?4l zSWEPZ{NQr5oct_P?e^HDzpI_8S>So$8+?kew=;J8Dxqe}oDV_@Y|+>$a!SQ0R?Dv^ zCn;B^Ui7+f;vpUH&Wv{hRFhsnZp!j zE~w}SmU@@}q%CO|$?j4?Slk|3LA{-m;H2-;4MT7^FSnYWC4-u_Ct(#Z1)w%hxE#ZZ z?bTWwmV<+=MCbxTUr`mFXLJk#haczVXBuQFzkn>iU_ z!<&HeBfrP5^QBWc&h@6h3~Y~8tlgL#iCjcB{oaq|*;ZdA(VYjYudgzqo1e5;qPv}+ zz&>6ohP*3!gUzkk#Pi;AMCsBePs*EhXh4>6Os5{VDM!zITDIs1_qI>;7qo^}iOp0GkP z-AtXE0TM$Lj!nF3WU2=DR)+}rQN%7{27FE! z35Zn_Tw`QfG-*D~d$vx*lmC~ePckyrwrp5JE|JVv-_}c>=GWV2CM)J*oq~uxe(!qv z6uE>2qF^=?!_Llbven4(N2#En2;3XqOQEknWQk*sCPUS^i7QZ~YxCnWuXli3?PC8@ zftuI2X`Vmd2>FQY&HT%vBYy0oz@XO-CkRWvJ^0%qyd=dbTJl6Vs z{tg-Gzv5XW?HJ+}WQNeDF@}J}UjQx#wLNG`19tr`H0KL~_#BrZU)u7d_u#=a?|snu zIhJED=He~$x*f~g9k*f%8c@Dh?mt5c$T)0=`R&*` z#xUn}n5#PzdLkOaufsb-Dp)YVgC2G$cd=~+2wrsb%i=3-JzmSUZ4gi|9Ov55Z=50gKLOgrVVWf`f+<95=dED_#3b@aH~m@aLI|Xne9;?L^9}0_#*`b8;Ro z^h)yt9wWsrQ9TEG*BGgk?|;cqY$%Rac%_%tS^MwPcWrjQ{^SgNv8+CK4od}}>4ns(^nIEkBt%xJM_9eD6Fk(@YxDX%3*b^?$0o>L_F43mcT-! zpSVe61MzwzKu37LF;tl#oB1>q;Y*)*Pv`%Zr#R@c<>AQy*Z%;jj#6!D67x@@PC`$@ zczv+IzzC3icN2=XXEUg8o2>Dbe;2m&;NSu^>4iE=-1o)`}kxrketFw4x|;gyFN?sPlJd zC=#JT)!|_u-?@RItx+Lt(2YNF?v>ezAM4QyAnDo}SMRxzYWQcRAZ|3UnTtuE$#X9O z{AG0I{Oxl0)Z=}Bv3Nm&jK7zu8{)ue#!frG%Et7SXZTZlY?qz}1yG03>*w=ws!fqS z(wNU^i=~eM^P{6B)fqZP?BdR&?n=hIb2W$NN_rP6P1i z2Ro*XaR_{KoN&W8(|ljG;nOr~<#c&qP>c{G&pc5|(>XVvM;@(UBJD#%1Q^Q6th1xV zDd zWiUQozp@PdGEF|V^oBXH?%Eow2ro0dM$-QRU+g+lQSkM27G65tB#!^RX_6)^NG*=Z zgYX*2UgSeZh;v9$bo-rL%io=w2n%_^4lK(~87M<)gt<8q%du{e%;gfJ64uC2l)ZVR z;1ZvJCp%?dsq7a|cilO#DKb>|@Gy~VEW@t!s5CGIB#pB}>~+(cyK@)#-S&f6SjAs% z6tv$RT`E_!sKv|GS>)AZ`g7)J!B!${IYP`{t#?Is%jS01K}? 
zH<5V2Yu_;GURKQ;rQradbJVj3<;4x{Vfo}qp%0F;6JV}*j>x=yKd`!GW&5x>cCUI& zaI`FkZq5A8e?{Ijgsf<>4C)}w(;Lx+NQ-;g=SLWat{b7tp)FWMahGI6cfVA~L|;hH zb(tMzE9(;&ghi!@Tz6^Fd7wi~1)GMwA>5GN5)($9<;c)Sd=3jHeu3ZBWLFcglzmOw z2aspCKdqy)u|VMzW-2e!@)RMNJBVO#73#FTF-T^g|rL-v3F+;gV*HH7~Z8fQ3~ zNN>Ki$$kBMVRYCpkar04xqGfcC9DIyE(bxV5QlT=@tl4vv5`;NTo3{Zo^#;^TW3z!Qwi+uet(jO1>ba?tT z#7aQLo@{0Yd-~|r@nrVn=IVXwi$1wxyexk z85k-@f2z1QqYGf|UzMD!6Wwk}pwqeyL<*hbx}XUovarHtb{@m&$1uP8KV`OAXB}=Z z4@aX&*da6vX_nEvcj~9ummu=u9&Bq@9NM@u#=Ea6@gW$rC|j543*~S}Ulu=ca036j7U?8^JTRQ(PgkmXm3RdEC5o|Z zxWOkopQ`#Z%Cd0{AGfVq=CBM;3?=}~R}cTSStQ6p@ZiZX_S2^hV0XdQ+i=?G3c)H{ z?3@8;GP@U_a*YGidHO=y)6osg@NV(hqwbI14*Orpp{(EE_hK)<9yiE1wp{{+xr{*Q zG_2ei35H!ioM#+$KffOHK_zyJfAp|9EReRX^i&!d#j3Zd_XoBU=w8;|rDV^^W`H?7 z8XM7_Af`o?=4Wp5aKrc9+gR^Bs1)dnfSgt?8*i4Ktz5H(-H^Z6$VeKwBj+i#fH6Nr z$G8=dy+(PQn;<=!9#5rw=U6S#E6PI)k3mdcWw=B8Z&L>5qTLmsRs=Vl)|VNx@7j41 zO|_l7v9EfsmTL4SPRUcT#i7DFD+ovVmEbu!=?~>KD^ymv9*B(Kq=4f);v?UCx}?9p zFAKElC9xWcNW-PE>e(;2RFt5;Ws5jRFy@Gk(+*qW+cf=+PZ@3RuRonc%+@tLqmv&4 zx`1SYJtIu|u${cdi~Mdpn}`$14?~}VK4$&J$fQodCA|BzUu`U01Yg3Ua6hh{w>#wJOP+SJDq0V84-_gy zv@^N;m>ONvKn=}i%!sJoHcQqIRp{#(U~_zA-sYP;@=PJ`N^xe##!;WNCS4Nw+sHjH z0Y(zsYPbO`bN%JvgFHvUA?fELl@nGIOq!ePvAoaWRvi1a*d;7<85?SJKP#Rm8Clzg z@f<0*D^&BJ(JfEDt)0D>UcR(Ky0m=a(GJmECy}QRT(#`XeB>Z!(Br0?uDg88{MOo9 zkYGU~6DfL*`EHfB!2Z{lKWk%eTLkb$C=0WV zXYqjPn0F$?C8`_9?z?0^G=k|-ud|&v0ax5G|0Z&wm`br+V{`|M?IFkv#aS zTyMvNDx(jHI3&{m%zgJ(UX$q*yu&!`Puo{^3Dfy4mQ3w8)`tJYi_Fv-_4pz^11e{E zitwW9JMfadErk|(8w=p2Z2%4hfVN2FG0`W|pi zXaH-3vf0Y`IT=LU3S%B>8X-8eJL8K!)$sn|q%ArMhO^k|QcrW8%?pZlu6@D=!7A0^fhBu{63FlUrB2XT6f2)eYNn_`LrD>vE><-7~bj(++Q( zb0Ad3y|cI7^ZvA>!L2>=7+_FP?afSlCiU$qQDf*|cY)`4pbS3`|5=p2dfcAFq6aq>dU~b# zV$y(DA{b|YqSqL47d+XOXVLw7;CMJ6I_kdJ|Bdq=VI5}MAFrTga;qMyuR&|gpPdu zM9sVdnDP*k%z(-@q$E|b(4~jiC$jsYt1{_)rG%=g({1DJbTLpvk0-umc^VEHgpU}F z!FUv?laD{W8G^|$8cuCs*jbsq)U8W?M`X1cgrU9mH>&{@d~*Ml`r7C?!PP4teN6vn z17Lo8tzA;}0;14&yH-v-o_(2(C7jc-5JjY$p+QPQdgxFo0VPB_2Lzi}?wT%pS6hN3{1)}N78L!0L-pB~X2m||E; zXSy=<9gJ)Rq&CDfL8jt%3caG-*cv>e+@>q8W3r6AAV+jirjS*Y8sRsgH1WjfG>YkG zUXy<cTU5jGFGq(>DU>f4uAjGEY@0S@3~XJjNPS)S ze=EV8o5jwq@y#>waGUL~5||5nb8oNgJW-OfmgfDb(0QvH1H2?eXx^%WC5mXNBNm6D zLWV%Z<`ozL`c!t$_KetgA1kGY=F`j*Da#|-jE6``Z?~?6lYAds3Uiar%qCX9vx+JV zVd{bFOY~@~JowvJj{C?2BG~Y++Iqb(Ss;NcO4v#%hEH9f-pF^$ftIfl;Tp1Eog$&# z_hrREVt~YA_i{fTU%=uQXibqcfcOoh;h|N}?<@RNMw3l_im$CJh)@|J^i@oNdbmhV zitc?_Uv7X1+HJMulv_ z##(0~UKs99YlucAQIFA~(vXxbZA<^Shd=;b0jW=-1jsp1h(-Fb1?biELvC2cEtyG& zDV2H-KLP<_<(K5neZ(rKPCe&JIMX}B(OHzS@^3X9_#1tI)f3H6I3wlQb`fbg1tY2r zi@?Iur5R*+;=x%R+%_^`ir+M6$X1$;y?_PEXF@RCL^s*;6stZ~UAI9}Tpfg>PS_hV zKdZ@60fA{}-TKK_`PJ)>9g>d>j4_{^E~~Bg3y7>oiYcvsuvH3vXn)4LMHBOl+E6ARzhVCByzeGx76~c#zkDN_}wiiVkJDNa6@Fxnet`lh24`NtYN(d+i zS7k;x*MijraRTiL^WeudK|*9&q^Zq;GUS#>RiYnQ5Qp;q&BMuAL3{M@|% zZH23STjAQ7t#CvRTu{v=a}#lemk-IF%%lT-g%a5hZ!!=rF44gKR-3^_%IaY^5etse z5XmkmH7pQZSS49ZMTM_RZ}%q3bye!sZ-^yK-GxMFYCj-n8C~PlRY%$6DP)4;>R2&( zt;`;L5U(yQX#0pIU}n#SKjM){%W&I8l9|_Ronz~ug!S@s3Es`m&F03Qb>sc}35Gn& zTfNMu^BYQWVi{5s7P2yV&7;DWG;|9)%4N($sxduNoXVa#npTL|JDtI z@9aalw5q07LAT`fB$2-a`77M0xA!@}(w@*Yu(WsIf$UKDXd+{X+M>Z50ahn~F{NXt zdny@eNd2Zb3_pt!`7z{P!^Z(U;0~4^j4s!Ab#sbL&2U>$}MD3?%2mU;-7tH@X zRTX9a0xRk@b1G`{Pi`&X{K91(i^-|Iz(ImNoVq>q!<#R`+2#fLufrqxr(1SCrq68Nh{kt1b*N; z@x{)e0M`3Ys$7TEx-p3X;Y&XWfPuILqFr9TYXd_NZB`#5nEKmB&s3`yT!NxvZ; z;oTwD8m#wQMKQ@RQGDuSNGhL1wfPR13-b%f0WHd*`qK}z3A5IuDGnOn^Z^Y2TNuN< z43dCO_-j=({w@fKprJF%_sw@J7evf_{DX;iHi1@al3ItW;}5M&xM8tB*6l!d!i2DR z$gZ%~{E_LdG@QhABNE?iuPJ-+h?plOd>I#2qXny@4D^wC0ZDSC3)RO{SM_(qNRW zj=KNl>HRN_FAJW%X1$a0lWlCU+@YJpcKFE3fcW0A%nR$qb-*IC$ULETTIbKp**S=c 
z39K--nWDLi2bu&U^FiW+nHq8ZLD?DibRRT*@2jLHYxp?Jq>d@>T8eb8wT{%JGcV&z zoYBtyvV-dP$_?Bh;0e5j-R)J{E9s1k>u&9Y4#tB#0Ud9=vG^(_! z7=%v0<5KiCLuzkE6#Aa{_0%X64igy-iXxN-iTwdq+l5%l^qgVE&G|p~A5kgd(atp9 z%c&bKKfU>v<GUmpQSz>q();S!eq|{NRnUjCOz5 zyvy~RKz`f&<#t%5%4YT$2NA?lj zgU^LNlU=qnykvMoFfBaWBd+s+$8c&Bt3EYsw_V}ZH1B3zrxkBOB9jkNE{)ap`~P<- zil}dg7V-W0wv0vaih`dpKcgsUB)>fuPNLVuaiv=C21w0yIM z-{yVm^#-&F7xVRT^+r%c(vkO6x#Mrxs7>W`m1E=fW9usbq3H>AAgPDs6cXQy8dA%9 zfYN<8xwlBm*Ye z_7?Uw^s$J20V$i6+nyK>-FdXy7)Fp$r4YX+V2w1dX-%h8V%2ip2lNybo-8s0b!9*7 z=e_IYG@NXe^NHfArwiI5_*#9!5k8A;omO`_T3an4zbsE2zAi}|4c6^vpwB@xg!FwO zl@i)*g_~c_!p+A79ta9MC8q=zfd~Arb0JKUHmncpOcZhRn5 zdYF9^bpK*2bLLt&cDk#zclxxK&JepU^fFO2xqr$>07zvYUT+_r+@hvys zp+tl}Z;x3ERnOh+t;6j`3beKlYdnk_*5a97`w>?Qvi*5)sNlww{3o@h;n%-4$?guj zmfB?Lwre#&Q@tdlluU!TlSsK_?l-)fa7G1DV=1W)l|z1K>)}tLA3~C$9{2__KEilf zf*-jugWp#1y&J3NLY~2*?TNrdDsg4DY#_3ts)k)#wC#=YX^P~IL^XKp&FB`48T`*Euij=!+jf#Sz>Vi z;EsohPDj7-X@{J3h|Z{GE9;rRxgq@MpM_9&7A1)1fBlxdhJ?5FJ);IDK4+vpM0L3C zw0**_4g8j(Ay8+~dH$u^x3E4Se^ZZzsGh0Be`WVpiPn`O-cJs*4qHTkW?SGTi$WrF ztj6K?j7n=i`na;O|wWK~+cgJ&~Il9W$dRkkV!#B){J+?5c7?L;R zgb?L%{Vcf8D35#T$Iw9gAgG^!A?NZRvl?{qJzV0(o_F9845JN+?AY@?kr=E$t`l?@D%xC^lH#xXt%!W%a~i_GY4bSdy97Hs~mNKW*4Tx8mk7 z3H!m&Oh@h{O>2UTlI1=%PTxDnKfkwe^y}yMaM6qwcp^4wzasnU`u1=?%=Dz%B2=dL z^z0z_z=#zg9M}top_~>K5yDeY)>_C9yEVwu5RPKpPiS>nw(u@l0-5>{>Z1JV8iI2WZTd&9c7@_nw2Xfnlh0*t6E#mUBtfVN1(%CuUih_)jPW@K|z!K(WFCzbpqC(*!Zzs|rzx&VW%6Y99Vh1KHEVkp+ zCqeq4d%e{>QwuX-qpH69A91p>SOjrRgHCSLFehnnA^*}us|Ui?3}*?X0&Tc5LCwb7 z&a^OJ3tnMI%Y7PBjkfx;i(-8G6bDPBA0&Z6T*ThE#&@e!-Z|)cQGOtfTuxu8{l*rX zwG&i%`M1p_g`h3a5YsRjlr$Yp#65Jxe%$@gfgd(f_bsSbSzoq%_gQ$|Hq`>f7+Bf0 zqW8NSb?LGH3+usd_W56T%qy&jy~CyiN4!wYN6P4C8eHRZ+Q|WGwIS zi2vCP+ZtR&hS&vGs%%Nb)>$y$SDqNLEzSCsY5TK-y}9x&R{JZsqaY*uXGVd$>WX|M#*Qn=&tQm%)g?M1S>BIwgSRhtCDN^O>KPQqV>7TA6v9eT9W0hAy%%=3 zpFn;$1Ua}SZ?WBYb==fed`e4?7qd^~sZT*g_wvNfCeyXq#w$oMg_~inK9g zFX?lgUbw>60|N~3L1=CD6`LutU0&{Z_WRLAu?CI|93;Icjs83t1Cy8 z5jHe2AKq3*@)y1J*d^OuYP%>{7`uwkBkYXYAB^z?W8r)ltTK{!XR|*);G1_d%pD0y zFuklz>}D3k)>aU83n=5rY#0;{8xnmtU9Qv zt9*7fYX^NwKy~gsa!bmF1mPWb(_~FPLu_g=`3!aJ0v6T-f+S)DfQ8NkIO

nfbzv_L; zm3kUBRyAjDjAg>x^Zx9Mz2HKLtqp`Brh)DA8D{R^A zO7|y>3i}9te=u_1Nhvbo)HzQB1{S~Uc9{CHD8u~ z4W4y*(LNw=#(X{A;cyUi<7i1Fa>OP0eO`f^qJ9e%lfbRGl>YVT1BE07K@yTqAp_Ch z3&qMcIp4>XCIc~em=@LB(B4cC&boQD91i38O-bK>+I-FQo6%pSxC8$ZqD-@FLTOyf zx@0upmA^RvYKYC-eg=Y3N8%PED=d8m50z`t1jmgrOSSY+MC)?@qon&P$+c4>2| z2bi%5ETJrk22L1A?TyG!B;crJNT>0~R43y=!&&rnA;|5|${IM?l^45ob|L4#V>b6& z#)5>kDxdz|rO|EIx@^X^n-rVwAZNiOk&DpA707Hx(y+^12j>x^tL{g$)H+bq$c$g^ z+8u1}h-;q};@{Eot4=%$M)ZfyI|<$CnjOrW@?U}!{<~@~MC|GDwx9YROxcJB+nZl( zuEy_{*_;J-Kf9T@Y=0c>>06J}%FSKf+%;a;%0eEd4D1B>m#Elk^fP;KG`VF$GP~r6 zI}-y1aV@lgv7?t8Z~V3cVn^dd8Jg0?&W%VC7x88k7o^L5PaSR-Ux94d(&YOc*Tym* z|D+@bpdO-Cv%h$r{tNpZXoIl6xUcDS%O$4qv;-)F@l6OeI-4P71J@#WE1exO3xyZX zk@HNG9flKv7Sr7$>LU(>utjIQjNXP3J;E+;0XEWg?YWUQA8SJLsS`IVn%l_EpHha6 z%D#SBU`H0^w$>qF6#3KJ^Xv8GPxg~4u-w8)<=cbnnadMpQZw_zX?XpWeG0>UXkyo_ zO&|0xU10!LtMJn5H$rL}`2!@?yG$P{g!$NETKiOv&1iWAM|GFBm4Tg8*(hUxkl3sS zmJhDlq}9edqXi`R)n9jHdu;K9psCRe+(vVpsh*dG+^$ctnn=#)(}V(rynncoRsa?= zBx%N$_X$EX`(tOA5*CyK*$m7ksk%+FA|;RnrJq7S&bxS7>>^$vd=8dU)Q&WVXcll7 zrCuV`&OXBWV5FzK{S^2~`9*md2V9P&RggwpDYxd@bV@eE!hfFUlIA{(&pdBuaGEfO z!!apn@tsa)vG#t!BeS-bb_HR@OmXu}_XqtPm)-)I&<>OC65Gp<*8 z%TH(9QLAa7jCs$SRxqeT@3MQ`V+Pb5Qg`BZS%O30zAC7N$y{&t zvPh`n3llsJog9doRKgbRBbYA4d2uxGc(Q>p@Gv7|qO}I?_C1YM+f4h#{xb8;Q#?A6 zzLHS`?nv!MC-g(ufgMD>{Q{eST?a^y4V&b{mvu6G6T!*;*ZVvBZSx-3=WU{v)yA}^ z&b+!0_^`b_Y8W6lws~+du}{!xzFbj5a?*V6O}-!X@fVqLujvEM1}(_0NzOqB1RIwv zgLn{pNQd3CxGue1@@^LL7F{QS>|nF5%OdcsH8| zn=GHQKOy&$wTEj2jBqHx=>^x3|H>czvOtLAh$o5CBoVX6!Eeh!P{;((Mdap(1rc znhv8TM1RTP{BJ>1$1nPmfhV&4xe*TxqNTS_fx&w_r7!fqu_O5vUWc`U3r~JE@6Gvd z^ODa62cP|VLY~AdzIGN9eEnhnswc?UIpDGcdCndz#BjN=gQb~2^UlPx{j!r%Jr~ow z&ndq@4d3?vHFLBJt?a6 z$!)t@ew+y)+ehK>&4>vVx|S~8)1N@S`B2a`jgIbP@oRmxUJQQH!+_4x9S{W%ou|WyK4ZitO0k%t7DltyP)!we99I zUEg5Ht9k$3PUHdk1EC`M-KWMo(po7gvlo^7)jLVmYLIvrKaCed?E~e(*WTqT5)5g+TdWz}*>yd2Wp`a(g_qO!OmB9mJW;)b6=U_uk|64F)}q9h6@A-#P_N)Ga)b$9y>7#EQ20^v(kAi-pl`6 z{Fw2xRjqVOKK-1|?_pGIB4@hNhhbPXZ2(k;`T$ ziMl&Mjh_QgcKQdl&uo1MPt+%mWW&R^eKm$@i^sAp`@zV~d;wxQV=im!sfXV^z8GsBBf6!bi$6QvS`t}w4z;qT| z-Fk-xI^1Z0++a(gI(3^53&0XQ@Lh=;FQL9R2UK+o9VTA@CQeBdy?9g9y@%CN4`gk= zBYu@lWJQ%`iq9o)avzp6#U1e)#W7rkJ5=ya1TB(?hxvC;w3#?JVONoZTi(eH!JEp~ zb<)A6=2IOjROHFk^ZA8{o1Wm5`Ku?TBu1MV%R{-#mmzlFcq6Ytwf*aW;ZOed8%6*I zw*|nk*Iuad*GwMnoJU$||Pg?s|=4JVRjHxkjWne;pxWN}~a&`hTAJYBR5A+N{~5h_mf zF0xL^qt+~Ji@0?YNRta3B$l|#r6FMTJ*xcM)QH$*M-{sGi}}onK|+4L3uvh!K;T2 zm6n1C)i{dnOxKpIsIKW5{b}K^q#XK5iCpo|FJ1R`2NnKp?kI*~z&VH^RNoUa z=!!gAm7*_c%pDGn#^H4?9qw5nCk{g)sptOeZ_A$?e9TrJ6ODV zLF><6Fs(%7ghjg5ta8Px>7!*e9Gj{DVDjhhp8=0}2;%F_o_JsI5d112{`5(&bFkAh zb0fwfaTsUjjp|TA*Hfo=wJz0y(fmHR34BfB!jjTty&h$aL7L+#A zjuG?xl`Zh{N3_l~=Mou1y>a=kojD1_kbEc8wmbXBRx!M5nVvRh9h}VkJ6D&6{X~+% z4U8Tq!u_94#0xt{g|alr|&5#%zlB{rYI|taP*UmA}ce8e01` zjtSjoZLzHg^^zyY-Z==MO3^EA2XG`h+YhlCQ3V8{ju9Uo`ru;fW8C|{?FCOyz3m_S znKUe!yvQw31J^|#)K)pT;i>iN|4vV9nvKVw)pM08Y= zCMaPm^lvlN*llRm{-A>)*?={^Advd=K$2XNUDANH!d0ge3!7TiD;36@2{?fwU;;M3 zeyu7?v!1<;biAF|PHavLJA8~AtypQ6G;g-Jqnf~#M8hR)@u{-S1nBlpBsqCcVgBuZ ze=?B8-B#AJh>l5oBY%uReywXrYM4ES`OaUp^H^Mba~a4F_G!92aJiWPVdpmwKMd8~ z>o{5sI6$M8{_@^Guqg8}Xm!fKl$GW>iLtE0OEuO^(pBGnuy4nDk|;1Bjw3!#c!v9z zc9}zbI?9TP7SfYchB#uQ)~RWTLtbD?RuAjT%^jGI|Kp4_0Y?`9h1)|W9wCdFcaC%u zV}repbk00NW7j+1Umos2voh33#i8U~Y2FWdyLClukLk!#<>E`x*axpq1uq3}w2cfj zIbje5n^Ca*^7m*#b-}t<#w8*3M-l;MlR*=+{on{@Q95 zM43EAIoqfDGWUMV6$Uz-<(w?<$?$cuBF&5+9g{P9fG(QRkLAR@6RMxF18`le-;hca zf0L-!RmpmSnf00;2S~OBXMTrCwL3aLN0_V2@k^sCapYUgeXcms_F3 z<%BZv${TGWIYS!X4L>&IwA9s*5ZSx&?j+H){oAzu2EN!v2F*5M7!ORGJ zcb%#pai?x;g!1J-Z_poi8fyE^yU!#eZ#ldlqCfCa4=2^OcJIz53)=Qk(}nbTRo 
zKx3EK{VMqZoo_`5qbOaVd=$Q#|A|LT;eDBJyZeB3q9o#pe=sj+;$ z;q{96CoN(A&y6pQU#}Mqz}1C}xg=4T^Rc7>rOnS~UXy+U$kQPIl!O>)6WV)&7n?_p z(3bN&<9;ojRT;G9fc-t~T(*|YcCTaQ67o)QVu476{t#-B`2jXEjPJiskXchYLPODa z6vca2p+H9KEZzjn%eSQFmlf#Nwi)6e`+G7QV*X+sWPuWL6r^@JW|$`E>Reg+AZ)&f zh@&usOS6zJbt~YV%P}w9QhA=3F{Lu9P^i{(&XB$km(;)*C|%oh{LI#TH~f5XP~eGL z$s)%ZKA@Avhqv7Tqr{)$-4EnzKPq?m?f&fD9}Ovl+{Xs*X{Uy%F8o@zaRa?wQ9uiV z0sjy;Fe`Mdz4o(iF0U8x-#R`={_P}4T8@#fIBu+sjtTr%P3*nTgbD8&M-BKAia=A* za1omfwjSfxk?re~#jSCXwxI7JxOPZrCr89ceHwwOg+{Kw>p#W@GFO}S3*G1)omEB;V&q~ zilosG?Ozd?Ar#11C(JcDfBsUrh*uXXf$>}l4#zwx#v@PB?!B)Ps-Ck3KzM{E$fQq9Ne1I&YC;dOP0YLpAj%&~GsfmK_f5c5S}*c3H)xIW4cLjDDD*-}y?g)r{>qF4pwdRRb6q5HV+>s^9I z(C5_^M%i$QNM}&t$wDX+)S9(vpF=n#m2upeg9IMh5}pY88X54%dFZoKeR*kvLg8{Q z)zwOZR>eUU#3I`9u;Whbk4>H1JwyLfWc@AA$vd${rN8*g-#LDKvOll|<{i_UTx@%{ zo4WJO%7}eVq^}_MaU%n%0n8qJ7Pn&@f$*1^V)!qIa*IKdqjyAb-%k;{0c2pX&eT$| z<#Dghm<6Q=jv%Xh(z2ya(lNj(*R@?=M3}6ohCcggm+2I%H`B#B&OeHA%Al_PI|KTM zo&eNBDC1^OPu;g;SF1WGB?AMo(Q7tm<(Z4_y)Dx(-Tz}480 zKItS6s(SmA<8Skxs~}Y9{;Fo7P5He7w@aw&kl?~GFbuTp?zLAC(L8c)iP5}U6*His!24r^$yt5g(8@Bqc|e~C^r16uA2Nh+(0-Iv zz#9No4SZHq`b(Ay2!&O5Q?z*P$`=9Q-qG;RyeGE{f#{R%u(6!PpBfo}w(9OQ{5m*PX=#UGvo8l|CzpI7dIlv(}0 zsqU-ldk+xMFoDtalV9BLh*{h!DU?Z7Qa#*1%oHAbxaowve#psS2ovI@>vW$b1hyc$ z?+g+t^`>L!%%;sAkEiRGP9g+1yrS;TzQ1;F`6xU5S>wEN8LO*(n-j%`4zs#%>Wt>% z*6nPqHgX?*(!s?KKaX4RRnzZlgsM~&T;09)M${@G(NCW>w$xA7@!#^6Gw>O2H`9B< z`{C+Kp@P}!b{QXXyGFuWGA+J+TX_Z8W= z_2SpldZ7y&W;=d{f(v`ni*Cd3`*(Y@^^VGIu?hrCdfAoP1#*e_QCltDXW;miR;-2> zx%>2v-O~0=QFf4@tlS6=sA?j7^fqgDKZg)=nY}ZMohBnPN+fhC6(vnM3WOw-%7wRe za{W!TNMrq^_xgBDZVmn9oFfuHweP#5uFW82`1?c;3b%F_Az5yri9%S`LEq)kSxIZysg2f z)@XX)m}Y0nM$YT}nh_)^q(6hEkmPOMtHs@y8D9I0`UPl*rNI(wXm@Tdf_J{P3VH6& zyWJ@4lNsf+eOJ?tWF9q+E{~%$4`E?2aVf&P)xTf&&U+%>ogHR&kLXg5sG91?5O8#6 zoDd)nq<1LvdORX6t~Lxe7xa70#TyXXSE8NbscqX$n;YV4c5?qofow_drQV5n$|hYD zw`^xXBox6wf%3S!Y{%2)mXBQCM@Z(Lt=kj;ZUs5deGKf3eWU?cX-1E4faKA$rNszl zbFiAxw6C3O&-;~rs8(UxI}Rzja)LzSB!^4V`a9=`FFoYm3 zDi1L4rt3D2Vhl=yRjY$@jnZ|)x}+B$MO*~Yu5=>DT|0paxQrbWwYzJFpJ9DVSCYm0 z(xU8{Nj>y)rVTG*Z@0ES1fR<+YSw7R67hqxejf0f9S=`rUsuBC)7S3*MWwcka^<|s zrdu*MaJCfiK}HotKqk&lhey$+pXt;DTXc%)npPa&C&WZ9h~?o>rq`PT&bb0tgZ@V7 zY5bs$0e`>4X4gafP{-yiDnG-SH6tmqeGdJC%{u#ZRZKP!8On^e3eI*D-y^(ztWcI6VT@&t>G`YmL^iEds887(;Mkj-y~Dayo{av3t~R~ zR@$s=4=Pvj*0|$L3RplhM%+HbQ2GwgxLaZyY6Tqj5|H7NT^i0W6`f~*dj&aNH3H5uS<9hpXW=lVr1T3Sh9@~%l3HyXkPt(#HqQQZ4z&ocO=4Y-NwcX+gbY*#c^ zJ#Q;STi{M8ZT?n{GY4lr~gz2Hym~3IZ)g5Z&nWJ&d9wknxP6hG-YcIWM`A zA^hh>S$?mfi(z;9c>0nOCGY#jPo4P$H5YAa%bfg6c&FS^pzyimF3sHMsxD&djj@tC zU(9{i6*qrPJ$G{Ps$0ju$zK6)R3+KA-)1B*0;tGuj{VplS)EC^04z!D(#VeFg{H=z zu&?m}tFn>V5g%ey3#I}uoMyx+3D0 zlXfeI_r)eT93Y*o9|@);C;Z2DYhESMXrjcpHa~pk5D#>U!cuu5Px%NJw9~ylTJnBT=RHW58b$0}W7~)UzKQn2$MAOrc3P9{EI)hVVDexn^rQFD13V^?3~8!N~MuIYdhO zXOC3Eo6$OFaln1b-q`IMjl)oZh-51DDp~4}*Z=|AhGw4ngUiU8 zyZiWQLVpyUx>TK`08Gvs+2(qXPZR6=g{G0$QA{7u^WJr?c%|pUFb3t8&E|(9_Q4RY z@Cs}~WHgVGD0jna=dF|pkAg|*rf!rpWl(SG*;IT=4)FKeie^uwOYd-+~%v{=iG1aR-n;QWI=i@uOaVzGci^Jq35s zDVF+~5l{LOX8o3j zyjbCjlOR;<=KjF|7C)n(gjAG@i@)wHkWtm2wdtaK4axdt2-KT{2yi^U`H(p5FT!2O z?K-4j!%34Pc17(q$=8y^{C@}!b7B{AY*4&d!W9i(oL~NYIa>7~fy6!I3ST2VW7;jj zeFF;3dd+85iSn$qQpaR-`|h#&qgOx>g$eS0-G z^H z=>I{phWOz=d_^An$mniAer&`8KfCZg@3w=?x)&Jej;yjW-Wyqn{2ID*!Ie7IV-Wf8 zj>jPWAuzz_OUt%rWm(NUst-sl6b;~LgeYTY>L^s7Wb68K@uy|wAE7&SSa=S*fKa^141o*fYVzpTsr^5) zj=@1eTO=Uh7xdq0^#)q~x(-LpUg}*lEHah1{X?lTtrsI7Rig8c<(2*^PVaxI<$O4@ z_Lk6zgc5Vlofm+raI=Jd%+S!;YNylv)qDLLIRv$kdX0<+M+dyBkdFnzb?bBLSv%4d zM9bCj^yUHSIl3~BsQU_CbHiwYhq9bbd=Em1H%z)n{IPsf4TQ7t<&ArxZoE)*UyA{d zYZpHQ%!l_IPLiB39>k!N^me&k}%@*RHLz( 
zJ~89yKi58hwXhA?Z&ucuBCnSI*?={H!?(Smn|h}xY?}3zAcEr`mo2+f-Fun!_yE!0|&-I-x&A2@Xp}z|PDShk>DKLN$yXZaOZv@Ai zxU2F~&Pn({w0zq<`yRS#c*Za6(_cQ$vV`MF8pc2ATc^49YB8HetVw;n1u4+uYa*&T zW_-8>!v(+DkxetFdJfH?X3A2Qj{)U(Y6{_Eh8Hug6)1%@s@*tIwwChq)VoAJE9Yr% zOoQgJDHi70cUjVAsjhXS!Nsd%kL4B+d}MaG3I1DkhLc{?P6OM`JLWN!GcLS%4`ZM_ zw|vCK&p{!olDw=^9fXX6N|*a_)Fh3Rp}qACfS$n5pd{wGr*ox$M zep3X+1jXC5o^=q&{p{OGfK##9%a#a1>!Tw!;}P)iHzvs?tE;=O32K+4rIQ0sW*g8R zBdT?+Y0E;-d&GtQZzKasH>u{1rgfqhJvbs)WKYkVGxFCyNyh_OYKVMI4XvxwD(Fbc zfmID@2lpml4(NEsJ-Ey@X{ZU>8_D)ZkOlJ}ZRd1Q^W) zurZqx0d`Z7Vl0Iu&q%2HFH$-|%aO;^^c8D)S6+cg$MDYz065M!kImp~bdw2hx(c{H zTbl?_TgQv@|DXP%`J9x?Kc2)J=M``j$5Ww4g)oWmkQ@7$k`>%9s0C5|HgIbhH!w{a zl$>v&jXcdvGg&zaVgJUq5OMK|GnRAYIf%;$?^(A~tZgtRe!qdq^eldg+x~{#Vjq~y zzz@uAW+>$^$x#iLT!Xu@o>Duh7D@hBFzVjI7OjSjHkQJ~Q>a%!Vf_~GdvP$r7(NThh|y0vXWW(`)jARVJ2Ngw~d z2dAlw?c4jvsK>fpR@v5+5KJHV7_hOsO#pyYM;wN?A@!E=sZ%Y8<8lLU(Il-2(-{=cmGqnxl|qQ&{u+6 z)%`8AwsIdsL-g8s_0*M_M$BH@+1Xi}ccr|^^rjE64FCL=AppQ+^|aK0*Tm21VAKXb z?g;;2w@Ux-rO;7Q=t5(0>d&Yl9{wu8gvqR`c^ zDa!U)0SD>>H)4`MhwscbDwVlybZk2hGk{sijKq=AN`-=-<{qo0y)+pKQO-^Z0%FdL<1zpEOc zg*4GTLvcGN0JPow_O&2iD@{UbnW?8}1H8>rqlPdMgw@99yx_8c4~SL`9VN*v{2_Fb z_OgVBB!n{JX+=}gP1uStx@+8a!nto@XNbxIZo$(B*-AHzV(v2za%8yrG&=wb#u^CI zLIVOd8lD^}2xK4qoViQuQU9k5XA^}4FSqHixzp^?QhASE7$sYIXfo_rzZOOr zo3YKvCNil}^oEbu=>9bsm+m>v+zGH1Y;ya@C_JjOA-KN7h(IxKJB0!!7i#}KfzrZL zI|dac5|(FT`=?TpDzy7PLdqij8x!^)u}4YkVSV1%gg-}H95@Yp7%QG}8#Nty_!uzF zD)S$}tXPHy^l&NM8c{VR0{6W^+Yl;Ci1y1>9%ukSv$V{#fA&|u^W%&%8IXlb>uIJz zeS6D2DZue6e;n6UL+lQUTgoU_IetuGALqcE0zYNYAP#Yt!NP-<8Rm-z8^fEzQ4udS zE&s&?LIy_&VYO(L1)nh;WqlCA9Abe)s4-_1j-#1VzMtuE3s9B_-7dm`4mkdoI{Z58 zr-ZU7Y$WlWV&(ox;|Ik_fLehxeqB=dx^oaYAZQ2>#PW&Xb^S)NLLK)n<@V!Z#%@T) zO$g{lE=Oo-2KK>q_eL&dY`osLsEQD!zh%?-{?O31K+W8Q|CxR>^Z%jgE7+pyzPD$Hp}QL; zq(P9DR8l~sg`uQ-7(^O|R6$C*8|khQlmVr?Ym^#VKuHPVJv_hv`vKrO=gi(~uYIpO z0PxeTqiPbI=hHbpGd zx*QxZ5|5*C!}ig*gJnB6&1*xrmEU9^kvBRF~!4LFMs5KM$kX^5@j#^qkECAA^d!g&<1K4IOO~iv|jgCF^Xj3O@$$$iU9Db;^Qf-)Ov( z^?$~v3;!S&=6g|W&;0S*ZaQ2nC=O^C{`W1Wp=J9rZ}4hi_rNTQR5B2k?tW%e-oIZC zGZfUXZmt0h3)8!-XIW2ros!#q#+C49qF>SUq9$)6JER@h^+Tt}bg`WXUD|>d|KW>!kwz9G6BSi% z3Zt#}{S01WYX}vOOSBFj8oa`0#l64y523kIq&+UN zA}3#L)Fb!>Xo7@k0F`HLD8AKA{^jvmw7#@ z{vs;>Kb zk)6QP1tMHry-z9P^Go_mT>JWt*Rfv|zw~rGFa^~dsk!q`Os@wfD-KGcN37WF((%#e zn3Yf|LEE-?(BysL`1X=$Q@grXWPXV&&zIG#U3*yB_5Qr=83XlsohTUK6n0-l^VgKE zx*mIlYtQ_sKt_I+PR-Yk%R8sm*^)U^{kHMAHIIF13nO+h4XAD6OR^1sq@sf}q*SVX z&xDZ1J$;XOJK#x3B)zRfyP>IX`bXDpgV`>Ozjbw~mjw0&%>``*o#zlK&#fxLStDx< zb%x5RBsN|VSXQQ{#&7-UW%8)fDNf*hcej8m8+jhFkn`f{rPHW~F|4|&NTJm)l1(pR z;*oaE#}t!i%qeJ>s(RI+6PZ=m;X0D}g;2Iz-qP{RUVtei%e4{K43s;Ac^M0|i_=nM&;*IT_O zFIVnmtLx5=2I0`}OUULw`I~u#&wLty;U-MNq8;hmc#8EjWiHwUQS^$8D4n_@wems8 z^Jk2s^f2m{%}0E*;7Zd45Q`4RKFiI>$lQBoF+fi#PDEOA>MB5+dX1j=%t$Wu6*JLR z5HTMf2Xw)HPP97R;A>?rSvndh^g{p+ZRUXTI}BJh^Lj}_Y|!pArtrVg0Qf=`2TAg; zN=ptp(!ENwv`xIll`#Kc4*ii2StsF2*^MPmZ9D2NWdAtT7y3H4zZIKfDfE_?`>G-vT}!I4Rkh zl1l)Ao(IK?81z~4$!_l(SUUms#_}%pM87q)o)Pso@8p232`qN)aQwfDoUIQRuV7q7 zmc>rw(jN)s+Y-AM(ORpnLdC};S6(VA#1XS3e}_%i{~>tZJz^Tg3?#X5cIbZR?HU=i{Gi4&j4uWbf_O)tFdbKTfa>z~Ywv3c-@+7aeWah)#Q0_OC&#|ipprn?Rmf$!Adjf(hj|UEn*Ou zsbGgKjdG^r{AGv!tT585iG+|$Sc>9}-0ci6fLNtH*3II1-1xVXsW9tfR%upFR#_NG z^6^2*INRJe5?hg|FM7Pr;f5HsFkX?aSn8wcK|Dpmdqd?@G7ud9*Ks^Gp1Js*Ly0F) z(zG5V_DzQo{gRj94dBxZvqyp7-^Y5$@~Z6?HGF}~_b>NZMT42}Q^7F6hW86$QjuD` z5O8O5b)&;EaAU%o0VP!KJSORHq{qiH*n1=vcbt(lkci#1S>2+I((H;pW;O8I7YOeA z*AyIAK4+F@I6uDpGM#~S!a8fHbP4nI(` z!YB}020WU_N;u+d{`KcEd!cW5QSEyvHA|DlTbeDC?)@|iEN2kE@A_Xqk0o+N2*R}p z7ax9$N;s9Flg=SjGlF{dh`##%jxhPIyCYe07RE7=lkEO6I)%`$BNQ#l?*~n^Af7JK 
z302*{@1DbT^aLzQe&%z9k&|izX*sr$Bx9?@mAIs=|GKoZB+f^Ehib&toZzf_kng86 zm~h@9D$~ON*OS955-e0&p*2!-FN9~a{8SRfi-Unwj4RTkN1pVnI}XRrYXc~V78bqL zJ;jErKwttN!(*_2Ra^ahSabcHoiY#xLY}b`mpPIYFJ{GTQxsq1oDRM{3GnMj^-5!c zB4eZ@8{wB->~t^gkZvsO)-QiycaP^+4;3)! zpN%|2f^=$Z&NOKLSp@7GPC=l{#Y}%YwET9C`}csM1#q_qT;i-??9xd>RjRw7No~y~ z;`p0hTu+Q5z-ZB$%H!y63y*#S7dB z%XE3s)dk4o`HMj=+!xvEhu>C{&xtwRLIOdG616$ zLTq#b)I^%xJvD#g(A>)0b!56Lg@X1Y9j(Yt>1Nb6Bn4oXxgO=<60?UFZOW36OUJGB z`>nQnph_mw^!u9ERpCb>!~T$#-0}S_w%``YgOB?37l|u&K)`kI zmWVw`p`eRxqmT#AU+T{HY0ejW0MhQ4ZqdvM^ zdHzjfOAzQi0L=#{J*@WBj>dxrEg}|yGN=OIYwvxdg8MHz-K+Z1rUPj zm0fATUcOR$x*8kyj0{u(HR-c4x|&*sqmduwIj|vlEOfJ{!yqL>vIj=}OAf;EGh4d> ztKP?6OLy>ZKJY8UYX#uB)XZ%=ZRLa5RJ+*eLSNC09R8hY#lqPR#fSGt1U-|Ul|W(? zAv{#kMv26tQoe}9EWGYEBU%+lLKrIQ_?sdj3|4!8jDk6;5ue;$Hq4^+ZBp}d|MJsB0!X%=M@Lm9VZ|gWp zsTB|oSQr|7W_dYz*@nU*N;dwoYiL&xNy`@_(4bCI?Zq#hj2eEJoQD5Xs_t0KfcU3d z_#;SO<6buS^`npRWijNPp7AU5-ar4^y`$Wq+?cNmCXmvT`lS4FlvB+x?g1W^R;b1Vj)Nd*b}d9&l=UGimOQg(jJC+X$Xn;#cnVWFQBziVjC<2V4S=73 zpl@5%9Q2${L73svD$)F1+k9E<`gz6A>)oUnp32rfhQ?FUTa06xx!+flf|F|OP*N5W~vNr_tMdmt`?|JN_xB*xyyx;Ed|jDvZ_$5{m2} z;kJ>Rw4rVXA^Xd7=dgA9`V?PseQ^UX9a)zM%|CYnqx6_KNvpU11_mi094$la@7kT| z>Ck}|EI#uH@T(8Q<|hA0rLdkoYe!jGEOXwCQx6*sh3GL2&o=NKbaFY>NVEVyEPq8& zctj|tq%h&={nxSzG^_66vWYl??=jb4%Zsr^wC_nsXX{w5%asx>OfOdGdI*V=OX!Ho ze^2zE(`oZa3G~%_E$R+U@A`nEU0N8rIv(YqAf=8EROZT{T;&0(TLxe5o3!&yhYX0-8TTJM>mq<*?t zG)M$D3E)!i%9#sqMy|C%nqm$^4jnAK^}d`Qcy#W|xs zJ^uSO#hP%#)k)kogd$qTEopsrS!Bm#{=wCDW5cq1r(-DR_kAzgwM{MO3u;5OqNxBAG>_|+V z;L5K!_y&s?;n*(7-jK75{&@p>5fcBqlffz?a>GjZO}18fC4)Ksdw>;$P#xr1fX_D{*f+2Yeq`5%Q$?w zYX}zfyKS9KcoeZkLW!-7g_2*oHif^q7*52Pcur%UhmqYgU3Ig5PHxXqPy8r>ctMEW z)-uu=3>Elsod4tx;}|%HT?=UCMOKW`DZfZYehEzv{>WHpcE_n;6rDB2uj0L)h!2`qE4-#MR-h%BcW% z2a?{kuxz@CY2Bp5*9eG-?ga5iP!w?c+4U>~7CIGm?o4ZkYVPn&<&LouR}D}EgLFquZtULlmtcJ- zmlL=JR+k{@_`1rV!|VP(w&YN+v6(~4cbSEHT;hsZ3|gZD&Al#IACc8HmMu#Go;ftk z8Mff*Oosj~W__D54Wfi(rpj@3id0w|d8apnF96m6+!x?1%&Lm<?Jr-_Lm0!Uu`OJDU9_FN)r4~oPS;T^XppYn8sq}{%W~3J|5f7D0e8UlK_1M8l1fr zC)H66+<4^^JqAqA<*)rTSjU;J#i8t4LpskqeL!Vn{dj_(w3@TKY9YYumV=@k6MawY6WA@^?Kf z6z3;OS7#*lO2l`iGK*N#Us6%s=(wW7RL~$uIWMN{m-dLA^T1a8if$tL)C`;l++$Y`Eu^&wtrr?1X!x=JPwS86ahx79qQzpYIFk{U*O$B^ckm-n4 z5bj{&2%^SDRoK%jo%-=M)Q{{B2I}zPxBLs<)Emq> zSqRrz^<8mgT5Hr%yBm+#Te74-(o1`Hlkq&{^QJ6;UwfBr{{!*)4cXz5wa-N1SgI@d z>|lyMLX$A!6%OK>uqBr$z+@ch#iervcc9bV zHy^kYGj-(1PJQ^*IsM^?<88m=j}h)rQE?NRd(dSk5J%th)pxy%Abdv?nWb4#$q%q8gsgIWTyqSj>F1;}68Nn$*>ykPec#xeML;<^TSJ>MMUb~6a zaT|P<(kf18C#~BmN$0l8rnvJe3;DDh@UF!n2OwXrV;$_Y_&^NDOwU?js7Aim*@A;{ zD{6BLgBt;c|v!LU93V<{F;Kk zGjtl{6%C`iT-&w9=MkUBAteqYj>2HWTNMfFK=%=<@3L$*eW3gY+0^l&tZSqRO(Ch$ zV9l#f`6^v3bE#tZ(@hiLS0Iu6i4T(9eZU4|D(3Beh$o4c5I&HBlY^o(FvYwK(~5@+ z0k|>k9t;YND#eQG}uO8^o^H zmNT5QJ=|9LZe}@>BO?NY0AXZ0h4Nn)q=uhF3Pa62P~A%I{eBbLB-~(uaP|urC~>ch zhco(-;`^WBAY53Pa%yE?5A(KrAQLzE1rtvIe_VBF;k9mGFGx>Cwru?%^eGDOjG14k z@IKSC^WP0y_`=v@FuurdbCx0hG98@$h}Gd_qbOb7n=_07cWZ|USUV@%5AC`m2<)0?j4BIw z4kHmJdQtT6qprlcE{~~W=9J?Kq0Evy zcY3ybkpD~X8N(rf8@jvB=u-8{imCgJIr}|__ANj5J3U=hnA$$VRaEXpyan4v#3O@H zq~oON-}(4kPqJtcIm8xAi%2(i*Y)0;Ltacf@0WdCTamYM5p#S!5#rcED67b z)e>EJLXySs*CG;|!B}aTv0lt2Aaxp8;9`S(U1=n}REk$ce&6SkUW69WoTPmfI29QB zk%??rhxspc+2N!_gOo~O0o3dGPT}eWK417s9?+BbpFheeJdc63>?Qp>$@uQ@uyOt4 z9mfEvUkP$}ZH};w9HihFoO~u|IBVwfqv6~lAm!Fc=r;gjStEB!nOu7I^gC~ZE;h9K&;6(cZ<8w^1L0&6H2zVOAC_YWiOjPb^O97YpIhiV8$X5`b9T`Vjw?Hze3 zmU#~oOI_*zq`Fp_XXAmhg%*uJOrX}Gbq4hKcX_Yhn#miU|FFx60~^mh#9-}B5s0vO z9d0y(v+~KHm&{djTc{qKv~EoHvmHFX298@jo#MQ%QnyJD%7{z?HH+f0nJcNY7Oprq z0lA>ky9V{YvUdIVk2iyuy;dZkh&ONg;IslCjOI_(miXEBtM%~f24*u%h?6@TQ~LYu 
zX77vRp(EuS#(Y8qc8C?;3T(bDxx0Y&7EfWd#mATlt1EfyB^`|7?9_Dp`vQ*E(TSAp z#L#$vSvLO-*(R#g`)U32=euN}29}nkn;}7i$KN)lY7_a(`&CR{mYJ=7fNGAitw3Dl zx@@ohHr-qL+Hx+LD)lCRN`^&Pr}R8Q6I(#KZ;}F|q)J+%AB%CRXWmr^zTOuL@fik&f`~8>o9EbIfe*)$dg)4wH0b6JbMC%}IR#TnS zILN4efo4rW4#4T`x$pYcJ6w1))9p04eqJKMRu-X$n*25R8Qi>2dtlsF=+tfz3)PU0 zrWGmIG_@$wZ41H~ZuF{I`Z_4`m0q&+|y2kdyH%rD6lDbEZslr^OZ$nz6=h47y*-43g4EkTv1;g;5R zQ}lfP6Pa)Mk%N4d1`d%FnH+QVwGjOMCw%!Z-xjLpLe{__QwTKJLd@=Ku& z6SS|*g@#w?t&P`<+8P}%n|>mHXZ8w`WLnU=tP4S3?We<<Y^O8USIc5Jq6 zO8?QtBZagnpPznPGb%!bDz`mES>SXL!TAs4)qJ)wYgF1OrJd^5pQdwUg>C65VP-phr^;nuMzOI?;3VMnvUIYdcEEEcZoTvfH|z>vTGxJ zJQOc=>gT@$xS^L<2Vh4vL=~@Kaf3z|uXX2YlGuXdO%NaK>_yf*5L6xb<%H%#bJ!6_ z1T=5ci56c6GkQM5rC^l@l1g1v8g^~sl?N{Qo$atiMtHF0|BV*SJ_aRI;q!r+SJq8T zBSw$~Kl5ZCJSMGkP1`5R&s+#5H^Fv{Hf$?E8#Kl=+j? z?YX4?(m1<^`ncyvu z>Ilinwbz)S<=30bqZMqjX_(~PHx&gSkjeQQXIg@bV~v9j$Gm9Pvy-PEEZ!~dnmn3I z4cI0e=AhI$yyAa8UQ=euCelO^9C$KcVh@pSlMq zEK6__=%Ll*8&7U+Cr1SX_>#oTH2RrzzVgtqjKZQkr@-A94`oayg46;CU=E>r-sfx2 z1YTHAjsdj1*Dq~iXNJy}+{}LZ|Ek}Zd?qB+iVoEG+b&>Bw!FPG7C%V_Oc_HNU=VJa zQP@eFH-hXMEG*Wv9U=1O*L@Z=%h@9tk1`~dsRaswYA-wmQ`3M#oCSPX!Fe)PwSalmy3i5X<2BV z;vveE>2*FOh^bMnWSrtc!xaC~hHi+@LG_733q2)&d$AHywVKOat)Ay6EtM&%uY2H1 zW$U~1CD7DJ*0yMR^hl1nBn+P7%-xk~&E^>nEuqHYhSS6+JDa_p@9j5Z1X~;#q~T8i zRoO)Y(61&J#mO`v@8wvUlS=X&6f=J!ehxUP-=8Kdjz}`J4%g`6ZQpM(^JW}j#O1X7 z9Y#03(IXt}={NAqLeJvnD9yO72|(m)%Q^!lVk`{U=uE6ak1CA6RD^B)H>?;m{@Nfv zRvx4Q|va&;f@u&>)NlnS{`Dd@eN4o#hJ6p)@R~yfn<&1)rxeXy@w#KoNUsMoY%awos4Y z<1GGhPtwTB9H#t_?f5v-NvwiUwYZ{4egay+>2E7-OYYEbW`r2witPa6raxsADkVoA zk0W`>s7&}yR(S?jse|dT<{J7h@cE=u06{`A7PHkr^Xr;u9-gKbp*14?_p=WIZ|<0i zL~)9*iAn#PfAP(jj}auLIx>7Ozwk}Lu~bXn9w#5}sKjbTTpG%7mc zv$Vgz5}~)t&bJAw;LS-UsI(<<2XRX_#}SuO=`jRHb@1J_h82S~48&OA{(E2pCryzG zW{P^yhLcJG!K?pi?4hp98=utpSK;Ps@z+ehaE1RFt;zBP9Lsc3-%7| zj!ln~`{?x3MU)pC!L!VP8!5&$1uk30 zIeyM3+x`{|RlL)Q&lrSCb*1WXk`vPiWRNLVG^jUkHJ7*|s@Y0u6(Ihj+U`ZKa?IS3 zC7UXXc;QOy1U#W!t$M=RDQI&QZ-%uyd8ok{5cO^Dh`efSLe6=-N_(^yPAPcb@|x#15<^O>LAvyYVKt3d{!v@A+OQ$?D>SW*^( z&t&&QuJame<;YPuX%NTz;8DS?^l^y?A6XtbFK2g+{DlBF)oxYii0;|)7yVPoOgVJ`t{Ikm@Qx1`!ykV>F6sQTA?Q)=*9RE=mymWk-4c?H20#OMOgBy=r#Blg>(hxN0+`H_Rx#MuQYb$fbx(YYaQZM7Hdv0N% z`mK!jYH?-%wC@xE0G;HKnaF}ruV*9H$BE&LJ*R-TXh#ai=RD&9e7 z=k?}Xr%*`HX2$3UPYUBO;4DQe3n%MJt#I^9jy8{o6U+<>f9Us?uQZe!H66Wmr(P2( zz@m!>>?r-2yYJLfGxrQ$l3)R}*{fX<_qNq2@uxKmpS!Y;b?CGC2XZh5rfsRgTOP`r zqh=(Nr-@BJi?WWX#=rjC_oyA`>8N&+PCxY-CxE$a&3!#dBkPJX&=LRPxb>hU|4#w14?|p z7}YSNi;a z1&0p~JV>dm{oH^Kn#FO;gMLM^I-Y57`tjKnM@=nd(y*H9(FsZgyc9>VuT@c^5)NL+Z?Hc6Y$K<0r8 zL0cl6uc(?lA1A{q>Df-U_-PTjOsjs^_^9k5bPksf3Cvr*JTRWQUapwi(30_K#Q*QZ z%Q>}JPf{M(QsW~A9@?m;X;3{#PARQ2kyv`2bmudkdT#mq>rsiYh1o&-d9MHQ>u=So z2MmW9nk2NLWiLa41ae##Hrrj5_^*5S6kJm5xEY8*ry*n5#k959xu}no1I>?~X;Tkv zvy@w@szVcrTlMJC*FX8bsSshF@%6qxQ%+5PuNK*MLOn*3CaMUSU0!SGKiYjx;E1eb z%Y&65c#vo)uVS-~bZ+;bMK-?>7}J)k$Jw0~ zx!$+uUc^baFFzc+w&w#fkXgRTSc~(|4Tthq$*p<5uaMi_L_@KXXx-z1h?`f=EdYUp z==ViU81JZ!4?eN)NkF_D_?6j>eC3wO+j9as{cY$xpacB}eBQ;d!(ikH zlB-kiicN>B3?oAr!k;ZRKlN6RrcG_j-N3hUEj5kzLru@bDUkZ$CC%o$63Y0Kwjqr! 
z6VSo}PiS8v9i;&{r-3y|0h_kui2zF5n$LrLb}=K8!dHi_meM6iYmM!#8ujh?1<#|+ zWJX8Ac_KyPI&4GcZ*Sf$QXZR|U9M{y0x2%;*mJG^0CgA zBf>Ty>#mVW*{wl_4V0t>)ABsf5{ZP)8F<2Z(}442anQCBuseIp3Riav6Nu=&fPGuj z9i)rNa^(~6oPo6Whv3A2tpxbmCXrG-Vyx1UgbvrNfpP#fxp`Z)D>jU#7TYFK!6|6} z5P43}DhB5k{EK3_A{sOdohxXJSxpCYczFTdNi%OZu?OuiOC^OY;D;n`bSHwDCa9mPGG9VH^Vd{uGLGq zA10N&{s73x?2F}J?~CUj5^^{=XdWDu-8Tq5awXkxkk|Ls_aIQ*F&G^^Apm9YmLpB>8Z5LsofNuMo|$khjOprM6AN# zkQ^!siF4=@#+w3izI~p{!At^`PR=YP^4rrdj|F51ECvDkgf3pZBXvb9o7wid4i!(W z;Y(XV`?dSW7PE(eS_yY&nt{ioIT7Tq2J`rd5WQg9xA5Wlhjy5VZ?NwBZArQz3F zRXJ4CryMk%&Z4l&+*g&sn;Rn*HT~tN<8BVb1w)u4%cL_9Nqgm=ig2lSB?ovh(Qu+1 zHVPTC2T3l3oE&I3mWnT@r8r58*N*3nz%gj}R?h~GD`Wrl;SUaLER-HI06uFapk9v`H^E&tz|&ty8~)#{zxQCa|P z%zN6n%r6}dfBo~BL=tc#L^?Pv^g{9XT{ncQyXqy!7FpC)Jx#s@QSaiAYR<7r z_VNeDmopbF8_zPi9+YYstd5kQ{On#q8(r}=399Mi%T3)$n)t*&0m9|V zWjftB7PD_ZK}k=L1HDXXL1#bPv**!|g)g_&h6G1O{(Mrn(C~9s9m?MY2**L3;Ynv$ z*@Yn0cg9>T3AG|(y6g7|aV>!q_ZcT@AqjnA*z~sv^gM0Kqzvu$PTS>f<=jmy$P2J; zsMz;IK&8?ULafLrs4`+};!-XIFwH5qc8cvsX0hG;ep_iqKwARqxEj{mr4X`>e5~B(nYRbLI6RN4!DnQzf0BXuTi+&mOnUU%R5g6;bX> zWa7Q9EQ>PhSHbz%2S^aTxi(mKHOb?O8El7Rt7k~-a58SjnlZZ0grwx)99;B(nF0Cw zdH%zE51^ObH-MV4;TgPRG+2bhanL^oB#OIZ*`xhS7+bhPD5vhJjJ4a|S)+s!Tz122 zSEKw0+RyxSh-ySR>pujt`jTtdZD%kK+Q1(IN4G(^)$WE$Mro9~I%a+w4NhLia>dBi z9G?TEH|b%(A@bAX%b1$$fe>-Y8^9A6Cl$fzyQa$Lm?+#IOD(^t+UqK6MUtKILbdD? z(9c#L+RqFB< zlWKm+J8uz$YLR~Gxs^_A5l;2hk;*^AWyZF<;@i@5FI!LW`}p7aMRdw^2Qy5LMYUJT zO1BX4WU1I3699U|auQwl&L1rsZLMFMp?@dhlWPMnE|30Q9oAkXg{y4l|+Pt#8FKxo6H<@B&1YX}m%O4~B z)%W=7z19O zc(-RRHUE(&&E&WO;3;u_vLwjNZ3+K%XccQ}_c=FCU9~*NMF9jS?#sHk82+6=(7;&-pv9u* zEd0Z|rOi`YhCzb_R1b`N^wO8^hrkE89vJ5T8q9m#5=AO4hrrE8gcrb$8_C=;h(0W| zKyVyWV923EhuYOYL3R#y;-^^TbvV(8_t~3rVp&*4Hrv#c0aslg#e+L@!jrs@g0w)BnHx|r>#gw za8Z$!o13G&(Oli1-P(S+EJPs|5Nf*L@8Q^^NLE6XKoC8}qKi*RKR8a;heFaC`t7tN zx+y$(@9=K-du(e^98>=Uwk_@Wx9X{POKXQWY@Nr9E4vd{eS642aOGF3`uSPJn8ag3 zhh@(`mQz`r`!ogpPScN}?)N^CH4!lILsu^e{D6D#`^c*5@Fo2FN(F}n>oaf8*QBDb zneqzM@*0=1Wpqa-a01WzZWa{~PAizd3##rsjyS0;G`2Ey*CdZK7by~U>2~JnVbYM6 zhtOB#M2_E1-9kS7U+X-#i{k8W#L+#&3HNG;@l>~u#!Z1r2xlocQh9*lXu=@oZ*b?V zb*H@7BYnJr*zFB$0MkpN)!6n4ecF}CX0r;Iko$OXU>G}gzVFE`J`o=1gi9BqZd!Z# zwvziWkpsUU#khj)Wu^I-S`mv0#LY>JqE2c$KBLYA<$<(n81QsGp41VHVtB$vO{$pK zdme?+j-@zkUj&m`*{~pJslyD}aZLo_SfQ0YV>hHN2;-&G#-Jl2jw%-N^7!I?x;GFv zE5RdTFdyoS;o|1v7wEo7%%L^<f(*<@kZ`m234|GkgIh*}u5yQ1d<(tpE>QAp!UI zG7jVfzP;E z`K4>U3cs#Sy2#fmQ8#Bmi9IsJME1BYdjQJ{uns z+&n9aK!2R<7KX=B;k$BG`YPhTQv!Gbt2~PGLTzQM+?K&Cl9TnG*;wo>of0Cwe{vQm z^PUF)6pP=;OJ)=s{x1R%wD30Y_L8~K$!vFMc zb00>L=_+zay@Lt#Y5bkL?ZVCni6Mlc^&p5MzG}B0p|2wV&3jPjkik}!!)W(>9!Hu< z`$N^^nAH+wKYbzb)t?~#j4CvexRdp`l~qsr_Rttt4NaHpyWonV9v#QDh?~xWTKccV z!atnCu!{;>ddA?ihT;J=TRmQn2kihrBA3m}JY4^tla+HR<3$ARZ+z9R2W7E?P9m-> zEdwnJ+&vPxea$$9%!+x~3C9;w7I6>h_sA|t@k}nEL8|f&%Vc%ijYk(|*8;CczMP?9 z%w*@qlI{(>1GH>Yx=*KbZAHc;#K?C1C+Z@4&jK9{H}srI?_=P;hW#QYCxz;7eY` zLE=b&@WZ0+fD4GJgp(p>1wuUPJGQei$X~}vJeRQ%kal$)slQs2Lg3P-=~bc z*WC$`RBchfN;K10KenYnH$#tr50{A>4ZvWF5ZHm{H#~kXMhvK6!7I4uov>!~b znPYCl$IrU|HC27R+?_d{Zeq%YD=0aE%4uaXeY3#aps5yYtB}TjrJ^`JXV2^&>C!Gj zJ|0O$p0O~w3vRz54hUM#$d#9{FE3r2E6KS03%xx1zYc6h`D{J)L%m=r_rxQK+wsHb=kO$8!1;`{6 z?LMox$7bzYWx(r6l9<(WKMg#3OT2a#(66*Z1|T* zuu1o&RVeNtzr~;EhLtd)H~Ew2ee7Kh#5~3(){&Q=dG$-L`x+)S`DaA@p63(r&6(K5 zUgj#&YTYpfS@6X2C(k?uq~pO!{$iJMWhWulUXjYEmyS>umU}@vRC+|rl5w?TeLj2$ zx8Q*QJm^^y3qitN&(5e0oJ=BhAkBRC2m(f_o z3N*Bk58}t}&&8XSNvRwZYT6)!R;X+%;G05W!)0eFSN%OrAe~h>XfR0s<;eSFOZ+ma z1QbPtC%YUWo?7tzx)0HA@o-=ok%WCLcMz=^f4Z za-KBQEKNkJ@BbC`~UR;P=uI4GGoayHDq6YXWY;C@$-22|NWSA&g;C+^YvWL>rFflX5u&1ss~1W 
zH#DoWB{XIW;y%|b`+njdg6PLfSeNDh3Jj6XOtVLaJDU~0$7gL%k*K_%T2;j5YSjn+NSf5o^wKv3_g*y1D4oQ|*(?!gTq%n6as@_KdqJe2Cn-+}nkioB|> z!?0I(PwMQkUy|0abgnFR8UA*@gX05}{jqjdH=HNPPAXD3jha~s;%V!V6nMf`g-cCj ze*}H^>%5ZHg7lvnccEkE zkxnEJC;n+4yFU+@C!hai<^R+>C=Y)jBcazIIlQEs{TlXkR4tqB&nt}Aj0NnHXxd`{ z_EgaWsZY`WLC?n;ajW5&GA4hal&z5z)Y;gHX!f z^tIf5tNpF`h>KUVJYnbXENwVp9TsFPK3Z;wEVv1~d2w{w2bt^b-2qrUY0j&%NF=0tLjI{FFAX9!|Pg(A9@2)Q|Vr&P&1 zCFc0Qqq{w#byF=()WSE!Ab=y4x#Fim0*Y)+ z`aYuFhldb^J7&kEq43)7lH)YD2KmFYm>aq<4PIH*zArNveT91z9wrOkg_^$uAB2u@ zt0Hr8JdsQqaYam$)7WVYA%iX-0f+G4TyNoUqFliuBJ!83CMLxtWU=NlN3S412p$Mc zvK3~1y$y%mSjH*QuiU76sz;-lvov&`hyBpoe*PTy+LL<04Lk5>m~5+?-(>EY&=#Vl zVNnynU(2-?aqYk5wiZo`Ew(eJT(7-6iK|=&-D#w6WA*CUj=ZIe){uM4mIO1HV&hW1 zWzg#12FW~jK9{z@X}9CVYMMg0;aq-Ddx?&0D>*4|o+CXoGxahp)1TST`3gooLB47` zc7;hZN=7i~FS!27DJ9D7XTCjs{Bi3V39Now>%; z)17BU6%}ZhMn7rE!T5Nk9tnhv#2i2CXY*OE;-4Qi4YrN=no+xwB{GD{-ql)a7qHOf zc3{YY9B)AQ2+w19SMq-qizQ&#m?KW}o;}Uk4)B!q$Pz;AcV0~pK7g(6Gyh(>zr7f! zH*&0{_DZ;ZnY0U^)wZyQtlcpzYpKL5kt3&|*~<_oC`NEO4VS&J7fGS2JTX*w=LsP^ zcZ#f`%7N#oRJ4+8e?--v;ObUrc$mvx+rST(7c;NvIvD|VLK0#xeAKRe6&b{}zX>o* zS?5oNh+V0j=>G35RxoQXnr3y)5t0nWAS2(nCH~%j|Eu;&Hz_1a%X-Hl z&J~9pqk$9?!pT*d&<+{{d;PN5R-`0*|W9EfDfE@kisv$U6pZ+dA zBoI)CzVcQSu|`!OlkmI<@Jq2Jw3ys+rB<54Obpb~zAH$V)Pb;B6*blE z&6$%Kfqhdt367F@wt0xyXJ$M~urP^qHmSELN$SXUWL#8>Ko3OEr-+yJpRLxb{rN0Q zJ+}PPb0`jHn8j5E40`3w4*|&sj?R+zXzaUfqSrW2ya>?Gk&6F2b0HL2F#YVwO`4SW z;5vsnr$&e#h4JMfvUs_mUm`wY)Ga3RqEun_dzW))|I zlA-kFF(*Qk^(H-|E5mVoCJLnYzUlWTjBnuKS|dp>xaZYTNH9BL()a!9Of+K9R8JWR|PBp!t;-e>X#Ay{?|VQ zqU73p+R$3?JQqSr`;ToytV&e5&cMNqOXsjLG9pxYrW>6yyf5B&jkwb+dAMvak&P$rg8wbIq#}iHR!via|j+;9RLM9 zUg^1mX4037A3%I~YmnJNWK$z%usu6V)i5d$mgMxWT7p)kT*c;J%4HF~0=I7Gd3*rj zMRlVz8?#5G-_*)p-J2`*GwZBzVHT@DA!pFSTufJ?)_CPCEs3rPSaSO(_(g$U_|;|a z$U;JtSS!$b-~1l?L@0h=+!M>+*zK5fyvV2X!ll=Le5INla1I`c%e=4G?Y5FoDJsMm zE77CUQ9}ak56!4n9NxXae`trVvL&*W(QFqy%ekP$&iW?fsS6D^CnxBxrQ6 z)up{4ct_B!fn7T*I!^pzUPGd>-1*nBUSelWvu@wCaWx8>cqWnr3|rr21&kLlSnI{8 ztzYrscA~^qHJs}0x3)_P-&qtc*+;hCKxDeu_KIL+s>M0cU)>Pn(}=WbY5g13L=R*v zgwMmE<+YOfgEjjJ$Ll9-yxt_JouCGAyz@QtX5N5WKlSkHCAnLboU?3U=MTRaFvD-L zNk`a03Vx~KW8Hq%YAu|{B0`vm2hTc?)=Y>@Pd7mnvcme{8wcUdITMbql1BW4<1Ic!GY0@V2<-W4ZsKGc1^; zto;#JvYsx@yj-+da3|sTQRm1e>aBp>GjrC6rA!vI!kEWUq;SIG9$?*mY1-iH+lf;w zd9PgRQPnp>1?arIOV;lkdir&TJ1fm|Bx+2Tc!>&ScaIh>QjKzLggQjFyg8Lo0;E47 zF;iEssz!fayc@$Yzq0jwg6eHwt`py68l|Q;R@|;;qTzfh%}Ty(Q~!wFd7ejq_VvgN zyEw)xD->v6<)lR)xdS5v)40<<9a6(t#M2`ET!jlA|wj{+6UxVETCMu=^=A&V{`U|si=XVxQar5=7-!Lzdc z-t<@&-X|K8OGg6hII-i?D>-lPEUDnLpad{yhEQWqNs_EYGBkV$U^acD4TNcBi_{}q z?1Fnbsc%JECn400Ud7zUl}!Chv)5Vi+y)4yV!a>jb94pMcbjE_%_V|Un*ABcPYpr1 zXY$+2YuaYg8mhpkjZv$n^U~Yzv1tBMmm+gdzAhpNUP39Sq@7_y2YZEAb1@}~;T4-W6SF|z*cAFC+Ke2kYSsed<`T4)GGUv+b?;fH_j?rQk{n|4+)Z%mHh`_K# zTdEK@|6Wmq6!IxbrtP!Te9VUm7F4j9hUJPQMO?o>ua(EYp7|+9m=)HYlV3-=hDkYF zDou4K&f0%1P4&zhH~OKwAgB+iIme|xV)!Y{XyPpfxbEn!v~cWRZ3uc@2wi*FUU38| zn%DVmuc;B^fXU^g-#^pTtf~P8hx*x75U@9KOKh)GktiN^utN{$USdWEL>sWkNf=S9 zjPCoK=20qs^QPwLX#Z+s#{$~CEq|-3b+}D4jlZ82;JbySwU_rhkjvFWZkPQC$#LBC z>)Ux;r2lqFgKDo*pxi`6Wl2VZd;b!|=tVBHU~0f4B}=`qe_>FeyTTN!=6-pVRl0c& zz!_6*{Wi|gtdHY#w+rbXH(Fc4=xMPx5K0{gx3^^-em=|kNEmGZwD292 zq2LB26N_?8Wym*?6HJKqwe2broGiE{JT@FGw zU)E1BdEsL7;>EQOa(d*AduP~B5 zIL!8?Q+fR|l3A_XxD1U3k${03qwTJHo}}u8c+Cio##qb##ip?R#jEk$&d|E3!xN0o2IF+$+r|h ziMI~w05a3SroCnA;0r|sjj+8%wP-#o{Y{%r<*qL#iqLr3wDS5N&*?qa!z$O!LHN+^ zEfRxFoy#c)P33)5pNwVuK1Ejw<=4{Sw-R&>&DU2jO z-BDeaX<$SJmb=5CN?^8@`Jb8yzJIPBGXXkDtc;F^9kB&0bp@cs)OPL!t?js4zDs$j zF?{#)pa}SkJ9Dd#OA&6syOloh`0bOKt3r3I7_(c_2aOfwg*4D^zqN1+YWCho-My&S!?%YrEMk~Lr2kbFdRwnZOM>O%qZ;Js2% 
z&zV#|vLR%_JIwpH3Z8yJrIQ@ISUSASzkY-LD8l{YfP}13x`0Vp>;864SAH$(WP14S zjClFLbT-N) zId;EFndl0k102RtV+us*OVG+?^zI~=KX#5XN*J5STk7=*46$un<};3cFnRs2P`_3C zL}HSRrCUg5b^y78iP|V4vHhJ*7p?oJ=X#1_=z_7psT=+W-`Oq}-k8*D)*HD9u%Sg- z{mXGSSDt(ygPi=7OL}B@Q+E+_GR`6*?t_NX3%9ul1tnK6ud<(q1g_)T{edl2utc2p z{nlgafn0nHBPFoXVZnmZsa_J5*1=5GlX5!9B={x5D;ytaJvr;Mth-6Uvx2fD(%p%Z z8H%;ve*D?n)%GH9RuFBJkbw&&kAHwY6s4J^sdHQ)Y5ro7vhDeo z&t`E8Ukt=c6Ihg?Z=Z;iLto-79%<`M?$b8Nh6Cbqp(kL{qA*Nf&<&3&;H^Ij!e7YY z2YbCeXLIdV4GA@I#{cnGEjo+`1i#A-Xj)BT0HI+;0@3 zlH2p07o1ZRj1`9asY@A>Y*xX{xMbBf1eLd+ELiVn>$3 zEuCu9Fcx=4Huwm)j^H z4cJyB-93{zgHZE-?IlC^KlqQKzcqI7m4K`kY7ADWGdd`WXrv3K&ht|sJuE_Y9~Aap zT_$*pT0~SvFGFhYtWY9>poi!@WU5iLLJ64)xyc}Dk35^rGL&>x8ne_Kr?s6y#1fjF zZ1g{$aP--e*1;~{RiNPvaTm}m(v8ssk`j=FW{$k2p3U;n^L%+aX&}g7?y$TVaf^d5 z+Wq#>dAI(zb32fR^{~p$u}aH(;`!-ulkwA_4hF9sBgB8%m8 zrM~LaM)W$L{BFn4F9B8-8vK{m98hkx(vz_V8!fj0fEM#pxBt^6W>5-_TXFmiGZr8N z;a&4OimY{X4iz9Bq2N+`2Pc&;{|RJi%)HcCF?m-g^v?Bc%`t{+GGjF#6~Tau+C`4svmOxZju%GTsH$%00@{Gx z7-sJeQ8$Ek$1HeGtM`nMgI&vhP5J04I9gLB@D&p%R60WF2=w>viK;6r1+5`sw=>5s zfH-j}p=46w-8{qJ6 z3V04Vm)49FFGr{|NH3}2jPW$7(3i41+KJEp<`**NF=%M+||O#;Q0sVOyW*J?F`hzZHu2Z){O-(_9wjn zycXx|v8%sh9~+p>_(GkJpjqr{Q#bcese8~vZ+n4htGGF?tI^Z)MJ=$xn?Rl4q1*FBqkkA(N{)hw?lin~I|+=4#u4o^Xsa zh(|xdknkE}{;J3;6zj z7=KD1%57b}hbRdSS1UtszH+5pQhLQ3M`< zR$%`Xc1dzCZ&vRhAFdepWq(X1XTD&Fdh~BK&ONs7;SU?J+D5$JCbGHEOUr|-#3Mm$ zpP^&jd9m$q(zN$P1U8P`Bm832#sKA+dKvjzce_;>I|8O{Kha_j|2Zar0L`!^wbs^K z=HmYEcp@!qC)2Dur0`>+p#HW~#z%vPeLGVJ`#%TW*j0hpvJ%iX1LIz}tuVS2u*@C9 z6@5%uhui%bFd`5RdWw4~C9E`ejQq!nYe*E~63C8c_WP~`*ZXx6#V*~ESAA#sv(^#r zXslVCRPD$8N8xQAdxASZY0pi8qNfWB@&-~szFC!>|F8fxGmvZ_ww)Nwm_{wqQ(5uV z`@%-3qKfUr>$=t;wuxpn`g?bEpd6Whzm+|W9JaIUF6O+JZnc$N8gW?gPuhcFpeF1( z)E98tAPPV)pM+5WXeySCgmTqtxa?VwqX5KGx1TqyU(ke z^y3dSX`1&5A)s5UHm?pt>K}SsRji?E>{IMc%;&fxyrwLujR)0RCZv`=C5;wt%+DDos@+F5r^*W*ZxEv~~Z_ zc(q+_&!_bN?j1()%p&;Ajju?g!5)4oJB#D!Vt03G|$y>*sQtc8^m<>LzS1PhRKFtG$Gw z|Lzi3WMDj7OD-2}CwC?{Qua2cTHk}EcBWH$fL)V@22h(xGc14)``CycoQ#qn%X!Q( zRqz?+X)5Ou7QyCjSwW3Hv!N~S$wTTy*K0+mlY&eAKlRk!ZW;-WtY6$wthi$B_oeKn z;fPK%v`94ITMPW*rX%&e%0Qtc&D&LE4b-Lb@ISpARtK9qQ*6Vr8WSwi;-H^obTL3? zSYt5mk{e3Qa4)mIV^f+KNr*?zru|<%ssI9Pc?g%1KO2DMnIA%ZAw1ntmxU#11SrxN zat6g6&D-rS+n2fg7Yq8CCGk@1hd>cor;y-A(?2KVkVWchbqm~w9co!bI*_CB9hCt0`|M$Z+fd{~osFkB<9>hLL1BC$HFGrcMeYEHbs5%w8 zk~AOo=G5iC?D});i0Wh}qWbssfXtseifV5>6VkC68*Ir=OEdW~0tg^h2_6e;^enj? 
z^lylnVs1Tnr_OLYDDvq3+M`pB$ERN0TZk9M!#$KeDN^y;761cF!TtuqMMlpLp!&cv z-USu&FQo(d`m=L6AzoWR^=sicxOjLfK^w+UR9~x!P)5b692v*u3%hAQG3FKOR5HIK z`fJ3Vwxm!YDll(z_55_wlWqv<2;da0L?M@#)0;irCc_s1AtuY}-qf#E~Y*BV-BROTW1^n zW+c)h_cv#HL48I`4!THs1M;p$`Ad)!hcg_QdvLfH8E?{K((?F{MlJRqOwtTB^HCH1 z`o?mMG8REn8hbX5GlY!g1Xnc6|J^$laMp!W+E#_?_mRRCSUJ|E5REliZ7S#;cQ3UL z7z=twPhltzn>L!MRk{0qgq?oxWH4)X@fV}HoJ6Q(e!>tXl4wVykuMoZpTOhIj!jy0 z6lYiki!iLB{Qp;8M9+T>251=rfO+y^V+QETPK-pB|L*l!bir=;ZH()Mx2xJ45b$Ss M)$B@{9_Gpa0|<_eG5`Po literal 0 HcmV?d00001 diff --git a/public/weback.jpg b/public/weback.jpg new file mode 100644 index 0000000000000000000000000000000000000000..8c0a31258f6c45d67c0625848c992ecf365c1872 GIT binary patch literal 14747 zcmb8WWmFtp(>6Ld1cJK;W^hYzcLsO2;E>?%65QS0-CctOcXxM~;4UG2-1qZ7=RMzA z=g-+Qv)1(V>fY7YuBu&iRn6P-+cp4Q8Y~3{KtTZjQ136m+ZrGc@Bsk<3lkFu9TO7| z=l>pfxCHpPIJh{3Wb~ATBs3&AxRmUaG>ptFEGz^RT>PA$`RKo}FhhMnMn*$LBf-ER zVWy;@Wd8p>-g*ESh)|!PKEXg?0H86TU@)NG`T<1mGQ#~M>HiKWXc$;Hc!c+pGUxzk z==Vc77z6}FM1+5y2O0o_0ZR^t`2`+JSjmvW-VXtrMIp7L)vo zFdUW=HU*2JyiZPC9Xq^CjL9TPiU}6)xr|*G74rj` z=%OM6Db7eSiTv^)@tnww9!o&bwwq>EbUxR>h?PO?V6Y4}qVC{`kw5S=*>|i)7HKgw zlep&Cgf#A6I?%!b&Tx}hB7O(J-JN zk@yqzE`S#u${rd96RBsrmVkW6Fy4*}6dAYdCTa4S4+I{Ow38yhfB)dY{E{?b5vu*< zCPe^*8E{}B()v_QfP#jEN8YF_Y6pSdr8f-+%K8uY1P{h*<_Zn6fWlU#VZ+a_!M%8z zCZ?DkC*jK{SV3WNLsfR<^&4uER%HhX;*^MDesHSsJ^Mmgz-%r=^nrJwaD_hL!WMEd zu*kz51dD##j?*Fgb|=|pvK~u3LcS+vy6xZA)zGr?Y6x`gEKq~;6A56*L-U?dX zXvYv|j7Gcx#Qz?N5aK>xS}9#y=MEhaz7P#$AEJ1FZpE6=-+ew?r+fo+D{q-drh28> zUBI=xcnU_2|8dl8e*-|BPGfte*mWNcy;yetr1#;ixYPe}?}qTqyyH6&bHC*4Bk_1% zv*3et?}o8)t(QA=DBj2!pkWy=fGTgg;Hzk1a0ALY`}5&;#>~5$y1k#fA~b|xg(=tR znyFGspa82$;m&xfGySFHZxH6J2LVqPS$`?Q&*_J7c)4^}w2NP4mcKkc3f|#nk2`ex zx5ySOkJUt|pQ>ae_0lQzN}%xgqlIRzIwxtSaM4qm_pb)Pxw`Fc$H4Z1wp+m2z}RIb z4PD&?f5kW!P7#x8Q4?cOYFup3f#!!1ZW9wfk-kp}=&A554&>A-e8hRZM*ErXivy&I zMk249m$()iF+*T<&_Thhve$uD($%1_lcbMp<%rSVx1olOP*oq1bOaPI2*H$r0Q~#? 
zjxwbXD$A?biH(zTYtfHkaur18s||&H{1%h+S|sW#C3&aGHEIE6Kj})!R<@WAPQaX88%rpEavj<1#p28 zv++GYV1pAN_#mC(yR^#LC4ps(Ud-x3x3kYH`I$bX((42?1|G5wfdval)0(*epVvKM zYEaQHoZIb?@Ox^hN)g5?;ueVmsG(B(R@RjhCl`?qPI^gee3kg7Mz)v|sV)h%>64_q zqfbp9_I1AwAwzy#&&Ep+3Q~nvGISQ4mEMmpg14sf+>f#!7;pij4rq~g>}y28c81EC zxfAUp?=+Goexva$ObAE>KpRmOmJqc_&rh7m>fREG zPC=ISpZzBT%yGzjjUu@i5F}$k>O+!tl3|h0n2dH-VEH6*t9Z3?CMhxh(l#QP0c93x zQD+TC$;D+MH3I4fG^2k>mW z1$)ce6ST~wO6DjnFdJ2sW>f(Jz08rS9)zmO$C}lP`uY8y(G*&8E?)Ku5S9czBo%!F z?j!2yb1JPzmvl>1G#!|gkTvRm0Y@L2kl^h+V{2N zoO|oYXLAzH7>RJ5oqM|KNnV)-eZe$d&dQ-nN5YSq4huXFuIkb>xNHYQCY$`*v$x z=4h2Sz*ejT3G$}D1rco10#046JSNq)m9FGEc&6TgC4f)|lSRAYQ;Q-(q)9z%c$nNB z>}@ioe<$~s&d0d8J#BGxsz2Gz}oA_l7~>jW}g5^pi=;e)E7#&WXGt;Z z==yIQf$<^$cz{#7(}6gtw4l?|eYxj(Vwc%tC52X|!4Fu#C#ZLjCA}_T%SwUOZ-6eN z7I6Yc(hnOxLFrXnbgo$NYiu{ly6iKF(!_?z`r6|~nlRbf_!-q#mbT?;%L!9HsWhsf z>$I`^l)BsBLRJqN#dn(lt12zCM2=B)0l(4Eu5_I0rEbU+6!JKfi$)=KW!CMU-~{cg zjS`9uo+URjEln%5lG2<0XS0w2`uSeAn>WDEQLMf-pca@~2KWkh7%OW4LM}kL1J+%^ zh#3Y5Dpg)5(}ZysUaB%FXiiGulyW=6HzKKDpVA~*FC@$|r>3Z|Cz}UK!$S6V4C2)3 z4GQ9aY0x_`MF*CKUcDTA9RqBZfUzB_78wf1EfJXN2FnzX^8 zja^59ik;PE3p!;5N*w3a=XA;=*)76QrUgGB0b{@M7|R{cwF|}~o(f}%d7vt;dw)`C zHEAW0(gACSlFiX_SiQ$6WhuD^tuwX;tMs1~u>Wr19XH=1GTXvCP=Uxv3-ng=@x7nR zsdh}UC7b%}qw>|HP{lx!xKuLOI>fNx!By!p^18Z!?7eDdp72|wT9K5^7prpbM6yPC zfeWqpr7mg2)FEcXt@Mw*pnsttBxcqcbtlR@_KLa8WX759z{^0bcw{bj+sR-ZN%#K` zTK_8JJ9Ko-sHQIJCAeuH&HZ-;z~&!THyl>S&kVQs1(Y6zJ@E82P8!qfW2%&1>2iYK z05H6pP?gSl2b@bKOMhjT7>q3IhI~|x6d^7Kh>uf66$LF+%gaaDGg|9uvUjvjz|q9^ z$o;g@g*Z~?LgpV_bfD#!cdayDvbE=&&PatlQ4-78@1@;HAj`bF~e zC&MzOBU#;M_IvYzi-XI6nvUUPpo$;F%(SSXeP|){4*e8pU&3|X=n=s|ZLbHMyR9zq zb+R=Je=JV9_=AwX)rZS{m;STODJ+N7TT&6>r&6>JCXSP9&MB(Peat!M5zob8Bbt5| zuYqTWUrw|9gy3)A9~-tO`f*>+z7b$gE2MZE9nJA?&SE>A-B3TmpWW)m;siL^u<9taQ2R4@~s! zxACbdvVOVqzbdLaTjEn%~7`5rV%7THa zoj#fJ`Qt%%%ZBV+-+1qNG(K~J9ElR;(|lZ}x^hn1=0g)Jf1Ddl2~6%Q1{0Lqx@fyW!5ZJK;pfzqgd6(`&FIw+6*cjI&1EM($iH&{eV zo`2{(yP@}dt0<97vwJ0~FnL1N_tnl6HiMIO>2yf3KQYuzILyNDt3;K1a;d*ueE1cc zMu`K3I*lJz-q$Q3@x{orgOQUf>)7cqP3(%7VW6h;t_^)0H7GLeeqS+mxatFa@a|_m z4Lp0`dqM>?|FQ-)C-lbL{wNp7O-+_j32#c|_7}5)m8t+DaYfmx^OudTI` zAw~wp{%0$tw_t}vqE|&qPSvT@UOo31khGg=(3W<^Tmq9xKg#0_3e}KI9If)x_>%Jt zg{vIuwXwBc^yXL8whQ)}ST=DOcg$X+oIB&vWx8K$j5}VDn6sR(gY@ z@w`)?1`FQ+qOwHtC7XW+{tlwwRQM0rzjR4nA2CP$MJn4SG$9{A)OB(t*?W1~qUKH0 zZBp;zw0|T~f?J-3A(AGVQr@p+hzv$KQ-CSj>g>}n{FFiGQnu{pH?F$V{Ays9YEgm{ zO?c~-C>`389kWmIjU8Zo6luRZKd<>R+(W$$>ag%#d5OkpPV2#89msvMb>BTgWPPXa&%txshgH(%ct%0l*v%XUwj{|4C58Qh$9XVR)YHa`)HxIc-ZN_(_qkBERt!XCBA+U@{O|A$iaP z4eB8&?g?kmqGN}8h7~4-wT&>wyn`a#bhOSs-f7Obaz>9yFVyCXmhmNm)J%nRiV8`3 zL}{sfC9%k)HdJg;dj+M9 z`KTUdP~?wA{Kz((%$)D*UN=Il-RwuCT<6cb%x{2Ma#BzfLG2=@(Er+T`EZRdrsM-U z!VaHp#0n8R>4j;4sSE}aF2E!cjkPv23HO;|aF&(%j46&Nb~Ew~FtRAuOdNTpZIab; z!Zsx3)HW#4iNpI_Z|JXv0HL15AEd`d{xqLPQp;BcL3+-ce|~v=^kjMc#&`6rWh_y7 zAxbJCQZ*_bG3xlkf%8)&Mr+cVO4T0s^G9S(bN}(ee|!wE{Z4rN6x+QH;C)qqBoi2v znCOufvFgoD@bxM2)?^lQ%2{1C zdaE4SFJK>)bter?imkr(>SUOHFH=CE`1NC44DDf)bL;X}874${pjA)1TwlR%RP(b} z@1-FG6)~c4eoVolo}}BqY(UB|+qk#!y!ec9tt-7x$#zNUmS|TH={DS`Iy}>ZJ)v&L zBTc#m=?!2DpI3ur^(`FYq&HRD_tkW5k2Hu0Gev2vcwGNW-GY*MV8JXfbxoN;qmW8{ zQ+R@6L2aUqZz2Y+y&`laUYQ}kpU%Pf3df%mR$UU`1aWy#NVR?9pbQ>WAyjK(fKJ!@ zBWvy72ERv1nLi7+RDuLm?CJj#uzi9}R2zqMnRsUlI!%=|si~G6hlP*zc2rjX%4}>e zu!3Q63?k*yjm&hKocjf~cia$=OHf`5O;l-c!kY5*Ijtx?#XoyX7tq6F5oua2?s%#Tx3fM(Khi*!LUK}XOM6i>FiJOQTR+-L( zwqL?rUZ9n5&_z5EXBo?Crto_@)LQYOg;HMjqfLKH)0K876X>x}H>G|5!R%-qFe6 znC8P7BbzMIx8x=uLT3f)5jNpm*jlCRq)`%xMpr!YWMRc_yK(f(3Sf9fjb<_^)NGeO zzI6OV<2oY^IsEf|!6&*zwuSR{CfuBKhrcVR+;?o!{tYm%vvZx$5q+G{J|)$qZgP7S 
zt|vj2uF`4_ictGDav9NCu3o59Aptvn-P)K4g+E3g?qSc~Y}8KKh@tbP6*IJrewo0J zSNt%RqZcJkSF#dbni3>rnM{R1U8;Mf?5iRvz_&=YgWHnf^Q$IuDFgY}|Adcl=jd$> z0@4cSWRZJr<8>uN6--5*gX>p^wf}L?bXO$M`dVc-0si<_!=u6)D%-+&L7 zwUU`EmF(gQraUL=jOX3GyJc+?W<)5aoSI*TOTKe2Nh{MoO9WQm0FtsE@QdZ09tDiu zFkD|(6fO7IzWy1kD7WSz&F>IC9BPdj^xR-7#?>;>(CqYfqnni(ZD#SF5SyM@x#fpx zO=A<1l$kHqs5g|vs8v2SZZk=0^N6AnfZaM)sRB0ZP&miMhe-efB=N?G$>a2l{Uz5$ zL;s$GcUv$yBq5SGCf2H1eS_U9QG3p=x+?N-fH5Ug6L(kh9D$$qRBgVgX=-(AMnjIu zw)p?`W%}D&g@QAJo$_Q+Z-8liWrzUK!X2nJdOsElsj%kf;lx+-UOj^#k3)!Sc$G2$>fkyup9UbLdw+@^P_qHSpVuBHX6I0;Icpy{_e}jPUQS?>b-n zg2@|1U<9}ULEWc)U`Y{=*Nta+OtD}~H_UKG@FRr-*m<{!tLZ-qM2QF#sK{Sgp%l zmnCWO#5w$7Lr_~9=I1G|W5j9sku|f&!pZLjCN;&qPTXdskC}T!PM4Xs7^~@04o;1p z>DFDS^$t=0&_q#$$Sjh(O4OYBT&Dw2? z!I4@IT9?Ijk{3*6b|$YMo+4i#a9~`Q4F{Q&HS>!_#?E~D5f~m1yW#gb9gKKmCtf+j zn53no*gG>FFdeM?*_+c3v#BXyw1eLEuYRjx1l#GnW{E>(2$}igWO24uFh4S_d-TV$ z+{XWyIl|m`FbYq=U$Nrnq^4Gxwl8k(Jajgwd5Ndy{#{{noJdT;kDZk+&CE)z0cE(!o}O@uU*;Jyz5)%6?~zN;Yg=gFZ#X=}dN zCkc+%AJh9uRllc{^qMKMb5x1tyK=Pmo5yO(COYJr4*5TLsY&(r0Wq;qth^n_Cgv zK{=@AVyaG~rJCGzyRrY}GRMPc%BPU(stz~18My5iKW22%#rX(x)KYb+vyeC%{X0?V z=Ymnr^2xVCmSA-_N!ZWau%v+mbq9Q`W$(0RZ6nkkkk%PvqRs`~6~xS7sZFV%a?BDz zVOgZ0%;V?bQ4r!+IZI$dF_=M-t_r|{HYF3W=nYpLNt7Ip7ptJAWP<5u30)S%WC72O zb1hIS3^K_*u{#w(Bmu5rrz`<6lp_x@+zm1eGNc6>{dExSEG(gSvP9gcjQ-%rpmQ9< zad#*`MzcOXMoBjwA_Fq`fyd?35PO}IFaI}W(xFp9q$c?8iemMsX?r94OU8S^1BJ3u z@VJtq`m+a831jEBvUXAWlH|3XrbF6vD z?w%#%oQ(6gRaT;>2F2O;YQbzB{z%k*n7@OLdG?gMKO06$(&&CflBE2-@*QG>LH=>q zY^WWGurzxKLr*f!nM$HDkK|kISFgf~HLS5Px>eI`V=N0&hA%C3E(OVF4{+AN_i|OOM=`F$vQ#&*~+bu}`aZ`KcYsE}|s^TPSX8 z?S9~stW{nsp?pZ9{rRiIxncCF_WGc!;bL;)5-y@X0=v=#Vpt<{ayR|Jn{q+H!S|$$2 zn>gy>=9aahWtB$rvEUCr>;Br5#GkCl11E^>un>Z}oml?kc~ze2bO#r_gPeL!eC^7} zr#~FSSruOV-}ls_wKgbk?^p*nCTc{3pV%wj05Z8TSoVTzTz_D1d4{>Q#~N#JvNZEe z?_Iua@;k*Z6b)C;3KwDy6zUD!XrAwgITwfHd76FyZdJ*#EhTClPYtT-tsOonm(Hsv z4x3Y8icT#X&!WW=b2`D{jt|5<&=z+mY0RYH#;C^7jiq79x(}eqDxEeD5v@jW`94I5Nxu@687x52G4xD zZEb!1dszqY3GOSfuY6v0Ue|AC5K+(`p(h50p?pPyaR$>)Mbn)9S8G-}vIN>9E4x*% zQ+hm0Wr=C=;Naw!zb;wG7ExC>mcKU$3^`Ss6-vyUZ<4K{<~?_(c1#3 ziY_#NU=|xm^{nv8-f4%nEfx7c`NGriaW_Og3D&ChISqSUH{1@*)n$5*Q)?)dm*cWM zXEe^YX!9_SQkeRX!%ffZfBute&Zj-9xQ6|yN)Lb4elCGO0+QbRj~rc7caT4 zNNJsM;+ZHTp!wnvQi^NU=kR_8?}x6kZTu!*BU4s|eHyAP*G{e8qw9a_ub3DS(DWZ6 zSeKva37v@~Q>Pz^So=zHlLdE2Gv&&Qy3V7I#iCoY~!Rt*>LXt-5-{INu##t9= zf6q!Kt@_U3JqQolY16iEuyvAA&Fs$!wiZt7bm;jHWcf(IE4M5edF_I18e9ix+qz;9 zwuMbFGR(v_cVZIA!$hjwGdF_@bKK#Zj07Uo&pw31l)abnD*?K-1<5syUpkq;{#lp3 zX$$A}pQe^o-aOwo=vB*9xJ;?j!nQzvPZn+3EhFqFW%08T;x{6R{@?{vCpT!Xn5Nm7 zS?YG+euDPqag)W{JaZrXsHR?_IZ{Bw_)fT^ibe@eaYg5d<%mj-Rs8Hs^@;9wA6^^I zp`9;sXOkXU)CJY|LnunKcx`&N3GDX+KU#* z)>3KRly9epmlm^DXl5r8kxsWb;R-Y&X8!gD*v0H6Lcj79c8d&J4!Y0=b%MP^*U*Uv z)y`Z#drE{yDzFSP?Q^Nl9&Z>D#qMY&)1Trw5MvK%hdtoYD5KO9hS}&wFc@aLFUQ;w z{=D~H|9;Ca*yBL=D0h3b&Xl@R6cIJ0==nYBTs+=nEIo~`k zFBwD){?3F5)!pYS)=l;o>=uLl&e6p+*910k7x}%%+@n|7L_RrcPZAOeUV7Z@YzX@t z#>B}eri13ZlQ9p?s-9mX*2+r~EBz>`^;bRh=g z*zgA2!d5mS``hMyte{NYpPRu!iEZ3tFOK5!mh($48Rq+zDl0x;JByHRU&OXZP_l`& z%{0no!l(jjdF1sz>eGC{9^+oS!3ex=3B`AN1TsRA;zYpN7p2p|3wTl%#$%Nn}!J zH(u0g?(_&V-mbIGQ_IYRgN>I508B1+xK#(0>%jbv+Y?M7nJ%WNxhwtWe2Wl*HrAL4 zW&FS@uXY8E%4TfJ;_w~N_seI8&ZUJcc=Ip%a5E%^#b_GGpEK)Q%Hy~R)q!0KZQS@{ zaK;2I)19&%1mHp1b|Oh7ydz8w_@1Y64vd3nLO)XWYtaBIJ;ijR{Enpd<*Ulb1DtgU z#S%4om)z@EU9YooKPsap{5PlV*@?yvbmrdICWTS&eu8`! 
zWi9cM15->eXmtlI{c6A8M=kDeQ8v`nKuavUzK4%PFnKg;?zD()?c2WHOe~C^3%LCew5z|2J?9HH96?shGh+`8C7{t@EliRQ~W5;g;V?tAOT<{nI_cB zD?=vtDR!F+$xScesVP%qgQv);Nh?js)WIuXT16SigByq($Q-cRg16T>+uhdkIgX=o z1is)oiEj|^dz`F-Z|Unn5k+70{CTA`q$zEFwOEqf7y>=<#*MpdK(;3C{^5a9{WiwG zkO+*Lha8XyOiV*Q-9+hlrYw9|>+Nx&eg)Tf=R6R4M<`qpOZ#AXe0#T;tU~;G3wquP zM)mD;D7)i5Qi%?PO@+*2`mOkvB+3gm(+$zf2ToPw6V1cb;KPJHQ{y|Y{<0F4(PN-I z8hzgM$sMv$1V#Woia!qqTNy1w{_UI=cnZ!t&0F{n+Bw$5<0*IG-_iw9NR-skM<@WT zy`JZsKED(H5!ZfA$|r-&Y$J}fK5BPoc6!R-OAV8CT;&m!;QJb>Qsj}+N?xd^7H1u( zL5OO0fls#b7+Z307t2qv;CxIyIj`|$=HdE(788rbZb@;3Ba6lH;PY>WH58Ae<;Px? z#?Eiw8k;SoQ|JI_F-LVY%8kvcFM0cR zv`@up_^=H;uQ=VJ>*VNYcQIZkJoNZ-Te>RO56CLGp3K&}mDUSJ!CC}{!+d$k1O_5~ zv13^D2h^L?o2bs$!T3gCBcS_)T!E!Mwf5Si`YN46=w;LqkVbv)n2X+hU=+`9MZj*G z3%%|p1S*h9XObA99p9FlTkPNi`qS_zkjB-;Yu_YlI6%v;yrrp~f;9)?Zx+SwV>;u1 zK3Y`={+O9#LPMKZNQORfX~rP1OTG9_)1OO?*=aQ;IS?>4_XeoiI#JK`ov=@0evUcr zp0DzN>_*q71S7a^Es{J$<`3%{bwoXU3|kSgRINi z;$AZ&qh2Kxh12-JO+IOj&kh#F-#ZJ6vp;XXmjVg2|BfABjI_w zt=FJ0Uax|GzjRjkYVPzs=AW`RekOumbsU$-Fsw^snxOx?2wr;wC?Q|U(;PAOFF^5D zd6+EwjPMTL+|Hb6k?KuVMhQTzG|t>yYnevb($-Zvte>-{^d8H*J|JbNl;7adBjIW& z*JNgqeD@1GsA(C`FyTx|h(CL|$-<5GZCRi-9F_TdZglZ0iUGCgB?U|>9!1Gmk_@_4&9W|x4Y4Z|Q5TkfI#$qv8Z?R}P( zafTk31g8#P5!`xq`L9wn{0{(_;g5mOgqoV-NQMSp@ghP5>kN37>>zYTI`q%IIJx9f zq(zw*UAt;ubsX;KLm0>AXHsxsf3Sp@S!sC7`}e7hg`AK4nJ5|1@^%}23z-~Qs{U=QOYpKo0HvlmD3<@Fqd#sI$f+go^FqaDd(e1o`QgTN_ zN?Edv_|vKQe>)|JbSWyyZ5IXYmVH=1? zpOg1aXo-A-#RiHOtRASxHCt3VHe*nH%{rvn$;;Y2tJHM#q?Y%MIX;byVoI!?Q#$k5 zGH=CvF_c?-^j7-$C0*n{cxEVkead__9bt?pQ;R><%8V}v-d&Aw%P!x5Q+M@{M$}Wu zyRUX^E5dhz!57%$XZ%vrSE`}e!0q%k*v2}0b7L|?qAxyuu{=%*@3=e?Dr;)1^JWP` z?X-UCZnXi2UrFI}Jc|AjTOhxwJb)w+8e&9Q1NJITznx7{)u2~0bn;0sVT)*|bBTGX zDQr41U`_|GO6fMn-4bYvJhOh8k_*yhfQ=PQ6k1r;^IrNmx|Lfm}_6y%Z#H-NU+WOvxEu0|#_l`6?veR~pG+PAnUME&)|!%8vMsG_3(x*~ zK;dar7N{_^azC-sXx{IyQI3;W6qxIIay+>e9qboV}zA^cf za*w!nN2&IjR*j)w2;M5A%e~GZ*Y(EGvHE)IE~AI@jL{b2#8>E!^9+AT?ZTJ0zt} zQ41b*Rja>izvvk}W<9*zM|T(PEja?c1uB!PmsWBtl7RP82p8kUTPHsJE0|dd1?*0* z#d$vtjY5VC$et20xR%^d%h7@oiM&7m-7jh6@jdW8S@wM%#S5@lo&2Td;QK0`?v};G zoxCsf+-ZCR;C?S^*@+4c%(vo}`uB!Xw{V|*9*Y`PEX_Z8*gf>u)vRSAG zz7x}Xn<5@%0In0xjb>Mfpvlo=RM*#Zg{7DgQ%e>eq@Ad|cSBeb0wcnbL9+-9&m<`L z6DpKVLOjXxyKfqOOnaR#oN6{QgZp5{>d5_w9w%lNHuSNMAf_H32jgzf=q09ieS*(F zH}U_BI^B&qN8UNT0rtq1%zsmL)VD8yeMcWi)y#$*_5Bl~yAn{mw9_u~tEm9?G4^7Ia1bgLt zgR~rRx~6yZ!dcI^aO)CBtKusKoY47@2Q?#sNWZ$9t21D+E=W`m8Dn*Y-dWaKp*|yP z%J*ED@GDwFnV6OiTN4v%SBq?WG`8b?n0~78g$|XiR^QJ2$V{7@n@MW44sz09SBzgscGQ*llOY*(@sCm#XS-ot)4L1nYw6do!uYo&@;yS&>8#srm;55%)Q6*KF?uNAqVXMz0QUuMBHhJXZc^iuAtYW?T%9!vdiYs-+%wphd%})qpalv9%x^EeQa>>7`XU|i zIcx&;w_c;LnGP#&sAB&~WZ?1VKT_rQem^Ks9KBn%!(cl2aMyO+xn4e&U06G5&)NEP zM@?F{QxlgUpt|Z!&>Bc=uuq0D>v%e@GE&sIsO0?Ko8*BmOd4nqc4WwjM9Z|ErajVg zDL=!EXxVC0m8qtaJgd1KWTC%GLqv3){31n)wP4|Ny(EG+6WMe@P_ah9HL<-h>vA12 z#mK3ud8_<)=7F|X5GnjCURj8Wvt7Ra`d1sSWk|tN%^ux7a!#4YoEik|Hjz`=b&9sf z6ca2}?ZDYE_xmM&X>e;m^(BMMDa=iG2@GmnxR3kJ)tw%91#i?K;q_rN>E4~{D+RwP zuZ_4(uzdT4mou-DPGN(5iGZUu$5flx?_PTw{H#(7qr6~qdw2S^(*6T|r-oi912u2+ zF2@q>R}N@HBJy6$Jiq3PA+^jx9Z5z6@y zlA3Y)kU+pFC_c1d!T8W{Ww15r4Uk`4MVD|}$$yagH*`&aw^xnZE1Wd{#A&-aE8~dr zjxgiA_FTbjIQQ>p*|JPQag2*bU3XYr+42Pb(Bbl4E(bTvMyId2AE~U|H041a=tHJx zu!bA-q^-%mLkG3KfJ$ezbJ=@7tPJ4|xrE%FY|Q6ra0`HPI0UFnr4obm(QPz)+zvJtx~im@rBBZwU6$qYt_g3ZWGE>7_S{fa^N+yFkIW zJWIvUJT)+mDx4ar5URge;r3UN5o zHRc9i3#JN|AoN}U^}~zb);H~$a-Xk;CyXGA0>bt7hb+dPf0-pt*b*T)I&h`6MUmZ= z7@gjNujxR3xGj`*$Caof-7d=5f|dXuAF^s?J^`!u%CVK}&efTT=xWM|xp=RqRLMe~ z2gApxG==(O$4DNM!CfV@_|f*f0-BZSMLms5aa`1T-pcV172`oJTK+<}G=a(QO0riI z>Rn?l&4c#4NcReN9^P}13{1W*EiJCHvR{iAn{JF=1I=5xm$zwE?X7$Q1(gksqW!H^ 
ze~0;JF`g}zyX@zV7jtbd@E>Gn>m7Y|f88+!DVy&6au(o|QOY5PgAw2&#pEmVDgJit zV-$^5_6Bf+2p)vQzX4Rxs1IfO9&uFLT2|{1pEWdPxL-V+E~7pelX}$cX@?atG5j|k z{U1id>)`kOjQg^aZqVT^=oU+$xX4=mqcf${P~tacy!q0j28qNTzIk1yPqy(&Nl3>W zx>-k24Kj!$y)sFVTD`*whSsZVwnmTDb74~(0!dhm$LQQs7Xh$IeVO_dNFS-JGT&YE z(55CvNhLjkm8+LlO?`bVYv^;80`m}-EZd?qZ{5(u-V<%oOhhkJNcfUHIb1=a$wQ)^ z;v*5Z`3~l&F0PKgS?Wxk)Q2^(Xfi$9tz7UZZ(QXLk!3`Loj(+K7Q+miT3^AWNsI*gQ#b9QjYV^<|k=rH>)ng1`%^gk)le>f-PPC;$O zAVINA=AHJ#u$hRt)XwycRVXLa(wOdL}T7tX&8#Ub` z1ihH(jZ41Y7SeY5`l;$T2iyb;Qq5Vza@C87UUG91n5V2$@bAq0oo%hu^VE#GB! z+(r`*C92S{$$bvB-)JkGG0OD)%=G=UXigaupY_f(@JsxF2+&d9f!%*5sOUR> z?GOg|BLY7~2a*PRl=lBFF0B8N`T{>PeChM?YjT&^M)dj739oy?!3od8Vs4i?D!LLl zu=fAyEd6hn@qe`%^Td(?f{-d`c4)S+T#>Kr6CiGy1$uhw{124>EB(J0XyuQenv-dW z1Z)&)B-e9uYEuRbs&Pl$=}=#;4W~a~_4R?pbU}|6^bO&2Il+^W$;ppGdTII~}-n zzgE2XQZ{*kPp{1WzZV~p0ZWjh^!0fa*OI1#i+}tGZY>-&N27SDW~PBh=lStM#=v9W i0) or upload config" + ); + openModal(); + return null; + } + + parsedConfig = getDefault(modelSize); + modelSizeinB = modelSize * billion; + } + } else { + parsedConfig = getParseConfig( + parsedJSONData, + setErrorMessage, + openModal + ); + if (parsedConfig == null) { + return null; + } + console.log(parsedConfig); + modelSizeinB = computeModelSize(parsedConfig); + } + + let fB = floatBytes; + if (quantType === "bnb_int8") { + fB = 1; + } + if (quantType === "bnb_q4") { + fB = 0.5; + } + let modelSizeinMB = convertToMBModelSize(modelSizeinB, quantType); + // console.log(modelSizeinB); + + //!Inference + if (trnType != "trn") { + const checkSanity = checkCombinationInference( + trnType, + quantType, + setErrorMessage, + openModal + ); + if (!checkSanity) { + return null; + } + + if (trnType === "inf" || trnType === "inf_vLLM") { + let fB = 2; + //If bnb quant + if (quantType === "bnb_int8") { + fB = 1; + } + if (quantType === "bnb_q4") { + fB = 0.5; + } + + inferenceMemory = convertToMB( + 2 * + contextLen * + 2 * + 2 * + parsedConfig["hiddenDim"] * + parsedConfig["num_layers"] + ); + + activationMemory = computeInferenceOnlyActivationMemory( + contextLen, + parsedConfig + ); + + console.log( + "HERE!!!", + inferenceMemory, + modelSizeinMB, + overHead, + activationMemory + ); + } + if (trnType === "inf_ggml") { + modelSizeinMB = computeModelSizeGGML(parsedConfig, quantType); + inferenceMemory = convertToMB( + 1 * + contextLen * + 2 * + 2 * + parsedConfig["hiddenDim"] * + parsedConfig["num_layers"] + ); + activationMemory = computeInferenceOnlyActivationMemory( + contextLen, + parsedConfig + ); + overHead = overHead + computeOverheadGGML(contextLen); + } + + totalMemory = + inferenceMemory + modelSizeinMB + overHead + activationMemory; + } else { + //! 
+    } else {
+        //!Train
+        activationMemory = getActivationMemory(
+            parsedConfig,
+            contextLen,
+            floatBytes,
+            quantType,
+            selections.dropdownFullOrNot,
+            batchSize
+        );
+
+        activationMemory = convertToMB(activationMemory);
+        console.log("got activation", activationMemory);
+
+        gradAndOptMemory = getGradOptMemory(
+            selections.dropdownFullOrNot,
+            selections.dropdownOpt,
+            quantType,
+            modelSizeinB,
+            floatBytes,
+            parsedConfig
+        );
+
+        console.log("got gradOpt", gradAndOptMemory);
+
+        gradAndOptMemory = convertToMB(gradAndOptMemory);
+        totalMemory = modelSizeinMB + gradAndOptMemory + activationMemory;
+
+        console.log("got total", totalMemory);
+
+        totalMemory = totalMemory + overHead;
+    }
+
+    return {
+        Total: Math.ceil(totalMemory),
+        "KV Cache": Math.ceil(inferenceMemory),
+        "Model Size": Math.ceil(modelSizeinMB),
+        "Activation Memory": Math.ceil(activationMemory),
+        "Grad & Optimizer memory": Math.ceil(gradAndOptMemory),
+        "cuda + other overhead": overHead,
+    };
+}
+
+///Users/rahulchand/gpu_mem/public/all_configs.json
+async function fetchParams(name) {
+    // let output = fetch('https://huggingface.co/meta-llama/Llama-2-7b/raw/main/params.json');
+
+    let response = await fetch(configPath);
+    response = await response.json();
+    // console.log(response.hasOwnProperty(name));
+
+    return response.hasOwnProperty(name) ? response[name] : null;
+}
+
+// function isNumberOrFloat(value) {
+//     return /^-?\d+(\.\d+)?$/.test(value);
+// }
+
+function isNumberOrFloat(value) {
+    const num = parseFloat(value);
+    return !isNaN(num) && num > 0;
+}
+
+function isValidPositiveInteger(input) {
+    const num = parseFloat(input);
+    console.log(num, input);
+    return Number.isInteger(num) && num > 0 && input.trim() !== "";
+}
 
 function App() {
-    return (
-
-    );
+    // let subtitle;
+    const [modelSize, setModelSize] = useState("");
+    const [modelName, setModelName] = useState("");
+    const [contextLen, setContextLen] = useState("");
+    const [batchSize, setBatchSize] = useState("");
+    const [totalMemoryShown, setTotalMemoryShown] = useState(" ");
+    const [breakDownMemory, setBreakDownMemory] = useState(" ");
+    const [errorMessage, setErrorMessage] = useState("");
+
+    const [fileNameUpload, setFileNameUpload] = useState("");
+
+    const [modalIsOpen, setIsOpen] = React.useState(false);
+
+    const [jsonData, setJsonData] = useState(null);
+
+    function openModal() {
+        setIsOpen(true);
+    }
+
+    function closeModal() {
+        setIsOpen(false);
+    }
+
+    const handleFileClear = (event) => {
+        setFileNameUpload("");
+        setJsonData(null);
+        setTotalMemoryShown("");
+        setBreakDownMemory("");
+    };
+
+    const handleFileChange = (event) => {
+        const file = event.target.files[0];
+        if (file) {
+            // Check file size
+            if (file.size > MAX_FILE_SIZE) {
+                alert("File is too large. Please upload a smaller JSON file.");
+                return;
+            }
+
+            const reader = new FileReader();
+            reader.onload = (e) => {
+                try {
+                    const json = JSON.parse(e.target.result);
+                    setJsonData(json);
+                    event.target.value = null;
+                } catch (error) {
+                    console.error("Error parsing JSON:", error);
+                    alert("Invalid JSON file.");
+                }
+            };
+            setFileNameUpload(file.name);
+            reader.readAsText(file);
+            console.log(jsonData);
+        }
+    };
+
+    const [selections, setSelections] = useState({
+        dropdownTrnOrNot: "inf",
+        dropdownFullOrNot: "full_trn",
+        dropdownOpt: "adam_opt",
+        dropdownQuant: "no_quant",
+        dropdownGPU: "rtx_3090",
+    });
+
+    const handleChangeSelection = (e) => {
+        const { name, value } = e.target;
+        setSelections((prevState) => ({
+            ...prevState,
+            [name]: value,
+        }));
+    };
+
+    // const handleChangeInText1 = (event) => {
+    //     setModelSize(event.target.value);
+    // };
+
+    const [output1, setOutput1] = useState("");
+
+    async function handleClickTokS() {
+        setErrorMessage("To be added");
+        openModal();
+        return;
+    }
+
+    async function handleReset() {
+        setFileNameUpload("");
+        setJsonData(null);
+        setTotalMemoryShown("");
+        setBreakDownMemory("");
+        setContextLen("");
+        setBatchSize("");
+        setModelSize("");
+        setModelName("");
+    }
+
+    async function handleClick() {
+        let parsedConfig = await fetchParams(specialMapping(modelName));
+        const out = getAllComputedData(
+            parsedConfig,
+            jsonData,
+            modelSize,
+            contextLen,
+            2,
+            selections,
+            setErrorMessage,
+            openModal,
+            batchSize
+        );
+
+        if (out == null) {
+            return;
+        }
+
+        setTotalMemoryShown(`Total Memory: ${out["Total"]} MB`);
+        const jsonOut = JSON.stringify(out);
+        setBreakDownMemory(`Breakdown(in MB): ${jsonOut}`);
+    }
+
+    // const handleClick = () => {
+    //     const trnVal = selections.dropdownTrnOrNot;
+    //     let totalMemory = 0;
+    //     let size = parseFloat(modelSize);
+    //     if (trnVal==='trn'){
+
+    //     }
+
+    //     console.log(modelSize);
+    //     console.log(isNumberOrFloat(modelSize));
+
+    //     // console.log("clicking");
+    //     // setOutput1(selections.dropdownTrnOrNot + ' ' + selections.dropdownFullOrNot);
+
+    //     // console.log()
+
+    // };
+
+    return (
+
+
+ +
+ +
{errorMessage}
+
+
+
+ Are you GPU poor?{" "} + 🫵🤨 +
+
+ Calculate how much GPU memory you need to run a + particular LLM +
+
+ meme +

OR

+ meme +
+
+
+ + +
+ + +
+ + +
+
OR
+
+
+ + + + {fileNameUpload} + +
+
+ +
+
+
+ +

+ +
+ + +
+ +
+
+ + +
+
+ + +
+
+ + +
+
+ +
+
+ + +
+
+ + +
+
+ + +
+
+
+

+ {/* */} +
+
+ +
+
+ +
+
+ +
+
+
+

+
+
+ {totalMemoryShown} +
+
{breakDownMemory}
+
+

+

+ +
+                    PS: These are approximate values & may vary by 500 MB to
+                    1 GB depending on the GPU, model, input, CUDA version,
+                    etc. If your setup has ~1 GB over the requirement, you
+                    should likely be good.
+
+                    {/*
+
+                    */}
+
+    );
 }
 
 export default App;
diff --git a/src/index.js b/src/index.js
index d563c0f..5276979 100644
--- a/src/index.js
+++ b/src/index.js
@@ -7,6 +7,7 @@ import reportWebVitals from './reportWebVitals';
 
 const root = ReactDOM.createRoot(document.getElementById('root'));
 root.render(
+
 );
diff --git a/src/textBox.js b/src/textBox.js
new file mode 100644
index 0000000..65bc57d
--- /dev/null
+++ b/src/textBox.js
@@ -0,0 +1,18 @@
+import React from 'react';
+
+function TextInput(props) {
+// const [value, setValue] = useState(''); // useState hook to manage the input value
+
+    const fun = props.setValue;
+
+    return (
+        <input
+            type="text"
+            value={props.value}
+            onChange={(e) => fun(e.target.value)}
+            placeholder={props.placeholder}
+        />
+    );
+}
+
+export default TextInput;
diff --git a/tailwind.config.js b/tailwind.config.js
new file mode 100644
index 0000000..c0958ec
--- /dev/null
+++ b/tailwind.config.js
@@ -0,0 +1,11 @@
+/** @type {import('tailwindcss').Config} */
+module.exports = {
+  content: [
+    "./src/**/*.{js,jsx,ts,tsx}",
+  ],
+  theme: {
+    extend: {},
+  },
+  plugins: [],
+}
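For reference, a minimal sketch of how TextInput is presumably consumed from App.js: the parent owns the state and passes its setter down as setValue, keeping the input controlled. The prop names follow textBox.js and the state hooks follow App.js above; the exact markup and the placeholder string are assumptions, not part of the patch.

    // hypothetical usage inside App's JSX (illustrative only)
    <TextInput
        value={modelName}
        setValue={setModelName}
        placeholder="Model name"
    />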