{
"llama3.1": {
"url": "https://ollama.com/library/llama3.1",
"description": "Llama 3.1 is a new state-of-the-art model from Meta available in 8B, 70B and 405B parameter sizes.",
"tags": [
[
"latest",
"4.7GB"
],
[
"405b",
"231GB"
],
[
"70b",
"40GB"
],
[
"8b",
"4.7GB"
],
[
"70b-instruct-q2_k",
"26GB"
],
[
"70b-instruct-fp16",
"141GB"
],
[
"70b-instruct-q2_K",
"26GB"
],
[
"70b-instruct-q3_K_L",
"37GB"
],
[
"70b-instruct-q3_K_M",
"34GB"
],
[
"70b-instruct-q3_K_S",
"31GB"
],
[
"70b-instruct-q4_0",
"40GB"
],
[
"70b-instruct-q4_1",
"44GB"
],
[
"70b-instruct-q4_K_M",
"43GB"
],
[
"70b-instruct-q4_K_S",
"40GB"
],
[
"70b-instruct-q5_0",
"49GB"
],
[
"70b-instruct-q5_1",
"53GB"
],
[
"70b-instruct-q5_K_M",
"50GB"
],
[
"70b-instruct-q5_K_S",
"49GB"
],
[
"70b-instruct-q6_K",
"58GB"
],
[
"70b-instruct-q8_0",
"75GB"
],
[
"8b-instruct-fp16",
"16GB"
],
[
"8b-instruct-q2_K",
"3.2GB"
],
[
"8b-instruct-q3_K_L",
"4.3GB"
],
[
"8b-instruct-q3_K_M",
"4.0GB"
],
[
"8b-instruct-q3_K_S",
"3.7GB"
],
[
"8b-instruct-q4_0",
"4.7GB"
],
[
"8b-instruct-q4_1",
"5.1GB"
],
[
"8b-instruct-q4_K_M",
"4.9GB"
],
[
"8b-instruct-q4_K_S",
"4.7GB"
],
[
"8b-instruct-q5_0",
"5.6GB"
],
[
"8b-instruct-q5_1",
"6.1GB"
],
[
"8b-instruct-q5_K_M",
"5.7GB"
],
[
"8b-instruct-q5_K_S",
"5.6GB"
],
[
"8b-instruct-q6_K",
"6.6GB"
],
[
"8b-instruct-q8_0",
"8.5GB"
]
],
"image": false,
"author": "Meta"
},
"gemma2": {
"url": "https://ollama.com/library/gemma2",
"description": "Google Gemma 2 is now available in 2 sizes, 9B and 27B.",
"tags": [
[
"latest",
"5.4GB"
],
[
"27b",
"16GB"
],
[
"9b",
"5.4GB"
],
[
"27b-instruct-fp16",
"54GB"
],
[
"27b-instruct-q2_K",
"10GB"
],
[
"27b-instruct-q3_K_L",
"15GB"
],
[
"27b-instruct-q3_K_M",
"13GB"
],
[
"27b-instruct-q3_K_S",
"12GB"
],
[
"27b-instruct-q4_0",
"16GB"
],
[
"27b-instruct-q4_1",
"17GB"
],
[
"27b-instruct-q4_K_M",
"17GB"
],
[
"27b-instruct-q4_K_S",
"16GB"
],
[
"27b-instruct-q5_0",
"19GB"
],
[
"27b-instruct-q5_1",
"21GB"
],
[
"27b-instruct-q5_K_M",
"19GB"
],
[
"27b-instruct-q5_K_S",
"19GB"
],
[
"27b-instruct-q6_K",
"22GB"
],
[
"27b-instruct-q8_0",
"29GB"
],
[
"27b-text-fp16",
"54GB"
],
[
"27b-text-q2_K",
"10GB"
],
[
"27b-text-q3_K_L",
"15GB"
],
[
"27b-text-q3_K_M",
"13GB"
],
[
"27b-text-q3_K_S",
"12GB"
],
[
"27b-text-q4_0",
"16GB"
],
[
"27b-text-q4_1",
"17GB"
],
[
"27b-text-q4_K_M",
"17GB"
],
[
"27b-text-q4_K_S",
"16GB"
],
[
"27b-text-q5_0",
"19GB"
],
[
"27b-text-q5_1",
"21GB"
],
[
"27b-text-q5_K_M",
"19GB"
],
[
"27b-text-q5_K_S",
"19GB"
],
[
"27b-text-q6_K",
"22GB"
],
[
"27b-text-q8_0",
"29GB"
],
[
"9b-instruct-fp16",
"18GB"
],
[
"9b-instruct-q2_K",
"3.8GB"
],
[
"9b-instruct-q3_K_L",
"5.1GB"
],
[
"9b-instruct-q3_K_M",
"4.8GB"
],
[
"9b-instruct-q3_K_S",
"4.3GB"
],
[
"9b-instruct-q4_0",
"5.4GB"
],
[
"9b-instruct-q4_1",
"6.0GB"
],
[
"9b-instruct-q4_K_M",
"5.8GB"
],
[
"9b-instruct-q4_K_S",
"5.5GB"
],
[
"9b-instruct-q5_0",
"6.5GB"
],
[
"9b-instruct-q5_1",
"7.0GB"
],
[
"9b-instruct-q5_K_M",
"6.6GB"
],
[
"9b-instruct-q5_K_S",
"6.5GB"
],
[
"9b-instruct-q6_K",
"7.6GB"
],
[
"9b-instruct-q8_0",
"9.8GB"
],
[
"9b-text-fp16",
"18GB"
],
[
"9b-text-q2_K",
"3.8GB"
],
[
"9b-text-q3_K_L",
"5.1GB"
],
[
"9b-text-q3_K_M",
"4.8GB"
],
[
"9b-text-q3_K_S",
"4.3GB"
],
[
"9b-text-q4_0",
"5.4GB"
],
[
"9b-text-q4_1",
"6.0GB"
],
[
"9b-text-q4_K_M",
"5.8GB"
],
[
"9b-text-q4_K_S",
"5.5GB"
],
[
"9b-text-q5_0",
"6.5GB"
],
[
"9b-text-q5_1",
"7.0GB"
],
[
"9b-text-q5_K_M",
"6.6GB"
],
[
"9b-text-q5_K_S",
"6.5GB"
],
[
"9b-text-q6_K",
"7.6GB"
],
[
"9b-text-q8_0",
"9.8GB"
]
],
"image": false,
"author": "Google DeepMind"
},
"mistral-nemo": {
"url": "https://ollama.com/library/mistral-nemo",
"description": "A state-of-the-art 12B model with 128k context length, built by Mistral AI in collaboration with NVIDIA.",
"tags": [
[
"latest",
"7.1GB"
],
[
"12b",
"7.1GB"
],
[
"12b-instruct-2407-fp16",
"25GB"
],
[
"12b-instruct-2407-q2_K",
"4.8GB"
],
[
"12b-instruct-2407-q3_K_L",
"6.6GB"
],
[
"12b-instruct-2407-q3_K_M",
"6.1GB"
],
[
"12b-instruct-2407-q3_K_S",
"5.5GB"
],
[
"12b-instruct-2407-q4_0",
"7.1GB"
],
[
"12b-instruct-2407-q4_1",
"7.8GB"
],
[
"12b-instruct-2407-q4_K_M",
"7.5GB"
],
[
"12b-instruct-2407-q4_K_S",
"7.1GB"
],
[
"12b-instruct-2407-q5_0",
"8.5GB"
],
[
"12b-instruct-2407-q5_1",
"9.2GB"
],
[
"12b-instruct-2407-q5_K_M",
"8.7GB"
],
[
"12b-instruct-2407-q5_K_S",
"8.5GB"
],
[
"12b-instruct-2407-q6_K",
"10GB"
],
[
"12b-instruct-2407-q8_0",
"13GB"
]
],
"image": false,
"author": "Mistral AI"
},
"mistral-large": {
"url": "https://ollama.com/library/mistral-large",
"description": "Mistral Large 2 is Mistral's new flagship model that is significantly more capable in code generation, mathematics, and reasoning with 128k context window and support for dozens of languages.",
"tags": [
[
"latest",
"69GB"
],
[
"123b",
"69GB"
],
[
"123b-instruct-2407-fp16",
"245GB"
],
[
"123b-instruct-2407-q2_K",
"45GB"
],
[
"123b-instruct-2407-q3_K_L",
"65GB"
],
[
"123b-instruct-2407-q3_K_M",
"59GB"
],
[
"123b-instruct-2407-q3_K_S",
"53GB"
],
[
"123b-instruct-2407-q4_0",
"69GB"
],
[
"123b-instruct-2407-q4_1",
"77GB"
],
[
"123b-instruct-2407-q4_K_M",
"73GB"
],
[
"123b-instruct-2407-q4_K_S",
"70GB"
],
[
"123b-instruct-2407-q5_0",
"84GB"
],
[
"123b-instruct-2407-q5_1",
"92GB"
],
[
"123b-instruct-2407-q5_K_M",
"86GB"
],
[
"123b-instruct-2407-q5_K_S",
"84GB"
],
[
"123b-instruct-2407-q6_K",
"101GB"
],
[
"123b-instruct-2407-q8_0",
"130GB"
]
],
"image": false,
"author": "Mistral AI"
},
"qwen2": {
"url": "https://ollama.com/library/qwen2",
"description": "Qwen2 is a new series of large language models from Alibaba group",
"tags": [
[
"latest",
"4.4GB"
],
[
"72b",
"41GB"
],
[
"7b",
"4.4GB"
],
[
"1.5b",
"935MB"
],
[
"0.5b",
"352MB"
],
[
"72b-instruct",
"41GB"
],
[
"72b-instruct-fp16",
"145GB"
],
[
"72b-instruct-q2_K",
"30GB"
],
[
"72b-instruct-q3_K_L",
"40GB"
],
[
"72b-instruct-q3_K_M",
"38GB"
],
[
"72b-instruct-q3_K_S",
"34GB"
],
[
"72b-instruct-q4_0",
"41GB"
],
[
"72b-instruct-q4_1",
"46GB"
],
[
"72b-instruct-q4_K_M",
"47GB"
],
[
"72b-instruct-q4_K_S",
"44GB"
],
[
"72b-instruct-q5_0",
"50GB"
],
[
"72b-instruct-q5_1",
"55GB"
],
[
"72b-instruct-q5_K_M",
"54GB"
],
[
"72b-instruct-q5_K_S",
"51GB"
],
[
"72b-instruct-q6_K",
"64GB"
],
[
"72b-instruct-q8_0",
"77GB"
],
[
"72b-text",
"41GB"
],
[
"72b-text-fp16",
"145GB"
],
[
"72b-text-q2_K",
"30GB"
],
[
"72b-text-q3_K_L",
"40GB"
],
[
"72b-text-q3_K_M",
"38GB"
],
[
"72b-text-q3_K_S",
"34GB"
],
[
"72b-text-q4_0",
"41GB"
],
[
"72b-text-q4_1",
"46GB"
],
[
"72b-text-q4_K_M",
"47GB"
],
[
"72b-text-q4_K_S",
"44GB"
],
[
"72b-text-q5_0",
"50GB"
],
[
"72b-text-q5_1",
"55GB"
],
[
"72b-text-q5_K_M",
"54GB"
],
[
"72b-text-q5_K_S",
"51GB"
],
[
"72b-text-q6_K",
"64GB"
],
[
"72b-text-q8_0",
"77GB"
],
[
"7b-instruct",
"4.4GB"
],
[
"7b-instruct-fp16",
"15GB"
],
[
"7b-instruct-q2_K",
"3.0GB"
],
[
"7b-instruct-q3_K_L",
"4.1GB"
],
[
"7b-instruct-q3_K_M",
"3.8GB"
],
[
"7b-instruct-q3_K_S",
"3.5GB"
],
[
"7b-instruct-q4_0",
"4.4GB"
],
[
"7b-instruct-q4_1",
"4.9GB"
],
[
"7b-instruct-q4_K_M",
"4.7GB"
],
[
"7b-instruct-q4_K_S",
"4.5GB"
],
[
"7b-instruct-q5_0",
"5.3GB"
],
[
"7b-instruct-q5_1",
"5.8GB"
],
[
"7b-instruct-q5_K_M",
"5.4GB"
],
[
"7b-instruct-q5_K_S",
"5.3GB"
],
[
"7b-instruct-q6_K",
"6.3GB"
],
[
"7b-instruct-q8_0",
"8.1GB"
],
[
"7b-text",
"4.4GB"
],
[
"7b-text-q2_K",
"3.0GB"
],
[
"7b-text-q3_K_L",
"4.1GB"
],
[
"7b-text-q3_K_M",
"3.8GB"
],
[
"7b-text-q3_K_S",
"3.5GB"
],
[
"7b-text-q4_0",
"4.4GB"
],
[
"7b-text-q4_1",
"4.9GB"
],
[
"7b-text-q4_K_M",
"4.7GB"
],
[
"7b-text-q4_K_S",
"4.5GB"
],
[
"7b-text-q5_0",
"5.3GB"
],
[
"7b-text-q5_1",
"5.8GB"
],
[
"7b-text-q8_0",
"8.1GB"
],
[
"1.5b-instruct",
"935MB"
],
[
"1.5b-instruct-fp16",
"3.1GB"
],
[
"1.5b-instruct-q2_K",
"676MB"
],
[
"1.5b-instruct-q3_K_L",
"880MB"
],
[
"1.5b-instruct-q3_K_M",
"824MB"
],
[
"1.5b-instruct-q3_K_S",
"761MB"
],
[
"1.5b-instruct-q4_0",
"935MB"
],
[
"1.5b-instruct-q4_1",
"1.0GB"
],
[
"1.5b-instruct-q4_K_M",
"986MB"
],
[
"1.5b-instruct-q4_K_S",
"940MB"
],
[
"1.5b-instruct-q5_0",
"1.1GB"
],
[
"1.5b-instruct-q5_1",
"1.2GB"
],
[
"1.5b-instruct-q5_K_M",
"1.1GB"
],
[
"1.5b-instruct-q5_K_S",
"1.1GB"
],
[
"1.5b-instruct-q6_K",
"1.3GB"
],
[
"1.5b-instruct-q8_0",
"1.6GB"
],
[
"0.5b-instruct",
"352MB"
],
[
"0.5b-instruct-fp16",
"994MB"
],
[
"0.5b-instruct-q2_K",
"339MB"
],
[
"0.5b-instruct-q3_K_L",
"369MB"
],
[
"0.5b-instruct-q3_K_M",
"355MB"
],
[
"0.5b-instruct-q3_K_S",
"338MB"
],
[
"0.5b-instruct-q4_0",
"352MB"
],
[
"0.5b-instruct-q4_1",
"375MB"
],
[
"0.5b-instruct-q4_K_M",
"398MB"
],
[
"0.5b-instruct-q4_K_S",
"385MB"
],
[
"0.5b-instruct-q5_0",
"397MB"
],
[
"0.5b-instruct-q5_1",
"419MB"
],
[
"0.5b-instruct-q5_K_M",
"420MB"
],
[
"0.5b-instruct-q5_K_S",
"413MB"
],
[
"0.5b-instruct-q6_K",
"506MB"
],
[
"0.5b-instruct-q8_0",
"531MB"
]
],
"image": false,
"author": "Alibaba"
},
"deepseek-coder-v2": {
"url": "https://ollama.com/library/deepseek-coder-v2",
"description": "An open-source Mixture-of-Experts code language model that achieves performance comparable to GPT4-Turbo in code-specific tasks.",
"tags": [
[
"latest",
"8.9GB"
],
[
"236b",
"133GB"
],
[
"16b",
"8.9GB"
],
[
"lite",
"8.9GB"
],
[
"236b-instruct-q4_k_m",
"142GB"
],
[
"236b-instruct-fp16",
"472GB"
],
[
"236b-instruct-q2_K",
"86GB"
],
[
"236b-instruct-q3_K_L",
"122GB"
],
[
"236b-instruct-q3_K_M",
"113GB"
],
[
"236b-instruct-q3_K_S",
"102GB"
],
[
"236b-instruct-q4_0",
"133GB"
],
[
"236b-instruct-q4_1",
"148GB"
],
[
"236b-instruct-q4_K_M",
"142GB"
],
[
"236b-instruct-q4_K_S",
"134GB"
],
[
"236b-instruct-q5_0",
"162GB"
],
[
"236b-instruct-q5_1",
"177GB"
],
[
"236b-instruct-q5_K_M",
"167GB"
],
[
"236b-instruct-q5_K_S",
"162GB"
],
[
"236b-instruct-q6_K",
"194GB"
],
[
"236b-instruct-q8_0",
"251GB"
],
[
"16b-lite-base-fp16",
"31GB"
],
[
"16b-lite-base-q2_K",
"6.4GB"
],
[
"16b-lite-base-q3_K_L",
"8.5GB"
],
[
"16b-lite-base-q3_K_M",
"8.1GB"
],
[
"16b-lite-base-q3_K_S",
"7.5GB"
],
[
"16b-lite-base-q4_0",
"8.9GB"
],
[
"16b-lite-base-q4_1",
"9.9GB"
],
[
"16b-lite-base-q4_K_M",
"10GB"
],
[
"16b-lite-base-q4_K_S",
"9.5GB"
],
[
"16b-lite-base-q5_0",
"11GB"
],
[
"16b-lite-base-q5_1",
"12GB"
],
[
"16b-lite-base-q5_K_M",
"12GB"
],
[
"16b-lite-base-q5_K_S",
"11GB"
],
[
"16b-lite-base-q6_K",
"14GB"
],
[
"16b-lite-base-q8_0",
"17GB"
],
[
"16b-lite-instruct-fp16",
"31GB"
],
[
"16b-lite-instruct-q2_K",
"6.4GB"
],
[
"16b-lite-instruct-q3_K_L",
"8.5GB"
],
[
"16b-lite-instruct-q3_K_M",
"8.1GB"
],
[
"16b-lite-instruct-q3_K_S",
"7.5GB"
],
[
"16b-lite-instruct-q4_0",
"8.9GB"
],
[
"16b-lite-instruct-q4_1",
"9.9GB"
],
[
"16b-lite-instruct-q4_K_M",
"10GB"
],
[
"16b-lite-instruct-q4_K_S",
"9.5GB"
],
[
"16b-lite-instruct-q5_0",
"11GB"
],
[
"16b-lite-instruct-q5_1",
"12GB"
],
[
"16b-lite-instruct-q5_K_M",
"12GB"
],
[
"16b-lite-instruct-q5_K_S",
"11GB"
],
[
"16b-lite-instruct-q6_K",
"14GB"
],
[
"16b-lite-instruct-q8_0",
"17GB"
]
],
"image": false,
"author": "DeepSeek Team"
},
"phi3": {
"url": "https://ollama.com/library/phi3",
"description": "Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art open models by Microsoft.",
"tags": [
[
"latest",
"2.2GB"
],
[
"14b",
"7.9GB"
],
[
"3.8b",
"2.2GB"
],
[
"instruct",
"2.2GB"
],
[
"medium",
"7.9GB"
],
[
"mini",
"2.2GB"
],
[
"14b-instruct",
"7.9GB"
],
[
"14b-medium-128k-instruct-f16",
"28GB"
],
[
"14b-medium-4k-instruct-f16",
"28GB"
],
[
"14b-medium-128k-instruct-q2_K",
"5.1GB"
],
[
"14b-medium-128k-instruct-q3_K_L",
"7.5GB"
],
[
"14b-medium-128k-instruct-q3_K_M",
"6.9GB"
],
[
"14b-medium-128k-instruct-q3_K_S",
"6.1GB"
],
[
"14b-medium-128k-instruct-q4_0",
"7.9GB"
],
[
"14b-medium-128k-instruct-q4_1",
"8.8GB"
],
[
"14b-medium-128k-instruct-q4_K_M",
"8.6GB"
],
[
"14b-medium-128k-instruct-q4_K_S",
"8.0GB"
],
[
"14b-medium-128k-instruct-q5_0",
"9.6GB"
],
[
"14b-medium-128k-instruct-q5_1",
"10GB"
],
[
"14b-medium-128k-instruct-q5_K_M",
"10GB"
],
[
"14b-medium-128k-instruct-q5_K_S",
"9.6GB"
],
[
"14b-medium-128k-instruct-q6_K",
"11GB"
],
[
"14b-medium-4k-instruct-q2_K",
"5.1GB"
],
[
"14b-medium-4k-instruct-q3_K_L",
"7.5GB"
],
[
"14b-medium-4k-instruct-q3_K_M",
"6.9GB"
],
[
"14b-medium-4k-instruct-q3_K_S",
"6.1GB"
],
[
"14b-medium-4k-instruct-q4_0",
"7.9GB"
],
[
"14b-medium-4k-instruct-q4_1",
"8.8GB"
],
[
"14b-medium-4k-instruct-q4_K_M",
"8.6GB"
],
[
"14b-medium-4k-instruct-q4_K_S",
"8.0GB"
],
[
"14b-medium-4k-instruct-q5_0",
"9.6GB"
],
[
"14b-medium-4k-instruct-q5_1",
"10GB"
],
[
"14b-medium-4k-instruct-q5_K_M",
"10GB"
],
[
"14b-medium-4k-instruct-q5_K_S",
"9.6GB"
],
[
"14b-medium-4k-instruct-q6_K",
"11GB"
],
[
"14b-medium-4k-instruct-q8_0",
"15GB"
],
[
"3.8b-instruct",
"2.2GB"
],
[
"3.8b-mini-4k-instruct-f16",
"7.6GB"
],
[
"3.8b-mini-128k-instruct-f16",
"7.6GB"
],
[
"3.8b-mini-128k-instruct-fp16",
"7.6GB"
],
[
"3.8b-mini-128k-instruct-q2_K",
"1.4GB"
],
[
"3.8b-mini-128k-instruct-q3_K_L",
"2.1GB"
],
[
"3.8b-mini-128k-instruct-q3_K_M",
"2.0GB"
],
[
"3.8b-mini-128k-instruct-q3_K_S",
"1.7GB"
],
[
"3.8b-mini-128k-instruct-q4_0",
"2.2GB"
],
[
"3.8b-mini-128k-instruct-q4_1",
"2.4GB"
],
[
"3.8b-mini-128k-instruct-q4_K_M",
"2.4GB"
],
[
"3.8b-mini-128k-instruct-q4_K_S",
"2.2GB"
],
[
"3.8b-mini-128k-instruct-q5_0",
"2.6GB"
],
[
"3.8b-mini-128k-instruct-q5_1",
"2.9GB"
],
[
"3.8b-mini-128k-instruct-q5_K_M",
"2.8GB"
],
[
"3.8b-mini-128k-instruct-q5_K_S",
"2.6GB"
],
[
"3.8b-mini-128k-instruct-q6_K",
"3.1GB"
],
[
"3.8b-mini-128k-instruct-q8_0",
"4.1GB"
],
[
"3.8b-mini-4k-instruct-fp16",
"7.6GB"
],
[
"3.8b-mini-4k-instruct-q2_K",
"1.4GB"
],
[
"3.8b-mini-4k-instruct-q3_K_L",
"2.1GB"
],
[
"3.8b-mini-4k-instruct-q3_K_M",
"2.0GB"
],
[
"3.8b-mini-4k-instruct-q3_K_S",
"1.7GB"
],
[
"3.8b-mini-4k-instruct-q4_0",
"2.2GB"
],
[
"3.8b-mini-4k-instruct-q4_1",
"2.4GB"
],
[
"3.8b-mini-4k-instruct-q4_K_M",
"2.4GB"
],
[
"3.8b-mini-4k-instruct-q4_K_S",
"2.2GB"
],
[
"3.8b-mini-4k-instruct-q5_0",
"2.6GB"
],
[
"3.8b-mini-4k-instruct-q5_1",
"2.9GB"
],
[
"3.8b-mini-4k-instruct-q5_K_M",
"2.8GB"
],
[
"3.8b-mini-4k-instruct-q5_K_S",
"2.6GB"
],
[
"3.8b-mini-4k-instruct-q6_K",
"3.1GB"
],
[
"3.8b-mini-4k-instruct-q8_0",
"4.1GB"
],
[
"3.8b-mini-instruct-4k-fp16",
"7.6GB"
],
[
"mini-128k",
"2.2GB"
],
[
"medium-128k",
"7.9GB"
],
[
"mini-4k",
"2.4GB"
]
],
"image": false,
"author": "Microsoft"
},
"mistral": {
"url": "https://ollama.com/library/mistral",
"description": "The 7B model released by Mistral AI, updated to version 0.3.",
"tags": [
[
"latest",
"4.1GB"
],
[
"7b",
"4.1GB"
],
[
"instruct",
"4.1GB"
],
[
"text",
"4.1GB"
],
[
"v0.1",
"4.1GB"
],
[
"v0.2",
"4.1GB"
],
[
"v0.3",
"4.1GB"
],
[
"7b-instruct",
"4.1GB"
],
[
"7b-instruct-v0.2-fp16",
"14GB"
],
[
"7b-instruct-v0.2-q2_K",
"3.1GB"
],
[
"7b-instruct-v0.2-q3_K_L",
"3.8GB"
],
[
"7b-instruct-v0.2-q3_K_M",
"3.5GB"
],
[
"7b-instruct-v0.2-q3_K_S",
"3.2GB"
],
[
"7b-instruct-v0.2-q4_0",
"4.1GB"
],
[
"7b-instruct-v0.2-q4_1",
"4.6GB"
],
[
"7b-instruct-v0.2-q4_K_M",
"4.4GB"
],
[
"7b-instruct-v0.2-q4_K_S",
"4.1GB"
],
[
"7b-instruct-v0.2-q5_0",
"5.0GB"
],
[
"7b-instruct-v0.2-q5_1",
"5.4GB"
],
[
"7b-instruct-v0.2-q5_K_M",
"5.1GB"
],
[
"7b-instruct-v0.2-q5_K_S",
"5.0GB"
],
[
"7b-instruct-v0.2-q6_K",
"5.9GB"
],
[
"7b-instruct-v0.2-q8_0",
"7.7GB"
],
[
"7b-instruct-v0.3-fp16",
"14GB"
],
[
"7b-instruct-v0.3-q2_K",
"2.7GB"
],
[
"7b-instruct-v0.3-q3_K_L",
"3.8GB"
],
[
"7b-instruct-v0.3-q3_K_M",
"3.5GB"
],
[
"7b-instruct-v0.3-q3_K_S",
"3.2GB"
],
[
"7b-instruct-v0.3-q4_0",
"4.1GB"
],
[
"7b-instruct-v0.3-q4_1",
"4.6GB"
],
[
"7b-instruct-q4_0",
"4.1GB"
],
[
"7b-instruct-v0.3-q5_K_M",
"5.1GB"
],
[
"7b-instruct-v0.3-q8_0",
"7.7GB"
],
[
"7b-instruct-v0.3-q4_K_S",
"4.1GB"
],
[
"7b-instruct-q3_K_M",
"3.5GB"
],
[
"7b-instruct-fp16",
"14GB"
],
[
"7b-instruct-q3_K_L",
"3.8GB"
],
[
"7b-instruct-v0.3-q5_0",
"5.0GB"
],
[
"7b-instruct-q2_K",
"3.1GB"
],
[
"7b-instruct-q3_K_S",
"3.2GB"
],
[
"7b-instruct-v0.3-q6_K",
"5.9GB"
],
[
"7b-instruct-v0.3-q5_K_S",
"5.0GB"
],
[
"7b-instruct-v0.3-q5_1",
"5.4GB"
],
[
"7b-instruct-v0.3-q4_K_M",
"4.4GB"
],
[
"7b-instruct-q6_K",
"5.9GB"
],
[
"7b-instruct-q5_K_S",
"5.0GB"
],
[
"7b-instruct-q4_K_S",
"4.1GB"
],
[
"7b-instruct-q5_K_M",
"5.1GB"
],
[
"7b-instruct-q4_1",
"4.6GB"
],
[
"7b-instruct-q5_0",
"5.0GB"
],
[
"7b-instruct-q4_K_M",
"4.4GB"
],
[
"7b-instruct-q5_1",
"5.4GB"
],
[
"7b-instruct-q8_0",
"7.7GB"
],
[
"7b-text",
"4.1GB"
],
[
"7b-text-fp16",
"14GB"
],
[
"7b-text-q2_K",
"3.1GB"
],
[
"7b-text-q3_K_L",
"3.8GB"
],
[
"7b-text-q3_K_M",
"3.5GB"
],
[
"7b-text-v0.2-q3_K_M",
"3.5GB"
],
[
"7b-text-q8_0",
"7.7GB"
],
[
"7b-text-q5_K_S",
"5.0GB"
],
[
"7b-text-q3_K_S",
"3.2GB"
],
[
"7b-text-q5_0",
"5.0GB"
],
[
"7b-text-q5_K_M",
"5.1GB"
],
[
"7b-text-q4_K_M",
"4.4GB"
],
[
"7b-text-q4_1",
"4.6GB"
],
[
"7b-text-v0.2-q3_K_L",
"3.8GB"
],
[
"7b-text-q6_K",
"5.9GB"
],
[
"7b-text-q4_0",
"4.1GB"
],
[
"7b-text-q5_1",
"5.4GB"
],
[
"7b-text-v0.2-q2_K",
"2.7GB"
],
[
"7b-text-v0.2-fp16",
"14GB"
],
[
"7b-text-q4_K_S",
"4.1GB"
],
[
"7b-text-v0.2-q3_K_S",
"3.2GB"
],
[
"7b-text-v0.2-q4_0",
"4.1GB"
],
[
"7b-text-v0.2-q4_1",
"4.6GB"
],
[
"7b-text-v0.2-q4_K_M",
"4.4GB"
],
[
"7b-text-v0.2-q4_K_S",
"4.1GB"
],
[
"7b-text-v0.2-q5_0",
"5.0GB"
],
[
"7b-text-v0.2-q5_1",
"5.4GB"
],
[
"7b-text-v0.2-q5_K_M",
"5.1GB"
],
[
"7b-text-v0.2-q5_K_S",
"5.0GB"
],
[
"7b-text-v0.2-q6_K",
"5.9GB"
],
[
"7b-text-v0.2-q8_0",
"7.7GB"
]
],
"image": false,
"author": "Mistral AI"
},
"mixtral": {
"url": "https://ollama.com/library/mixtral",
"description": "A set of Mixture of Experts (MoE) model with open weights by Mistral AI in 8x7b and 8x22b parameter sizes.",
"tags": [
[
"latest",
"26GB"
],
[
"8x7b",
"26GB"
],
[
"8x22b",
"80GB"
],
[
"instruct",
"26GB"
],
[
"text",
"26GB"
],
[
"v0.1",
"80GB"
],
[
"8x22b-instruct",
"80GB"
],
[
"8x22b-instruct-v0.1-fp16",
"281GB"
],
[
"8x22b-instruct-v0.1-q2_K",
"52GB"
],
[
"8x22b-instruct-v0.1-q3_K_L",
"73GB"
],
[
"8x22b-instruct-v0.1-q3_K_M",
"68GB"
],
[
"8x22b-instruct-v0.1-q3_K_S",
"62GB"
],
[
"8x22b-instruct-v0.1-q4_0",
"80GB"
],
[
"8x22b-instruct-v0.1-q4_1",
"88GB"
],
[
"8x22b-instruct-v0.1-q4_K_M",
"86GB"
],
[
"8x22b-instruct-v0.1-q4_K_S",
"80GB"
],
[
"8x22b-instruct-v0.1-q5_0",
"97GB"
],
[
"8x22b-instruct-v0.1-q5_1",
"106GB"
],
[
"8x22b-instruct-v0.1-q5_K_M",
"100GB"
],
[
"8x22b-instruct-v0.1-q5_K_S",
"97GB"
],
[
"8x22b-instruct-v0.1-q6_K",
"116GB"
],
[
"8x22b-instruct-v0.1-q8_0",
"149GB"
],
[
"8x7b-instruct-v0.1-fp16",
"93GB"
],
[
"8x7b-instruct-v0.1-q2_K",
"16GB"
],
[
"8x7b-instruct-v0.1-q3_K_L",
"20GB"
],
[
"8x7b-instruct-v0.1-q3_K_M",
"20GB"
],
[
"8x7b-instruct-v0.1-q3_K_S",
"20GB"
],
[
"8x7b-instruct-v0.1-q4_0",
"26GB"
],
[
"8x7b-instruct-v0.1-q4_1",
"29GB"
],
[
"8x7b-instruct-v0.1-q4_K_M",
"26GB"
],
[
"8x7b-instruct-v0.1-q4_K_S",
"26GB"
],
[
"8x7b-instruct-v0.1-q5_0",
"32GB"
],
[
"8x7b-instruct-v0.1-q5_1",
"35GB"
],
[
"8x7b-instruct-v0.1-q5_K_M",
"32GB"
],
[
"8x7b-instruct-v0.1-q5_K_S",
"32GB"
],
[
"8x7b-instruct-v0.1-q6_K",
"38GB"
],
[
"8x7b-instruct-v0.1-q8_0",
"50GB"
],
[
"8x22b-text",
"80GB"
],
[
"8x22b-text-v0.1-fp16",
"281GB"
],
[
"8x22b-text-v0.1-q2_K",
"52GB"
],
[
"8x22b-text-v0.1-q3_K_L",
"73GB"
],
[
"8x22b-text-v0.1-q3_K_M",
"68GB"
],
[
"8x22b-text-v0.1-q3_K_S",
"61GB"
],
[
"8x22b-text-v0.1-q4_0",
"80GB"
],
[
"8x22b-text-v0.1-q4_1",
"88GB"
],
[
"8x22b-text-v0.1-q4_K_M",
"86GB"
],
[
"8x22b-text-v0.1-q4_K_S",
"80GB"
],
[
"8x22b-text-v0.1-q5_0",
"97GB"
],
[
"8x22b-text-v0.1-q5_1",
"106GB"
],
[
"8x22b-text-v0.1-q5_K_M",
"100GB"
],
[
"8x22b-text-v0.1-q5_K_S",
"97GB"
],
[
"8x22b-text-v0.1-q6_K",
"116GB"
],
[
"8x22b-text-v0.1-q8_0",
"149GB"
],
[
"8x7b-text-v0.1-fp16",
"93GB"
],
[
"8x7b-text-v0.1-q2_K",
"16GB"
],
[
"8x7b-text-v0.1-q3_K_L",
"20GB"
],
[
"8x7b-text-v0.1-q3_K_M",
"20GB"
],
[
"8x7b-text-v0.1-q3_K_S",
"20GB"
],
[
"8x7b-text-v0.1-q4_0",
"26GB"
],
[
"8x7b-text-v0.1-q4_1",
"29GB"
],
[
"8x7b-text-v0.1-q4_K_M",
"26GB"
],
[
"8x7b-text-v0.1-q4_K_S",
"26GB"
],
[
"8x7b-text-v0.1-q5_0",
"32GB"
],
[
"8x7b-text-v0.1-q5_1",
"35GB"
],
[
"8x7b-text-v0.1-q5_K_M",
"32GB"
],
[
"8x7b-text-v0.1-q5_K_S",
"32GB"
],
[
"8x7b-text-v0.1-q6_K",
"38GB"
],
[
"8x7b-text-v0.1-q8_0",
"50GB"
],
[
"v0.1-instruct",
"80GB"
]
],
"image": false,
"author": "Mistral AI"
},
"codegemma": {
"url": "https://ollama.com/library/codegemma",
"description": "CodeGemma is a collection of powerful, lightweight models that can perform a variety of coding tasks like fill-in-the-middle code completion, code generation, natural language understanding, mathematical reasoning, and instruction following.",
"tags": [
[
"latest",
"5.0GB"
],
[
"7b",
"5.0GB"
],
[
"2b",
"1.6GB"
],
[
"code",
"1.6GB"
],
[
"instruct",
"5.0GB"
],
[
"7b-code",
"5.0GB"
],
[
"7b-code-fp16",
"17GB"
],
[
"7b-code-q2_K",
"3.5GB"
],
[
"7b-code-q3_K_L",
"4.7GB"
],
[
"7b-code-q3_K_M",
"4.4GB"
],
[
"7b-code-q3_K_S",
"4.0GB"
],
[
"7b-code-q4_0",
"5.0GB"
],
[
"7b-code-q4_1",
"5.5GB"
],
[
"7b-code-q4_K_M",
"5.3GB"
],
[
"7b-code-q4_K_S",
"5.0GB"
],
[
"7b-code-q5_0",
"6.0GB"
],
[
"7b-code-q5_1",
"6.5GB"
],
[
"7b-code-q5_K_M",
"6.1GB"
],
[
"7b-code-q5_K_S",
"6.0GB"
],
[
"7b-code-q6_K",
"7.0GB"
],
[
"7b-code-q8_0",
"9.1GB"
],
[
"7b-instruct",
"5.0GB"
],
[
"7b-instruct-v1.1-fp16",
"17GB"
],
[
"7b-instruct-v1.1-q2_K",
"3.5GB"
],
[
"7b-instruct-v1.1-q3_K_L",
"4.7GB"
],
[
"7b-instruct-q3_K_S",
"4.0GB"
],
[
"7b-instruct-v1.1-q6_K",
"7.0GB"
],
[
"7b-instruct-v1.1-q5_K_M",
"6.1GB"
],
[
"7b-instruct-v1.1-q8_0",
"9.1GB"
],
[
"7b-instruct-v1.1-q4_0",
"5.0GB"
],
[
"7b-instruct-v1.1-q5_K_S",
"6.0GB"
],
[
"7b-instruct-fp16",
"17GB"
],
[
"7b-instruct-v1.1-q5_1",
"6.5GB"
],
[
"7b-instruct-v1.1-q4_1",
"5.5GB"
],
[
"7b-instruct-v1.1-q5_0",
"6.0GB"
],
[
"7b-instruct-v1.1-q3_K_M",
"4.4GB"
],
[
"7b-instruct-v1.1-q4_K_S",
"5.0GB"
],
[
"7b-instruct-v1.1-q4_K_M",
"5.3GB"
],
[
"7b-instruct-q3_K_M",
"4.4GB"
],
[
"7b-instruct-v1.1-q3_K_S",
"4.0GB"
],
[
"7b-instruct-q2_K",
"3.5GB"
],
[
"7b-instruct-q3_K_L",
"4.7GB"
],
[
"7b-instruct-q4_0",
"5.0GB"
],
[
"7b-instruct-q4_1",
"5.5GB"
],
[
"7b-instruct-q4_K_M",
"5.3GB"
],
[
"7b-instruct-q4_K_S",
"5.0GB"
],
[
"7b-instruct-q5_0",
"6.0GB"
],
[
"7b-instruct-q5_1",
"6.5GB"
],
[
"7b-instruct-q5_K_M",
"6.1GB"
],
[
"7b-instruct-q5_K_S",
"6.0GB"
],
[
"7b-instruct-q6_K",
"7.0GB"
],
[
"7b-instruct-q8_0",
"9.1GB"
],
[
"7b-v1.1",
"5.0GB"
],
[
"2b-code",
"1.6GB"
],
[
"2b-code-fp16",
"5.0GB"
],
[
"2b-code-q2_K",
"1.2GB"
],
[
"2b-code-q3_K_L",
"1.5GB"
],
[
"2b-code-q3_K_M",
"1.4GB"
],
[
"2b-code-q3_K_S",
"1.3GB"
],
[
"2b-code-q4_0",
"1.6GB"
],
[
"2b-code-q4_1",
"1.7GB"
],
[
"2b-code-q4_K_M",
"1.6GB"
],
[
"2b-code-q4_K_S",
"1.6GB"
],
[
"2b-code-q5_0",
"1.8GB"
],
[
"2b-code-v1.1-q4_1",
"1.7GB"
],
[
"2b-code-v1.1-fp16",
"5.0GB"
],
[
"2b-code-v1.1-q3_K_S",
"1.3GB"
],
[
"2b-code-q5_1",
"1.9GB"
],
[
"2b-code-v1.1-q3_K_L",
"1.5GB"
],
[
"2b-code-v1.1-q3_K_M",
"1.4GB"
],
[
"2b-code-q5_K_S",
"1.8GB"
],
[
"2b-code-q8_0",
"2.7GB"
],
[
"2b-code-v1.1-q4_0",
"1.6GB"
],
[
"2b-code-v1.1-q2_K",
"1.2GB"
],
[
"2b-code-q6_K",
"2.1GB"
],
[
"2b-code-q5_K_M",
"1.8GB"
],
[
"2b-code-v1.1-q4_K_M",
"1.6GB"
],
[
"2b-code-v1.1-q4_K_S",
"1.6GB"
],
[
"2b-code-v1.1-q5_0",
"1.8GB"
],
[
"2b-code-v1.1-q5_1",
"1.9GB"
],
[
"2b-code-v1.1-q5_K_M",
"1.8GB"
],
[
"2b-code-v1.1-q5_K_S",
"1.8GB"
],
[
"2b-code-v1.1-q6_K",
"2.1GB"
],
[
"2b-code-v1.1-q8_0",
"2.7GB"
],
[
"2b-v1.1",
"1.6GB"
]
],
"image": false,
"author": "Google DeepMind"
},
"command-r": {
"url": "https://ollama.com/library/command-r",
"description": "Command R is a Large Language Model optimized for conversational interaction and long context tasks.",
"tags": [
[
"latest",
"20GB"
],
[
"35b",
"20GB"
],
[
"v0.1",
"20GB"
],
[
"35b-v0.1-fp16",
"70GB"
],
[
"35b-v0.1-q2_K",
"14GB"
],
[
"35b-v0.1-q3_K_L",
"19GB"
],
[
"35b-v0.1-q3_K_M",
"18GB"
],
[
"35b-v0.1-q3_K_S",
"16GB"
],
[
"35b-v0.1-q4_0",
"20GB"
],
[
"35b-v0.1-q4_1",
"22GB"
],
[
"35b-v0.1-q4_K_M",
"22GB"
],
[
"35b-v0.1-q4_K_S",
"20GB"
],
[
"35b-v0.1-q5_1",
"26GB"
],
[
"35b-v0.1-q5_K_M",
"25GB"
],
[
"35b-v0.1-q5_K_S",
"24GB"
],
[
"35b-v0.1-q6_K",
"29GB"
],
[
"35b-v0.1-q8_0",
"37GB"
]
],
"image": false,
"author": "Cohere"
},
"command-r-plus": {
"url": "https://ollama.com/library/command-r-plus",
"description": "Command R+ is a powerful, scalable large language model purpose-built to excel at real-world enterprise use cases.",
"tags": [
[
"latest",
"59GB"
],
[
"104b",
"59GB"
],
[
"104b-fp16",
"208GB"
],
[
"104b-q2_K",
"39GB"
],
[
"104b-q4_0",
"59GB"
],
[
"104b-q8_0",
"110GB"
]
],
"image": false,
"author": "Cohere"
},
"llava": {
"url": "https://ollama.com/library/llava",
"description": "\ud83c\udf0b LLaVA is a novel end-to-end trained large multimodal model that combines a vision encoder and Vicuna for general-purpose visual and language understanding. Updated to version 1.6.",
"tags": [
[
"latest",
"4.7GB"
],
[
"34b",
"20GB"
],
[
"13b",
"8.0GB"
],
[
"7b",
"4.7GB"
],
[
"v1.6",
"4.7GB"
],
[
"34b-v1.6",
"20GB"
],
[
"34b-v1.6-fp16",
"69GB"
],
[
"34b-v1.6-q2_K",
"14GB"
],
[
"34b-v1.6-q3_K_L",
"19GB"
],
[
"34b-v1.6-q3_K_M",
"17GB"
],
[
"34b-v1.6-q3_K_S",
"16GB"
],
[
"34b-v1.6-q4_0",
"20GB"
],
[
"34b-v1.6-q4_1",
"22GB"
],
[
"34b-v1.6-q4_K_M",
"21GB"
],
[
"34b-v1.6-q4_K_S",
"20GB"
],
[
"34b-v1.6-q5_0",
"24GB"
],
[
"34b-v1.6-q5_1",
"27GB"
],
[
"34b-v1.6-q5_K_M",
"25GB"
],
[
"34b-v1.6-q5_K_S",
"24GB"
],
[
"34b-v1.6-q6_K",
"29GB"
],
[
"34b-v1.6-q8_0",
"37GB"
],
[
"13b-v1.5-fp16",
"27GB"
],
[
"13b-v1.5-q2_K",
"6.1GB"
],
[
"13b-v1.5-q3_K_L",
"7.6GB"
],
[
"13b-v1.5-q3_K_M",
"7.0GB"
],
[
"13b-v1.5-q3_K_S",
"6.3GB"
],
[
"13b-v1.5-q4_0",
"8.0GB"
],
[
"13b-v1.5-q4_1",
"8.8GB"
],
[
"13b-v1.5-q4_K_M",
"8.5GB"
],
[
"13b-v1.5-q4_K_S",
"8.1GB"
],
[
"13b-v1.5-q5_0",
"9.6GB"
],
[
"13b-v1.5-q5_1",
"10GB"
],
      [
        "13b-v1.5-q5_K_M",
        "9.9GB"
      ],
      [
        "13b-v1.5-q5_K_S",
        "9.6GB"
      ],
      [
        "13b-v1.5-q6_K",
        "11GB"
      ],
      [
        "13b-v1.5-q8_0",
        "14GB"
      ],
      [
        "13b-v1.6",
        "8.0GB"
      ],
      [
        "13b-v1.6-vicuna-fp16",
        "27GB"
      ],
      [
        "13b-v1.6-vicuna-q2_K",
        "5.5GB"
      ],
      [
        "13b-v1.6-vicuna-q3_K_L",
        "7.6GB"
      ],
      [
        "13b-v1.6-vicuna-q3_K_M",
        "7.0GB"
      ],
      [
        "13b-v1.6-vicuna-q3_K_S",
        "6.3GB"
      ],
      [
        "13b-v1.6-vicuna-q4_0",
        "8.0GB"
      ],
      [
        "13b-v1.6-vicuna-q4_1",
        "8.8GB"
      ],
      [
        "13b-v1.6-vicuna-q4_K_M",
        "8.5GB"
      ],
      [
        "13b-v1.6-vicuna-q4_K_S",
        "8.1GB"
      ],
      [
        "13b-v1.6-vicuna-q5_0",
        "9.6GB"
      ],
      [
        "13b-v1.6-vicuna-q5_1",
        "10GB"
      ],
      [
        "13b-v1.6-vicuna-q5_K_M",
        "9.9GB"
      ],
      [
        "13b-v1.6-vicuna-q5_K_S",
        "9.6GB"
      ],
      [
        "13b-v1.6-vicuna-q6_K",
        "11GB"
      ],
      [
        "13b-v1.6-vicuna-q8_0",
        "14GB"
      ],
      [
        "7b-v1.5-fp16",
        "14GB"
      ],
      [
        "7b-v1.5-q2_K",
        "3.5GB"
      ],
      [
        "7b-v1.5-q3_K_L",
        "4.2GB"
      ],
      [
        "7b-v1.5-q3_K_M",
        "3.9GB"
      ],
      [
        "7b-v1.5-q3_K_S",
        "3.6GB"
      ],
      [
        "7b-v1.5-q4_0",
        "4.5GB"
      ],
      [
        "7b-v1.5-q4_1",
        "4.9GB"
      ],
      [
        "7b-v1.5-q4_K_M",
        "4.7GB"
      ],
      [
        "7b-v1.5-q4_K_S",
        "4.5GB"
      ],
      [
        "7b-v1.5-q5_0",
        "5.3GB"
      ],
      [
        "7b-v1.5-q5_1",
        "5.7GB"
      ],
      [
        "7b-v1.5-q5_K_M",
        "5.4GB"
      ],
      [
        "7b-v1.5-q5_K_S",
        "5.3GB"
      ],
      [
        "7b-v1.5-q6_K",
        "6.2GB"
      ],
      [
        "7b-v1.5-q8_0",
        "7.8GB"
      ],
      [
        "7b-v1.6",
        "4.7GB"
      ],
      [
        "7b-v1.6-mistral-fp16",
        "15GB"
      ],
      [
        "7b-v1.6-mistral-q2_K",
        "3.3GB"
      ],
      [
        "7b-v1.6-mistral-q3_K_L",
        "4.4GB"
      ],
      [
        "7b-v1.6-mistral-q3_K_M",
        "4.1GB"
      ],
      [
        "7b-v1.6-mistral-q3_K_S",
        "3.8GB"
      ],
      [
        "7b-v1.6-mistral-q4_0",
        "4.7GB"
      ],
      [
        "7b-v1.6-mistral-q4_1",
        "5.2GB"
      ],
      [
        "7b-v1.6-mistral-q4_K_M",
        "5.0GB"
      ],
      [
        "7b-v1.6-mistral-q4_K_S",
        "4.8GB"
      ],
      [
        "7b-v1.6-mistral-q5_0",
        "5.6GB"
      ],
      [
        "7b-v1.6-mistral-q5_1",
        "6.1GB"
      ],
      [
        "7b-v1.6-mistral-q5_K_M",
        "5.8GB"
      ],
      [
        "7b-v1.6-mistral-q5_K_S",
        "5.6GB"
      ],
      [
        "7b-v1.6-mistral-q6_K",
        "6.6GB"
      ],
      [
        "7b-v1.6-mistral-q8_0",
        "8.3GB"
      ],
      [
        "7b-v1.6-vicuna-fp16",
        "14GB"
      ],
      [
        "7b-v1.6-vicuna-q2_K",
        "3.2GB"
      ],
      [
        "7b-v1.6-vicuna-q3_K_L",
        "4.2GB"
      ],
      [
        "7b-v1.6-vicuna-q3_K_M",
        "3.9GB"
      ],
      [
        "7b-v1.6-vicuna-q3_K_S",
        "3.6GB"
      ],
      [
        "7b-v1.6-vicuna-q4_0",
        "4.5GB"
      ],
      [
        "7b-v1.6-vicuna-q4_1",
        "4.9GB"
      ],
      [
        "7b-v1.6-vicuna-q4_K_M",
        "4.7GB"
      ],
      [
        "7b-v1.6-vicuna-q4_K_S",
        "4.5GB"
      ],
      [
        "7b-v1.6-vicuna-q5_0",
        "5.3GB"
      ],
      [
        "7b-v1.6-vicuna-q5_1",
        "5.7GB"
      ],
      [
        "7b-v1.6-vicuna-q5_K_M",
        "5.4GB"
      ],
      [
        "7b-v1.6-vicuna-q5_K_S",
        "5.3GB"
      ],
      [
        "7b-v1.6-vicuna-q6_K",
        "6.2GB"
      ],
      [
        "7b-v1.6-vicuna-q8_0",
        "7.8GB"
      ]
    ],
"image": true,
"author": "Haotian Liu"
},
"llama3": {
"url": "https://ollama.com/library/llama3",
"description": "Meta Llama 3: The most capable openly available LLM to date",
"tags": [
      [
        "latest",
        "4.7GB"
      ],
      [
        "70b",
        "40GB"
      ],
      [
        "8b",
        "4.7GB"
      ],
      [
        "instruct",
        "4.7GB"
      ],
      [
        "text",
        "4.7GB"
      ],
      [
        "70b-instruct",
        "40GB"
      ],
      [
        "70b-instruct-fp16",
        "141GB"
      ],
      [
        "70b-instruct-q2_K",
        "26GB"
      ],
      [
        "70b-instruct-q3_K_L",
        "37GB"
      ],
      [
        "70b-instruct-q3_K_M",
        "34GB"
      ],
      [
        "70b-instruct-q3_K_S",
        "31GB"
      ],
      [
        "70b-instruct-q4_0",
        "40GB"
      ],
      [
        "70b-instruct-q4_1",
        "44GB"
      ],
      [
        "70b-instruct-q4_K_M",
        "43GB"
      ],
      [
        "70b-instruct-q4_K_S",
        "40GB"
      ],
      [
        "70b-instruct-q5_0",
        "49GB"
      ],
      [
        "70b-instruct-q5_1",
        "53GB"
      ],
      [
        "70b-instruct-q5_K_M",
        "50GB"
      ],
      [
        "70b-instruct-q5_K_S",
        "49GB"