Alpaca/src/available_models.json
2024-10-06 23:26:08 -06:00

23994 lines
556 KiB
JSON

{
"llama3.2": {
"url": "https://ollama.com/library/llama3.2",
"description": "Meta's Llama 3.2 goes small with 1B and 3B models.",
"tags": [
[
"latest",
"2.0\u202fGB"
],
[
"1b",
"1.3\u202fGB"
],
[
"3b",
"2.0\u202fGB"
],
[
"1b-instruct-fp16",
"2.5\u202fGB"
],
[
"1b-instruct-q2_K",
"581\u202fMB"
],
[
"1b-instruct-q3_K_S",
"642\u202fMB"
],
[
"1b-instruct-q3_K_M",
"691\u202fMB"
],
[
"1b-instruct-q3_K_L",
"733\u202fMB"
],
[
"1b-instruct-q4_0",
"771\u202fMB"
],
[
"1b-instruct-q4_1",
"832\u202fMB"
],
[
"1b-instruct-q4_K_S",
"776\u202fMB"
],
[
"1b-instruct-q4_K_M",
"808\u202fMB"
],
[
"1b-instruct-q5_0",
"893\u202fMB"
],
[
"1b-instruct-q5_1",
"953\u202fMB"
],
[
"1b-instruct-q5_K_S",
"893\u202fMB"
],
[
"1b-instruct-q5_K_M",
"912\u202fMB"
],
[
"1b-instruct-q6_K",
"1.0\u202fGB"
],
[
"1b-instruct-q8_0",
"1.3\u202fGB"
],
[
"1b-text-fp16",
"2.5\u202fGB"
],
[
"1b-text-q2_K",
"581\u202fMB"
],
[
"1b-text-q3_K_S",
"642\u202fMB"
],
[
"1b-text-q3_K_M",
"691\u202fMB"
],
[
"1b-text-q3_K_L",
"733\u202fMB"
],
[
"1b-text-q4_0",
"771\u202fMB"
],
[
"1b-text-q4_1",
"832\u202fMB"
],
[
"1b-text-q4_K_S",
"776\u202fMB"
],
[
"1b-text-q4_K_M",
"808\u202fMB"
],
[
"1b-text-q5_0",
"893\u202fMB"
],
[
"1b-text-q5_1",
"953\u202fMB"
],
[
"1b-text-q5_K_S",
"893\u202fMB"
],
[
"1b-text-q5_K_M",
"912\u202fMB"
],
[
"1b-text-q6_K",
"1.0\u202fGB"
],
[
"1b-text-q8_0",
"1.3\u202fGB"
],
[
"3b-instruct-fp16",
"6.4\u202fGB"
],
[
"3b-instruct-q2_K",
"1.4\u202fGB"
],
[
"3b-instruct-q3_K_S",
"1.5\u202fGB"
],
[
"3b-instruct-q3_K_M",
"1.7\u202fGB"
],
[
"3b-instruct-q3_K_L",
"1.8\u202fGB"
],
[
"3b-instruct-q4_0",
"1.9\u202fGB"
],
[
"3b-instruct-q4_1",
"2.1\u202fGB"
],
[
"3b-instruct-q4_K_S",
"1.9\u202fGB"
],
[
"3b-instruct-q4_K_M",
"2.0\u202fGB"
],
[
"3b-instruct-q5_0",
"2.3\u202fGB"
],
[
"3b-instruct-q5_1",
"2.4\u202fGB"
],
[
"3b-instruct-q5_K_S",
"2.3\u202fGB"
],
[
"3b-instruct-q5_K_M",
"2.3\u202fGB"
],
[
"3b-instruct-q6_K",
"2.6\u202fGB"
],
[
"3b-instruct-q8_0",
"3.4\u202fGB"
],
[
"3b-text-fp16",
"6.4\u202fGB"
],
[
"3b-text-q2_K",
"1.4\u202fGB"
],
[
"3b-text-q3_K_S",
"1.5\u202fGB"
],
[
"3b-text-q3_K_M",
"1.7\u202fGB"
],
[
"3b-text-q3_K_L",
"1.8\u202fGB"
],
[
"3b-text-q4_0",
"1.9\u202fGB"
],
[
"3b-text-q4_1",
"2.1\u202fGB"
],
[
"3b-text-q4_K_S",
"1.9\u202fGB"
],
[
"3b-text-q4_K_M",
"2.0\u202fGB"
],
[
"3b-text-q5_0",
"2.3\u202fGB"
],
[
"3b-text-q5_1",
"2.4\u202fGB"
],
[
"3b-text-q5_K_S",
"2.3\u202fGB"
],
[
"3b-text-q5_K_M",
"2.3\u202fGB"
],
[
"3b-text-q6_K",
"2.6\u202fGB"
],
[
"3b-text-q8_0",
"3.4\u202fGB"
]
],
"image": false,
"author": "Meta"
},
"llama3.1": {
"url": "https://ollama.com/library/llama3.1",
"description": "Llama 3.1 is a new state-of-the-art model from Meta available in 8B, 70B and 405B parameter sizes.",
"tags": [
[
"latest",
"4.7\u202fGB"
],
[
"8b",
"4.7\u202fGB"
],
[
"70b",
"40\u202fGB"
],
[
"405b",
"229\u202fGB"
],
[
"8b-instruct-fp16",
"16\u202fGB"
],
[
"8b-instruct-q2_K",
"3.2\u202fGB"
],
[
"8b-instruct-q3_K_S",
"3.7\u202fGB"
],
[
"8b-instruct-q3_K_M",
"4.0\u202fGB"
],
[
"8b-instruct-q3_K_L",
"4.3\u202fGB"
],
[
"8b-instruct-q4_0",
"4.7\u202fGB"
],
[
"8b-instruct-q4_1",
"5.1\u202fGB"
],
[
"8b-instruct-q4_K_S",
"4.7\u202fGB"
],
[
"8b-instruct-q4_K_M",
"4.9\u202fGB"
],
[
"8b-instruct-q5_0",
"5.6\u202fGB"
],
[
"8b-instruct-q5_1",
"6.1\u202fGB"
],
[
"8b-instruct-q5_K_S",
"5.6\u202fGB"
],
[
"8b-instruct-q5_K_M",
"5.7\u202fGB"
],
[
"8b-instruct-q6_K",
"6.6\u202fGB"
],
[
"8b-instruct-q8_0",
"8.5\u202fGB"
],
[
"8b-text-fp16",
"16\u202fGB"
],
[
"8b-text-q2_K",
"3.2\u202fGB"
],
[
"8b-text-q3_K_S",
"3.7\u202fGB"
],
[
"8b-text-q3_K_M",
"4.0\u202fGB"
],
[
"8b-text-q3_K_L",
"4.3\u202fGB"
],
[
"8b-text-q4_0",
"4.7\u202fGB"
],
[
"8b-text-q4_1",
"5.1\u202fGB"
],
[
"8b-text-q4_K_S",
"4.7\u202fGB"
],
[
"8b-text-q4_K_M",
"4.9\u202fGB"
],
[
"8b-text-q5_0",
"5.6\u202fGB"
],
[
"8b-text-q5_1",
"6.1\u202fGB"
],
[
"8b-text-q5_K_S",
"5.6\u202fGB"
],
[
"8b-text-q5_K_M",
"5.7\u202fGB"
],
[
"8b-text-q6_K",
"6.6\u202fGB"
],
[
"8b-text-q8_0",
"8.5\u202fGB"
],
[
"70b-instruct-fp16",
"141\u202fGB"
],
[
"70b-instruct-q2_k",
"26\u202fGB"
],
[
"70b-instruct-q2_K",
"26\u202fGB"
],
[
"70b-instruct-q3_K_S",
"31\u202fGB"
],
[
"70b-instruct-q3_K_M",
"34\u202fGB"
],
[
"70b-instruct-q3_K_L",
"37\u202fGB"
],
[
"70b-instruct-q4_0",
"40\u202fGB"
],
[
"70b-instruct-q4_K_S",
"40\u202fGB"
],
[
"70b-instruct-q4_K_M",
"43\u202fGB"
],
[
"70b-instruct-q5_0",
"49\u202fGB"
],
[
"70b-instruct-q5_1",
"53\u202fGB"
],
[
"70b-instruct-q5_K_S",
"49\u202fGB"
],
[
"70b-instruct-q5_K_M",
"50\u202fGB"
],
[
"70b-instruct-q6_K",
"58\u202fGB"
],
[
"70b-instruct-q8_0",
"75\u202fGB"
],
[
"70b-text-fp16",
"141\u202fGB"
],
[
"70b-text-q2_K",
"26\u202fGB"
],
[
"70b-text-q3_K_S",
"31\u202fGB"
],
[
"70b-text-q3_K_M",
"34\u202fGB"
],
[
"70b-text-q3_K_L",
"37\u202fGB"
],
[
"70b-text-q4_0",
"40\u202fGB"
],
[
"70b-text-q4_1",
"44\u202fGB"
],
[
"70b-text-q4_K_S",
"40\u202fGB"
],
[
"70b-text-q4_K_M",
"43\u202fGB"
],
[
"70b-text-q5_0",
"49\u202fGB"
],
[
"70b-text-q5_1",
"53\u202fGB"
],
[
"70b-text-q5_K_S",
"49\u202fGB"
],
[
"70b-text-q5_K_M",
"50\u202fGB"
],
[
"70b-text-q6_K",
"58\u202fGB"
],
[
"70b-text-q8_0",
"75\u202fGB"
],
[
"405b-instruct-fp16",
"812\u202fGB"
],
[
"405b-instruct-q2_K",
"149\u202fGB"
],
[
"405b-instruct-q3_K_S",
"175\u202fGB"
],
[
"405b-instruct-q3_K_M",
"195\u202fGB"
],
[
"405b-instruct-q3_K_L",
"213\u202fGB"
],
[
"405b-instruct-q4_0",
"229\u202fGB"
],
[
"405b-instruct-q4_1",
"254\u202fGB"
],
[
"405b-instruct-q4_K_S",
"231\u202fGB"
],
[
"405b-instruct-q4_K_M",
"243\u202fGB"
],
[
"405b-instruct-q5_0",
"279\u202fGB"
],
[
"405b-instruct-q5_1",
"305\u202fGB"
],
[
"405b-instruct-q5_K_S",
"279\u202fGB"
],
[
"405b-instruct-q5_K_M",
"287\u202fGB"
],
[
"405b-instruct-q6_K",
"333\u202fGB"
],
[
"405b-instruct-q8_0",
"431\u202fGB"
],
[
"405b-text-fp16",
"812\u202fGB"
],
[
"405b-text-q2_K",
"149\u202fGB"
],
[
"405b-text-q3_K_S",
"175\u202fGB"
],
[
"405b-text-q3_K_M",
"195\u202fGB"
],
[
"405b-text-q3_K_L",
"213\u202fGB"
],
[
"405b-text-q4_0",
"229\u202fGB"
],
[
"405b-text-q4_1",
"254\u202fGB"
],
[
"405b-text-q4_K_S",
"231\u202fGB"
],
[
"405b-text-q4_K_M",
"243\u202fGB"
],
[
"405b-text-q5_0",
"279\u202fGB"
],
[
"405b-text-q5_1",
"305\u202fGB"
],
[
"405b-text-q5_K_S",
"279\u202fGB"
],
[
"405b-text-q5_K_M",
"287\u202fGB"
],
[
"405b-text-q6_K",
"333\u202fGB"
],
[
"405b-text-q8_0",
"431\u202fGB"
]
],
"image": false,
"author": "Meta"
},
"gemma2": {
"url": "https://ollama.com/library/gemma2",
"description": "Google Gemma 2 is a high-performing and efficient model available in three sizes: 2B, 9B, and 27B.",
"tags": [
[
"latest",
"5.4\u202fGB"
],
[
"2b",
"1.6\u202fGB"
],
[
"9b",
"5.4\u202fGB"
],
[
"27b",
"16\u202fGB"
],
[
"2b-instruct-fp16",
"5.2\u202fGB"
],
[
"2b-instruct-q2_K",
"1.2\u202fGB"
],
[
"2b-instruct-q3_K_S",
"1.4\u202fGB"
],
[
"2b-instruct-q3_K_M",
"1.5\u202fGB"
],
[
"2b-instruct-q3_K_L",
"1.6\u202fGB"
],
[
"2b-instruct-q4_0",
"1.6\u202fGB"
],
[
"2b-instruct-q4_1",
"1.8\u202fGB"
],
[
"2b-instruct-q4_K_S",
"1.6\u202fGB"
],
[
"2b-instruct-q4_K_M",
"1.7\u202fGB"
],
[
"2b-instruct-q5_0",
"1.9\u202fGB"
],
[
"2b-instruct-q5_1",
"2.0\u202fGB"
],
[
"2b-instruct-q5_K_S",
"1.9\u202fGB"
],
[
"2b-instruct-q5_K_M",
"1.9\u202fGB"
],
[
"2b-instruct-q6_K",
"2.2\u202fGB"
],
[
"2b-instruct-q8_0",
"2.8\u202fGB"
],
[
"2b-text-fp16",
"5.2\u202fGB"
],
[
"2b-text-q2_K",
"1.2\u202fGB"
],
[
"2b-text-q3_K_S",
"1.4\u202fGB"
],
[
"2b-text-q3_K_M",
"1.5\u202fGB"
],
[
"2b-text-q3_K_L",
"1.6\u202fGB"
],
[
"2b-text-q4_0",
"1.6\u202fGB"
],
[
"2b-text-q4_1",
"1.8\u202fGB"
],
[
"2b-text-q4_K_S",
"1.6\u202fGB"
],
[
"2b-text-q4_K_M",
"1.7\u202fGB"
],
[
"2b-text-q5_0",
"1.9\u202fGB"
],
[
"2b-text-q5_1",
"2.0\u202fGB"
],
[
"2b-text-q5_K_S",
"1.9\u202fGB"
],
[
"2b-text-q5_K_M",
"1.9\u202fGB"
],
[
"2b-text-q6_K",
"2.2\u202fGB"
],
[
"2b-text-q8_0",
"2.8\u202fGB"
],
[
"9b-instruct-fp16",
"18\u202fGB"
],
[
"9b-instruct-q2_K",
"3.8\u202fGB"
],
[
"9b-instruct-q3_K_S",
"4.3\u202fGB"
],
[
"9b-instruct-q3_K_M",
"4.8\u202fGB"
],
[
"9b-instruct-q3_K_L",
"5.1\u202fGB"
],
[
"9b-instruct-q4_0",
"5.4\u202fGB"
],
[
"9b-instruct-q4_1",
"6.0\u202fGB"
],
[
"9b-instruct-q4_K_S",
"5.5\u202fGB"
],
[
"9b-instruct-q4_K_M",
"5.8\u202fGB"
],
[
"9b-instruct-q5_0",
"6.5\u202fGB"
],
[
"9b-instruct-q5_1",
"7.0\u202fGB"
],
[
"9b-instruct-q5_K_S",
"6.5\u202fGB"
],
[
"9b-instruct-q5_K_M",
"6.6\u202fGB"
],
[
"9b-instruct-q6_K",
"7.6\u202fGB"
],
[
"9b-instruct-q8_0",
"9.8\u202fGB"
],
[
"9b-text-fp16",
"18\u202fGB"
],
[
"9b-text-q2_K",
"3.8\u202fGB"
],
[
"9b-text-q3_K_S",
"4.3\u202fGB"
],
[
"9b-text-q3_K_M",
"4.8\u202fGB"
],
[
"9b-text-q3_K_L",
"5.1\u202fGB"
],
[
"9b-text-q4_0",
"5.4\u202fGB"
],
[
"9b-text-q4_1",
"6.0\u202fGB"
],
[
"9b-text-q4_K_S",
"5.5\u202fGB"
],
[
"9b-text-q4_K_M",
"5.8\u202fGB"
],
[
"9b-text-q5_0",
"6.5\u202fGB"
],
[
"9b-text-q5_1",
"7.0\u202fGB"
],
[
"9b-text-q5_K_S",
"6.5\u202fGB"
],
[
"9b-text-q5_K_M",
"6.6\u202fGB"
],
[
"9b-text-q6_K",
"7.6\u202fGB"
],
[
"9b-text-q8_0",
"9.8\u202fGB"
],
[
"27b-instruct-fp16",
"54\u202fGB"
],
[
"27b-instruct-q2_K",
"10\u202fGB"
],
[
"27b-instruct-q3_K_S",
"12\u202fGB"
],
[
"27b-instruct-q3_K_M",
"13\u202fGB"
],
[
"27b-instruct-q3_K_L",
"15\u202fGB"
],
[
"27b-instruct-q4_0",
"16\u202fGB"
],
[
"27b-instruct-q4_1",
"17\u202fGB"
],
[
"27b-instruct-q4_K_S",
"16\u202fGB"
],
[
"27b-instruct-q4_K_M",
"17\u202fGB"
],
[
"27b-instruct-q5_0",
"19\u202fGB"
],
[
"27b-instruct-q5_1",
"21\u202fGB"
],
[
"27b-instruct-q5_K_S",
"19\u202fGB"
],
[
"27b-instruct-q5_K_M",
"19\u202fGB"
],
[
"27b-instruct-q6_K",
"22\u202fGB"
],
[
"27b-instruct-q8_0",
"29\u202fGB"
],
[
"27b-text-fp16",
"54\u202fGB"
],
[
"27b-text-q2_K",
"10\u202fGB"
],
[
"27b-text-q3_K_S",
"12\u202fGB"
],
[
"27b-text-q3_K_M",
"13\u202fGB"
],
[
"27b-text-q3_K_L",
"15\u202fGB"
],
[
"27b-text-q4_0",
"16\u202fGB"
],
[
"27b-text-q4_1",
"17\u202fGB"
],
[
"27b-text-q4_K_S",
"16\u202fGB"
],
[
"27b-text-q4_K_M",
"17\u202fGB"
],
[
"27b-text-q5_0",
"19\u202fGB"
],
[
"27b-text-q5_1",
"21\u202fGB"
],
[
"27b-text-q5_K_S",
"19\u202fGB"
],
[
"27b-text-q5_K_M",
"19\u202fGB"
],
[
"27b-text-q6_K",
"22\u202fGB"
],
[
"27b-text-q8_0",
"29\u202fGB"
]
],
"image": false,
"author": "Google DeepMind"
},
"qwen2.5": {
"url": "https://ollama.com/library/qwen2.5",
"description": "Qwen2.5 models are pretrained on Alibaba's latest large-scale dataset, encompassing up to 18 trillion tokens. The model supports up to 128K tokens and has multilingual support.",
"tags": [
[
"latest",
"4.7\u202fGB"
],
[
"0.5b",
"398\u202fMB"
],
[
"1.5b",
"986\u202fMB"
],
[
"3b",
"1.9\u202fGB"
],
[
"7b",
"4.7\u202fGB"
],
[
"14b",
"9.0\u202fGB"
],
[
"32b",
"20\u202fGB"
],
[
"72b",
"47\u202fGB"
],
[
"0.5b-base",
"398\u202fMB"
],
[
"0.5b-instruct",
"398\u202fMB"
],
[
"1.5b-instruct",
"986\u202fMB"
],
[
"3b-instruct",
"1.9\u202fGB"
],
[
"7b-instruct",
"4.7\u202fGB"
],
[
"14b-instruct",
"9.0\u202fGB"
],
[
"32b-instruct",
"20\u202fGB"
],
[
"72b-instruct",
"47\u202fGB"
],
[
"0.5b-base-q2_K",
"339\u202fMB"
],
[
"0.5b-base-q3_K_S",
"338\u202fMB"
],
[
"0.5b-base-q3_K_M",
"355\u202fMB"
],
[
"0.5b-base-q3_K_L",
"369\u202fMB"
],
[
"0.5b-base-q4_0",
"352\u202fMB"
],
[
"0.5b-base-q4_1",
"375\u202fMB"
],
[
"0.5b-base-q4_K_S",
"385\u202fMB"
],
[
"0.5b-base-q4_K_M",
"398\u202fMB"
],
[
"0.5b-base-q5_0",
"397\u202fMB"
],
[
"0.5b-base-q5_1",
"419\u202fMB"
],
[
"0.5b-base-q5_K_S",
"413\u202fMB"
],
[
"0.5b-base-q8_0",
"531\u202fMB"
],
[
"0.5b-instruct-fp16",
"994\u202fMB"
],
[
"0.5b-instruct-q2_K",
"339\u202fMB"
],
[
"0.5b-instruct-q3_K_S",
"338\u202fMB"
],
[
"0.5b-instruct-q3_K_M",
"355\u202fMB"
],
[
"0.5b-instruct-q3_K_L",
"369\u202fMB"
],
[
"0.5b-instruct-q4_0",
"352\u202fMB"
],
[
"0.5b-instruct-q4_1",
"375\u202fMB"
],
[
"0.5b-instruct-q4_K_S",
"385\u202fMB"
],
[
"0.5b-instruct-q4_K_M",
"398\u202fMB"
],
[
"0.5b-instruct-q5_0",
"397\u202fMB"
],
[
"0.5b-instruct-q5_1",
"419\u202fMB"
],
[
"0.5b-instruct-q5_K_S",
"413\u202fMB"
],
[
"0.5b-instruct-q5_K_M",
"420\u202fMB"
],
[
"0.5b-instruct-q6_K",
"506\u202fMB"
],
[
"0.5b-instruct-q8_0",
"531\u202fMB"
],
[
"1.5b-instruct-fp16",
"3.1\u202fGB"
],
[
"1.5b-instruct-q2_K",
"676\u202fMB"
],
[
"1.5b-instruct-q3_K_S",
"761\u202fMB"
],
[
"1.5b-instruct-q3_K_M",
"824\u202fMB"
],
[
"1.5b-instruct-q3_K_L",
"880\u202fMB"
],
[
"1.5b-instruct-q4_0",
"935\u202fMB"
],
[
"1.5b-instruct-q4_1",
"1.0\u202fGB"
],
[
"1.5b-instruct-q4_K_S",
"940\u202fMB"
],
[
"1.5b-instruct-q4_K_M",
"986\u202fMB"
],
[
"1.5b-instruct-q5_0",
"1.1\u202fGB"
],
[
"1.5b-instruct-q5_1",
"1.2\u202fGB"
],
[
"1.5b-instruct-q5_K_S",
"1.1\u202fGB"
],
[
"1.5b-instruct-q5_K_M",
"1.1\u202fGB"
],
[
"1.5b-instruct-q6_K",
"1.3\u202fGB"
],
[
"1.5b-instruct-q8_0",
"1.6\u202fGB"
],
[
"3b-instruct-fp16",
"6.2\u202fGB"
],
[
"3b-instruct-q2_K",
"1.3\u202fGB"
],
[
"3b-instruct-q3_K_S",
"1.5\u202fGB"
],
[
"3b-instruct-q3_K_M",
"1.6\u202fGB"
],
[
"3b-instruct-q3_K_L",
"1.7\u202fGB"
],
[
"3b-instruct-q4_0",
"1.8\u202fGB"
],
[
"3b-instruct-q4_1",
"2.0\u202fGB"
],
[
"3b-instruct-q4_K_S",
"1.8\u202fGB"
],
[
"3b-instruct-q4_K_M",
"1.9\u202fGB"
],
[
"3b-instruct-q5_0",
"2.2\u202fGB"
],
[
"3b-instruct-q5_1",
"2.3\u202fGB"
],
[
"3b-instruct-q5_K_S",
"2.2\u202fGB"
],
[
"3b-instruct-q5_K_M",
"2.2\u202fGB"
],
[
"3b-instruct-q6_K",
"2.5\u202fGB"
],
[
"3b-instruct-q8_0",
"3.3\u202fGB"
],
[
"7b-instruct-fp16",
"15\u202fGB"
],
[
"7b-instruct-q2_K",
"3.0\u202fGB"
],
[
"7b-instruct-q3_K_S",
"3.5\u202fGB"
],
[
"7b-instruct-q3_K_M",
"3.8\u202fGB"
],
[
"7b-instruct-q3_K_L",
"4.1\u202fGB"
],
[
"7b-instruct-q4_0",
"4.4\u202fGB"
],
[
"7b-instruct-q4_1",
"4.9\u202fGB"
],
[
"7b-instruct-q4_K_S",
"4.5\u202fGB"
],
[
"7b-instruct-q4_K_M",
"4.7\u202fGB"
],
[
"7b-instruct-q5_0",
"5.3\u202fGB"
],
[
"7b-instruct-q5_1",
"5.8\u202fGB"
],
[
"7b-instruct-q5_K_S",
"5.3\u202fGB"
],
[
"7b-instruct-q5_K_M",
"5.4\u202fGB"
],
[
"7b-instruct-q6_K",
"6.3\u202fGB"
],
[
"7b-instruct-q8_0",
"8.1\u202fGB"
],
[
"14b-instruct-fp16",
"30\u202fGB"
],
[
"14b-instruct-q2_K",
"5.8\u202fGB"
],
[
"14b-instruct-q3_K_S",
"6.7\u202fGB"
],
[
"14b-instruct-q3_K_M",
"7.3\u202fGB"
],
[
"14b-instruct-q3_K_L",
"7.9\u202fGB"
],
[
"14b-instruct-q4_0",
"8.5\u202fGB"
],
[
"14b-instruct-q4_1",
"9.4\u202fGB"
],
[
"14b-instruct-q4_K_S",
"8.6\u202fGB"
],
[
"14b-instruct-q4_K_M",
"9.0\u202fGB"
],
[
"14b-instruct-q5_0",
"10\u202fGB"
],
[
"14b-instruct-q5_1",
"11\u202fGB"
],
[
"14b-instruct-q5_K_S",
"10\u202fGB"
],
[
"14b-instruct-q5_K_M",
"11\u202fGB"
],
[
"14b-instruct-q6_K",
"12\u202fGB"
],
[
"14b-instruct-q8_0",
"16\u202fGB"
],
[
"32b-instruct-fp16",
"66\u202fGB"
],
[
"32b-instruct-q2_K",
"12\u202fGB"
],
[
"32b-instruct-q3_K_S",
"14\u202fGB"
],
[
"32b-instruct-q3_K_M",
"16\u202fGB"
],
[
"32b-instruct-q3_K_L",
"17\u202fGB"
],
[
"32b-instruct-q4_0",
"19\u202fGB"
],
[
"32b-instruct-q4_1",
"21\u202fGB"
],
[
"32b-instruct-q4_K_S",
"19\u202fGB"
],
[
"32b-instruct-q4_K_M",
"20\u202fGB"
],
[
"32b-instruct-q5_0",
"23\u202fGB"
],
[
"32b-instruct-q5_1",
"25\u202fGB"
],
[
"32b-instruct-q5_K_S",
"23\u202fGB"
],
[
"32b-instruct-q5_K_M",
"23\u202fGB"
],
[
"32b-instruct-q6_K",
"27\u202fGB"
],
[
"32b-instruct-q8_0",
"35\u202fGB"
],
[
"72b-instruct-fp16",
"145\u202fGB"
],
[
"72b-instruct-q2_K",
"30\u202fGB"
],
[
"72b-instruct-q3_K_S",
"34\u202fGB"
],
[
"72b-instruct-q3_K_M",
"38\u202fGB"
],
[
"72b-instruct-q3_K_L",
"40\u202fGB"
],
[
"72b-instruct-q4_0",
"41\u202fGB"
],
[
"72b-instruct-q4_1",
"46\u202fGB"
],
[
"72b-instruct-q4_K_S",
"44\u202fGB"
],
[
"72b-instruct-q4_K_M",
"47\u202fGB"
],
[
"72b-instruct-q5_0",
"50\u202fGB"
],
[
"72b-instruct-q5_1",
"55\u202fGB"
],
[
"72b-instruct-q5_K_S",
"51\u202fGB"
],
[
"72b-instruct-q5_K_M",
"54\u202fGB"
],
[
"72b-instruct-q6_K",
"64\u202fGB"
],
[
"72b-instruct-q8_0",
"77\u202fGB"
]
],
"image": false,
"author": "Alibaba"
},
"phi3.5": {
"url": "https://ollama.com/library/phi3.5",
"description": "A lightweight AI model with 3.8 billion parameters with performance overtaking similarly sized and larger models.",
"tags": [
[
"latest",
"2.2\u202fGB"
],
[
"3.8b",
"2.2\u202fGB"
],
[
"3.8b-mini-instruct-fp16",
"7.6\u202fGB"
],
[
"3.8b-mini-instruct-q2_K",
"1.4\u202fGB"
],
[
"3.8b-mini-instruct-q3_K_S",
"1.7\u202fGB"
],
[
"3.8b-mini-instruct-q3_K_M",
"2.0\u202fGB"
],
[
"3.8b-mini-instruct-q3_K_L",
"2.1\u202fGB"
],
[
"3.8b-mini-instruct-q4_0",
"2.2\u202fGB"
],
[
"3.8b-mini-instruct-q4_1",
"2.4\u202fGB"
],
[
"3.8b-mini-instruct-q4_K_S",
"2.2\u202fGB"
],
[
"3.8b-mini-instruct-q4_K_M",
"2.4\u202fGB"
],
[
"3.8b-mini-instruct-q5_0",
"2.6\u202fGB"
],
[
"3.8b-mini-instruct-q5_1",
"2.9\u202fGB"
],
[
"3.8b-mini-instruct-q5_K_S",
"2.6\u202fGB"
],
[
"3.8b-mini-instruct-q5_K_M",
"2.8\u202fGB"
],
[
"3.8b-mini-instruct-q6_K",
"3.1\u202fGB"
],
[
"3.8b-mini-instruct-q8_0",
"4.1\u202fGB"
]
],
"image": false,
"author": "Microsoft"
},
"nemotron-mini": {
"url": "https://ollama.com/library/nemotron-mini",
"description": "A commercial-friendly small language model by NVIDIA optimized for roleplay, RAG QA, and function calling.",
"tags": [
[
"latest",
"2.7\u202fGB"
],
[
"4b",
"2.7\u202fGB"
],
[
"4b-instruct-fp16",
"8.4\u202fGB"
],
[
"4b-instruct-q2_K",
"1.9\u202fGB"
],
[
"4b-instruct-q3_K_S",
"2.1\u202fGB"
],
[
"4b-instruct-q3_K_M",
"2.3\u202fGB"
],
[
"4b-instruct-q3_K_L",
"2.5\u202fGB"
],
[
"4b-instruct-q4_0",
"2.6\u202fGB"
],
[
"4b-instruct-q4_1",
"2.8\u202fGB"
],
[
"4b-instruct-q4_K_S",
"2.6\u202fGB"
],
[
"4b-instruct-q4_K_M",
"2.7\u202fGB"
],
[
"4b-instruct-q5_0",
"3.0\u202fGB"
],
[
"4b-instruct-q5_1",
"3.2\u202fGB"
],
[
"4b-instruct-q5_K_S",
"3.0\u202fGB"
],
[
"4b-instruct-q5_K_M",
"3.1\u202fGB"
],
[
"4b-instruct-q6_K",
"3.4\u202fGB"
],
[
"4b-instruct-q8_0",
"4.5\u202fGB"
]
],
"image": false,
"author": "Nvidia"
},
"mistral-small": {
"url": "https://ollama.com/library/mistral-small",
"description": "Mistral Small is a lightweight model designed for cost-effective use in tasks like translation and summarization.",
"tags": [
[
"latest",
"13\u202fGB"
],
[
"22b",
"13\u202fGB"
],
[
"22b-instruct-2409-fp16",
"44\u202fGB"
],
[
"22b-instruct-2409-q2_K",
"8.3\u202fGB"
],
[
"22b-instruct-2409-q3_K_S",
"9.6\u202fGB"
],
[
"22b-instruct-2409-q3_K_M",
"11\u202fGB"
],
[
"22b-instruct-2409-q3_K_L",
"12\u202fGB"
],
[
"22b-instruct-2409-q4_0",
"13\u202fGB"
],
[
"22b-instruct-2409-q4_1",
"14\u202fGB"
],
[
"22b-instruct-2409-q4_K_S",
"13\u202fGB"
],
[
"22b-instruct-2409-q4_K_M",
"13\u202fGB"
],
[
"22b-instruct-2409-q5_0",
"15\u202fGB"
],
[
"22b-instruct-2409-q5_1",
"17\u202fGB"
],
[
"22b-instruct-2409-q5_K_S",
"15\u202fGB"
],
[
"22b-instruct-2409-q5_K_M",
"16\u202fGB"
],
[
"22b-instruct-2409-q6_K",
"18\u202fGB"
],
[
"22b-instruct-2409-q8_0",
"24\u202fGB"
]
],
"image": false,
"author": "Mistral AI"
},
"mistral-nemo": {
"url": "https://ollama.com/library/mistral-nemo",
"description": "A state-of-the-art 12B model with 128k context length, built by Mistral AI in collaboration with NVIDIA.",
"tags": [
[
"latest",
"7.1\u202fGB"
],
[
"12b",
"7.1\u202fGB"
],
[
"12b-instruct-2407-fp16",
"25\u202fGB"
],
[
"12b-instruct-2407-q2_K",
"4.8\u202fGB"
],
[
"12b-instruct-2407-q3_K_S",
"5.5\u202fGB"
],
[
"12b-instruct-2407-q3_K_M",
"6.1\u202fGB"
],
[
"12b-instruct-2407-q3_K_L",
"6.6\u202fGB"
],
[
"12b-instruct-2407-q4_0",
"7.1\u202fGB"
],
[
"12b-instruct-2407-q4_1",
"7.8\u202fGB"
],
[
"12b-instruct-2407-q4_K_S",
"7.1\u202fGB"
],
[
"12b-instruct-2407-q4_K_M",
"7.5\u202fGB"
],
[
"12b-instruct-2407-q5_0",
"8.5\u202fGB"
],
[
"12b-instruct-2407-q5_1",
"9.2\u202fGB"
],
[
"12b-instruct-2407-q5_K_S",
"8.5\u202fGB"
],
[
"12b-instruct-2407-q5_K_M",
"8.7\u202fGB"
],
[
"12b-instruct-2407-q6_K",
"10\u202fGB"
],
[
"12b-instruct-2407-q8_0",
"13\u202fGB"
]
],
"image": false,
"author": "Mistral AI"
},
"deepseek-coder-v2": {
"url": "https://ollama.com/library/deepseek-coder-v2",
"description": "An open-source Mixture-of-Experts code language model that achieves performance comparable to GPT4-Turbo in code-specific tasks.",
"tags": [
[
"latest",
"8.9\u202fGB"
],
[
"16b",
"8.9\u202fGB"
],
[
"236b",
"133\u202fGB"
],
[
"lite",
"8.9\u202fGB"
],
[
"16b-lite-base-fp16",
"31\u202fGB"
],
[
"16b-lite-base-q2_K",
"6.4\u202fGB"
],
[
"16b-lite-base-q3_K_S",
"7.5\u202fGB"
],
[
"16b-lite-base-q3_K_M",
"8.1\u202fGB"
],
[
"16b-lite-base-q3_K_L",
"8.5\u202fGB"
],
[
"16b-lite-base-q4_0",
"8.9\u202fGB"
],
[
"16b-lite-base-q4_1",
"9.9\u202fGB"
],
[
"16b-lite-base-q4_K_S",
"9.5\u202fGB"
],
[
"16b-lite-base-q4_K_M",
"10\u202fGB"
],
[
"16b-lite-base-q5_0",
"11\u202fGB"
],
[
"16b-lite-base-q5_1",
"12\u202fGB"
],
[
"16b-lite-base-q5_K_S",
"11\u202fGB"
],
[
"16b-lite-base-q5_K_M",
"12\u202fGB"
],
[
"16b-lite-base-q6_K",
"14\u202fGB"
],
[
"16b-lite-base-q8_0",
"17\u202fGB"
],
[
"16b-lite-instruct-fp16",
"31\u202fGB"
],
[
"16b-lite-instruct-q2_K",
"6.4\u202fGB"
],
[
"16b-lite-instruct-q3_K_S",
"7.5\u202fGB"
],
[
"16b-lite-instruct-q3_K_M",
"8.1\u202fGB"
],
[
"16b-lite-instruct-q3_K_L",
"8.5\u202fGB"
],
[
"16b-lite-instruct-q4_0",
"8.9\u202fGB"
],
[
"16b-lite-instruct-q4_1",
"9.9\u202fGB"
],
[
"16b-lite-instruct-q4_K_S",
"9.5\u202fGB"
],
[
"16b-lite-instruct-q4_K_M",
"10\u202fGB"
],
[
"16b-lite-instruct-q5_0",
"11\u202fGB"
],
[
"16b-lite-instruct-q5_1",
"12\u202fGB"
],
[
"16b-lite-instruct-q5_K_S",
"11\u202fGB"
],
[
"16b-lite-instruct-q5_K_M",
"12\u202fGB"
],
[
"16b-lite-instruct-q6_K",
"14\u202fGB"
],
[
"16b-lite-instruct-q8_0",
"17\u202fGB"
],
[
"236b-base-fp16",
"472\u202fGB"
],
[
"236b-base-q2_K",
"86\u202fGB"
],
[
"236b-base-q3_K_S",
"102\u202fGB"
],
[
"236b-base-q3_K_M",
"113\u202fGB"
],
[
"236b-base-q3_K_L",
"122\u202fGB"
],
[
"236b-base-q4_0",
"133\u202fGB"
],
[
"236b-base-q4_1",
"148\u202fGB"
],
[
"236b-base-q4_K_S",
"134\u202fGB"
],
[
"236b-base-q4_K_M",
"142\u202fGB"
],
[
"236b-base-q5_0",
"162\u202fGB"
],
[
"236b-base-q5_1",
"177\u202fGB"
],
[
"236b-base-q5_K_S",
"162\u202fGB"
],
[
"236b-base-q5_K_M",
"167\u202fGB"
],
[
"236b-base-q6_K",
"194\u202fGB"
],
[
"236b-base-q8_0",
"251\u202fGB"
],
[
"236b-instruct-fp16",
"472\u202fGB"
],
[
"236b-instruct-q2_K",
"86\u202fGB"
],
[
"236b-instruct-q3_K_S",
"102\u202fGB"
],
[
"236b-instruct-q3_K_M",
"113\u202fGB"
],
[
"236b-instruct-q3_K_L",
"122\u202fGB"
],
[
"236b-instruct-q4_0",
"133\u202fGB"
],
[
"236b-instruct-q4_1",
"148\u202fGB"
],
[
"236b-instruct-q4_K_S",
"134\u202fGB"
],
[
"236b-instruct-q4_k_m",
"142\u202fGB"
],
[
"236b-instruct-q4_K_M",
"142\u202fGB"
],
[
"236b-instruct-q5_0",
"162\u202fGB"
],
[
"236b-instruct-q5_1",
"177\u202fGB"
],
[
"236b-instruct-q5_K_S",
"162\u202fGB"
],
[
"236b-instruct-q5_K_M",
"167\u202fGB"
],
[
"236b-instruct-q6_K",
"194\u202fGB"
],
[
"236b-instruct-q8_0",
"251\u202fGB"
]
],
"image": false,
"author": "DeepSeek Team"
},
"mistral": {
"url": "https://ollama.com/library/mistral",
"description": "The 7B model released by Mistral AI, updated to version 0.3.",
"tags": [
[
"latest",
"4.1\u202fGB"
],
[
"7b",
"4.1\u202fGB"
],
[
"instruct",
"4.1\u202fGB"
],
[
"text",
"4.1\u202fGB"
],
[
"v0.1",
"4.1\u202fGB"
],
[
"v0.2",
"4.1\u202fGB"
],
[
"v0.3",
"4.1\u202fGB"
],
[
"7b-instruct",
"4.1\u202fGB"
],
[
"7b-text",
"4.1\u202fGB"
],
[
"7b-instruct-fp16",
"14\u202fGB"
],
[
"7b-instruct-q2_K",
"3.1\u202fGB"
],
[
"7b-instruct-q3_K_S",
"3.2\u202fGB"
],
[
"7b-instruct-q3_K_M",
"3.5\u202fGB"
],
[
"7b-instruct-q3_K_L",
"3.8\u202fGB"
],
[
"7b-instruct-q4_0",
"4.1\u202fGB"
],
[
"7b-instruct-q4_1",
"4.6\u202fGB"
],
[
"7b-instruct-q4_K_S",
"4.1\u202fGB"
],
[
"7b-instruct-q4_K_M",
"4.4\u202fGB"
],
[
"7b-instruct-q5_0",
"5.0\u202fGB"
],
[
"7b-instruct-q5_1",
"5.4\u202fGB"
],
[
"7b-instruct-q5_K_S",
"5.0\u202fGB"
],
[
"7b-instruct-q5_K_M",
"5.1\u202fGB"
],
[
"7b-instruct-q6_K",
"5.9\u202fGB"
],
[
"7b-instruct-q8_0",
"7.7\u202fGB"
],
[
"7b-instruct-v0.2-fp16",
"14\u202fGB"
],
[
"7b-instruct-v0.2-q2_K",
"3.1\u202fGB"
],
[
"7b-instruct-v0.2-q3_K_S",
"3.2\u202fGB"
],
[
"7b-instruct-v0.2-q3_K_M",
"3.5\u202fGB"
],
[
"7b-instruct-v0.2-q3_K_L",
"3.8\u202fGB"
],
[
"7b-instruct-v0.2-q4_0",
"4.1\u202fGB"
],
[
"7b-instruct-v0.2-q4_1",
"4.6\u202fGB"
],
[
"7b-instruct-v0.2-q4_K_S",
"4.1\u202fGB"
],
[
"7b-instruct-v0.2-q4_K_M",
"4.4\u202fGB"
],
[
"7b-instruct-v0.2-q5_0",
"5.0\u202fGB"
],
[
"7b-instruct-v0.2-q5_1",
"5.4\u202fGB"
],
[
"7b-instruct-v0.2-q5_K_S",
"5.0\u202fGB"
],
[
"7b-instruct-v0.2-q5_K_M",
"5.1\u202fGB"
],
[
"7b-instruct-v0.2-q6_K",
"5.9\u202fGB"
],
[
"7b-instruct-v0.2-q8_0",
"7.7\u202fGB"
],
[
"7b-instruct-v0.3-fp16",
"14\u202fGB"
],
[
"7b-instruct-v0.3-q2_K",
"2.7\u202fGB"
],
[
"7b-instruct-v0.3-q3_K_S",
"3.2\u202fGB"
],
[
"7b-instruct-v0.3-q3_K_M",
"3.5\u202fGB"
],
[
"7b-instruct-v0.3-q3_K_L",
"3.8\u202fGB"
],
[
"7b-instruct-v0.3-q4_0",
"4.1\u202fGB"
],
[
"7b-instruct-v0.3-q4_1",
"4.6\u202fGB"
],
[
"7b-instruct-v0.3-q4_K_S",
"4.1\u202fGB"
],
[
"7b-instruct-v0.3-q4_K_M",
"4.4\u202fGB"
],
[
"7b-instruct-v0.3-q5_0",
"5.0\u202fGB"
],
[
"7b-instruct-v0.3-q5_1",
"5.4\u202fGB"
],
[
"7b-instruct-v0.3-q5_K_S",
"5.0\u202fGB"
],
[
"7b-instruct-v0.3-q5_K_M",
"5.1\u202fGB"
],
[
"7b-instruct-v0.3-q6_K",
"5.9\u202fGB"
],
[
"7b-instruct-v0.3-q8_0",
"7.7\u202fGB"
],
[
"7b-text-fp16",
"14\u202fGB"
],
[
"7b-text-q2_K",
"3.1\u202fGB"
],
[
"7b-text-q3_K_S",
"3.2\u202fGB"
],
[
"7b-text-q3_K_M",
"3.5\u202fGB"
],
[
"7b-text-q3_K_L",
"3.8\u202fGB"
],
[
"7b-text-q4_0",
"4.1\u202fGB"
],
[
"7b-text-q4_1",
"4.6\u202fGB"
],
[
"7b-text-q4_K_S",
"4.1\u202fGB"
],
[
"7b-text-q4_K_M",
"4.4\u202fGB"
],
[
"7b-text-q5_0",
"5.0\u202fGB"
],
[
"7b-text-q5_1",
"5.4\u202fGB"
],
[
"7b-text-q5_K_S",
"5.0\u202fGB"
],
[
"7b-text-q5_K_M",
"5.1\u202fGB"
],
[
"7b-text-q6_K",
"5.9\u202fGB"
],
[
"7b-text-q8_0",
"7.7\u202fGB"
],
[
"7b-text-v0.2-fp16",
"14\u202fGB"
],
[
"7b-text-v0.2-q2_K",
"2.7\u202fGB"
],
[
"7b-text-v0.2-q3_K_S",
"3.2\u202fGB"
],
[
"7b-text-v0.2-q3_K_M",
"3.5\u202fGB"
],
[
"7b-text-v0.2-q3_K_L",
"3.8\u202fGB"
],
[
"7b-text-v0.2-q4_0",
"4.1\u202fGB"
],
[
"7b-text-v0.2-q4_1",
"4.6\u202fGB"
],
[
"7b-text-v0.2-q4_K_S",
"4.1\u202fGB"
],
[
"7b-text-v0.2-q4_K_M",
"4.4\u202fGB"
],
[
"7b-text-v0.2-q5_0",
"5.0\u202fGB"
],
[
"7b-text-v0.2-q5_1",
"5.4\u202fGB"
],
[
"7b-text-v0.2-q5_K_S",
"5.0\u202fGB"
],
[
"7b-text-v0.2-q5_K_M",
"5.1\u202fGB"
],
[
"7b-text-v0.2-q6_K",
"5.9\u202fGB"
],
[
"7b-text-v0.2-q8_0",
"7.7\u202fGB"
]
],
"image": false,
"author": "Mistral AI"
},
"mixtral": {
"url": "https://ollama.com/library/mixtral",
"description": "A set of Mixture of Experts (MoE) models with open weights by Mistral AI in 8x7b and 8x22b parameter sizes.",
"tags": [
[
"latest",
"26\u202fGB"
],
[
"8x7b",
"26\u202fGB"
],
[
"8x22b",
"80\u202fGB"
],
[
"instruct",
"26\u202fGB"
],
[
"text",
"26\u202fGB"
],
[
"v0.1",
"80\u202fGB"
],
[
"v0.1-instruct",
"80\u202fGB"
],
[
"8x22b-instruct",
"80\u202fGB"
],
[
"8x22b-text",
"80\u202fGB"
],
[
"8x7b-instruct-v0.1-fp16",
"93\u202fGB"
],
[
"8x7b-instruct-v0.1-q2_K",
"16\u202fGB"
],
[
"8x7b-instruct-v0.1-q3_K_S",
"20\u202fGB"
],
[
"8x7b-instruct-v0.1-q3_K_M",
"20\u202fGB"
],
[
"8x7b-instruct-v0.1-q3_K_L",
"20\u202fGB"
],
[
"8x7b-instruct-v0.1-q4_0",
"26\u202fGB"
],
[
"8x7b-instruct-v0.1-q4_1",
"29\u202fGB"
],
[
"8x7b-instruct-v0.1-q4_K_S",
"26\u202fGB"
],
[
"8x7b-instruct-v0.1-q4_K_M",
"26\u202fGB"
],
[
"8x7b-instruct-v0.1-q5_0",
"32\u202fGB"
],
[
"8x7b-instruct-v0.1-q5_1",
"35\u202fGB"
],
[
"8x7b-instruct-v0.1-q5_K_S",
"32\u202fGB"
],
[
"8x7b-instruct-v0.1-q5_K_M",
"32\u202fGB"
],
[
"8x7b-instruct-v0.1-q6_K",
"38\u202fGB"
],
[
"8x7b-instruct-v0.1-q8_0",
"50\u202fGB"
],
[
"8x7b-text-v0.1-fp16",
"93\u202fGB"
],
[
"8x7b-text-v0.1-q2_K",
"16\u202fGB"
],
[
"8x7b-text-v0.1-q3_K_S",
"20\u202fGB"
],
[
"8x7b-text-v0.1-q3_K_M",
"20\u202fGB"
],
[
"8x7b-text-v0.1-q3_K_L",
"20\u202fGB"
],
[
"8x7b-text-v0.1-q4_0",
"26\u202fGB"
],
[
"8x7b-text-v0.1-q4_1",
"29\u202fGB"
],
[
"8x7b-text-v0.1-q4_K_S",
"26\u202fGB"
],
[
"8x7b-text-v0.1-q4_K_M",
"26\u202fGB"
],
[
"8x7b-text-v0.1-q5_0",
"32\u202fGB"
],
[
"8x7b-text-v0.1-q5_1",
"35\u202fGB"
],
[
"8x7b-text-v0.1-q5_K_S",
"32\u202fGB"
],
[
"8x7b-text-v0.1-q5_K_M",
"32\u202fGB"
],
[
"8x7b-text-v0.1-q6_K",
"38\u202fGB"
],
[
"8x7b-text-v0.1-q8_0",
"50\u202fGB"
],
[
"8x22b-instruct-v0.1-fp16",
"281\u202fGB"
],
[
"8x22b-instruct-v0.1-q2_K",
"52\u202fGB"
],
[
"8x22b-instruct-v0.1-q3_K_S",
"62\u202fGB"
],
[
"8x22b-instruct-v0.1-q3_K_M",
"68\u202fGB"
],
[
"8x22b-instruct-v0.1-q3_K_L",
"73\u202fGB"
],
[
"8x22b-instruct-v0.1-q4_0",
"80\u202fGB"
],
[
"8x22b-instruct-v0.1-q4_1",
"88\u202fGB"
],
[
"8x22b-instruct-v0.1-q4_K_S",
"80\u202fGB"
],
[
"8x22b-instruct-v0.1-q4_K_M",
"86\u202fGB"
],
[
"8x22b-instruct-v0.1-q5_0",
"97\u202fGB"
],
[
"8x22b-instruct-v0.1-q5_1",
"106\u202fGB"
],
[
"8x22b-instruct-v0.1-q5_K_S",
"97\u202fGB"
],
[
"8x22b-instruct-v0.1-q5_K_M",
"100\u202fGB"
],
[
"8x22b-instruct-v0.1-q6_K",
"116\u202fGB"
],
[
"8x22b-instruct-v0.1-q8_0",
"149\u202fGB"
],
[
"8x22b-text-v0.1-fp16",
"281\u202fGB"
],
[
"8x22b-text-v0.1-q2_K",
"52\u202fGB"
],
[
"8x22b-text-v0.1-q3_K_S",
"61\u202fGB"
],
[
"8x22b-text-v0.1-q3_K_M",
"68\u202fGB"
],
[
"8x22b-text-v0.1-q3_K_L",
"73\u202fGB"
],
[
"8x22b-text-v0.1-q4_0",
"80\u202fGB"
],
[
"8x22b-text-v0.1-q4_1",
"88\u202fGB"
],
[
"8x22b-text-v0.1-q4_K_S",
"80\u202fGB"
],
[
"8x22b-text-v0.1-q4_K_M",
"86\u202fGB"
],
[
"8x22b-text-v0.1-q5_0",
"97\u202fGB"
],
[
"8x22b-text-v0.1-q5_1",
"106\u202fGB"
],
[
"8x22b-text-v0.1-q5_K_S",
"97\u202fGB"
],
[
"8x22b-text-v0.1-q5_K_M",
"100\u202fGB"
],
[
"8x22b-text-v0.1-q6_K",
"116\u202fGB"
],
[
"8x22b-text-v0.1-q8_0",
"149\u202fGB"
]
],
"image": false,
"author": "Mistral AI"
},
"codegemma": {
"url": "https://ollama.com/library/codegemma",
"description": "CodeGemma is a collection of powerful, lightweight models that can perform a variety of coding tasks like fill-in-the-middle code completion, code generation, natural language understanding, mathematical reasoning, and instruction following.",
"tags": [
[
"latest",
"5.0\u202fGB"
],
[
"2b",
"1.6\u202fGB"
],
[
"7b",
"5.0\u202fGB"
],
[
"code",
"1.6\u202fGB"
],
[
"instruct",
"5.0\u202fGB"
],
[
"2b-code",
"1.6\u202fGB"
],
[
"2b-v1.1",
"1.6\u202fGB"
],
[
"7b-code",
"5.0\u202fGB"
],
[
"7b-instruct",
"5.0\u202fGB"
],
[
"7b-v1.1",
"5.0\u202fGB"
],
[
"2b-code-fp16",
"5.0\u202fGB"
],
[
"2b-code-q2_K",
"1.2\u202fGB"
],
[
"2b-code-q3_K_S",
"1.3\u202fGB"
],
[
"2b-code-q3_K_M",
"1.4\u202fGB"
],
[
"2b-code-q3_K_L",
"1.5\u202fGB"
],
[
"2b-code-q4_0",
"1.6\u202fGB"
],
[
"2b-code-q4_1",
"1.7\u202fGB"
],
[
"2b-code-q4_K_S",
"1.6\u202fGB"
],
[
"2b-code-q4_K_M",
"1.6\u202fGB"
],
[
"2b-code-q5_0",
"1.8\u202fGB"
],
[
"2b-code-q5_1",
"1.9\u202fGB"
],
[
"2b-code-q5_K_S",
"1.8\u202fGB"
],
[
"2b-code-q5_K_M",
"1.8\u202fGB"
],
[
"2b-code-q6_K",
"2.1\u202fGB"
],
[
"2b-code-q8_0",
"2.7\u202fGB"
],
[
"2b-code-v1.1-fp16",
"5.0\u202fGB"
],
[
"2b-code-v1.1-q2_K",
"1.2\u202fGB"
],
[
"2b-code-v1.1-q3_K_S",
"1.3\u202fGB"
],
[
"2b-code-v1.1-q3_K_M",
"1.4\u202fGB"
],
[
"2b-code-v1.1-q3_K_L",
"1.5\u202fGB"
],
[
"2b-code-v1.1-q4_0",
"1.6\u202fGB"
],
[
"2b-code-v1.1-q4_1",
"1.7\u202fGB"
],
[
"2b-code-v1.1-q4_K_S",
"1.6\u202fGB"
],
[
"2b-code-v1.1-q4_K_M",
"1.6\u202fGB"
],
[
"2b-code-v1.1-q5_0",
"1.8\u202fGB"
],
[
"2b-code-v1.1-q5_1",
"1.9\u202fGB"
],
[
"2b-code-v1.1-q5_K_S",
"1.8\u202fGB"
],
[
"2b-code-v1.1-q5_K_M",
"1.8\u202fGB"
],
[
"2b-code-v1.1-q6_K",
"2.1\u202fGB"
],
[
"2b-code-v1.1-q8_0",
"2.7\u202fGB"
],
[
"7b-code-fp16",
"17\u202fGB"
],
[
"7b-code-q2_K",
"3.5\u202fGB"
],
[
"7b-code-q3_K_S",
"4.0\u202fGB"
],
[
"7b-code-q3_K_M",
"4.4\u202fGB"
],
[
"7b-code-q3_K_L",
"4.7\u202fGB"
],
[
"7b-code-q4_0",
"5.0\u202fGB"
],
[
"7b-code-q4_1",
"5.5\u202fGB"
],
[
"7b-code-q4_K_S",
"5.0\u202fGB"
],
[
"7b-code-q4_K_M",
"5.3\u202fGB"
],
[
"7b-code-q5_0",
"6.0\u202fGB"
],
[
"7b-code-q5_1",
"6.5\u202fGB"
],
[
"7b-code-q5_K_S",
"6.0\u202fGB"
],
[
"7b-code-q5_K_M",
"6.1\u202fGB"
],
[
"7b-code-q6_K",
"7.0\u202fGB"
],
[
"7b-code-q8_0",
"9.1\u202fGB"
],
[
"7b-instruct-fp16",
"17\u202fGB"
],
[
"7b-instruct-q2_K",
"3.5\u202fGB"
],
[
"7b-instruct-q3_K_S",
"4.0\u202fGB"
],
[
"7b-instruct-q3_K_M",
"4.4\u202fGB"
],
[
"7b-instruct-q3_K_L",
"4.7\u202fGB"
],
[
"7b-instruct-q4_0",
"5.0\u202fGB"
],
[
"7b-instruct-q4_1",
"5.5\u202fGB"
],
[
"7b-instruct-q4_K_S",
"5.0\u202fGB"
],
[
"7b-instruct-q4_K_M",
"5.3\u202fGB"
],
[
"7b-instruct-q5_0",
"6.0\u202fGB"
],
[
"7b-instruct-q5_1",
"6.5\u202fGB"
],
[
"7b-instruct-q5_K_S",
"6.0\u202fGB"
],
[
"7b-instruct-q5_K_M",
"6.1\u202fGB"
],
[
"7b-instruct-q6_K",
"7.0\u202fGB"
],
[
"7b-instruct-q8_0",
"9.1\u202fGB"
],
[
"7b-instruct-v1.1-fp16",
"17\u202fGB"
],
[
"7b-instruct-v1.1-q2_K",
"3.5\u202fGB"
],
[
"7b-instruct-v1.1-q3_K_S",
"4.0\u202fGB"
],
[
"7b-instruct-v1.1-q3_K_M",
"4.4\u202fGB"
],
[
"7b-instruct-v1.1-q3_K_L",
"4.7\u202fGB"
],
[
"7b-instruct-v1.1-q4_0",
"5.0\u202fGB"
],
[
"7b-instruct-v1.1-q4_1",
"5.5\u202fGB"
],
[
"7b-instruct-v1.1-q4_K_S",
"5.0\u202fGB"
],
[
"7b-instruct-v1.1-q4_K_M",
"5.3\u202fGB"
],
[
"7b-instruct-v1.1-q5_0",
"6.0\u202fGB"
],
[
"7b-instruct-v1.1-q5_1",
"6.5\u202fGB"
],
[
"7b-instruct-v1.1-q5_K_S",
"6.0\u202fGB"
],
[
"7b-instruct-v1.1-q5_K_M",
"6.1\u202fGB"
],
[
"7b-instruct-v1.1-q6_K",
"7.0\u202fGB"
],
[
"7b-instruct-v1.1-q8_0",
"9.1\u202fGB"
]
],
"image": false,
"author": "Google DeepMind"
},
"command-r": {
"url": "https://ollama.com/library/command-r",
"description": "Command R is a Large Language Model optimized for conversational interaction and long context tasks.",
"tags": [
[
"latest",
"19\u202fGB"
],
[
"35b",
"19\u202fGB"
],
[
"v0.1",
"20\u202fGB"
],
[
"35b-08-2024-fp16",
"65\u202fGB"
],
[
"35b-08-2024-q2_K",
"13\u202fGB"
],
[
"35b-08-2024-q3_K_S",
"15\u202fGB"
],
[
"35b-08-2024-q3_K_M",
"16\u202fGB"
],
[
"35b-08-2024-q3_K_L",
"18\u202fGB"
],
[
"35b-08-2024-q4_0",
"19\u202fGB"
],
[
"35b-08-2024-q4_1",
"21\u202fGB"
],
[
"35b-08-2024-q4_K_S",
"19\u202fGB"
],
[
"35b-08-2024-q4_K_M",
"20\u202fGB"
],
[
"35b-08-2024-q5_0",
"22\u202fGB"
],
[
"35b-08-2024-q5_1",
"24\u202fGB"
],
[
"35b-08-2024-q5_K_S",
"22\u202fGB"
],
[
"35b-08-2024-q5_K_M",
"23\u202fGB"
],
[
"35b-08-2024-q6_K",
"27\u202fGB"
],
[
"35b-08-2024-q8_0",
"34\u202fGB"
],
[
"35b-v0.1-fp16",
"70\u202fGB"
],
[
"35b-v0.1-q2_K",
"14\u202fGB"
],
[
"35b-v0.1-q3_K_S",
"16\u202fGB"
],
[
"35b-v0.1-q3_K_M",
"18\u202fGB"
],
[
"35b-v0.1-q3_K_L",
"19\u202fGB"
],
[
"35b-v0.1-q4_0",
"20\u202fGB"
],
[
"35b-v0.1-q4_1",
"22\u202fGB"
],
[
"35b-v0.1-q4_K_S",
"20\u202fGB"
],
[
"35b-v0.1-q4_K_M",
"22\u202fGB"
],
[
"35b-v0.1-q5_1",
"26\u202fGB"
],
[
"35b-v0.1-q5_K_S",
"24\u202fGB"
],
[
"35b-v0.1-q5_K_M",
"25\u202fGB"
],
[
"35b-v0.1-q6_K",
"29\u202fGB"
],
[
"35b-v0.1-q8_0",
"37\u202fGB"
]
],
"image": false,
"author": "Cohere"
},
"command-r-plus": {
"url": "https://ollama.com/library/command-r-plus",
"description": "Command R+ is a powerful, scalable large language model purpose-built to excel at real-world enterprise use cases.",
"tags": [
[
"latest",
"59\u202fGB"
],
[
"104b",
"59\u202fGB"
],
[
"104b-fp16",
"208\u202fGB"
],
[
"104b-q2_K",
"39\u202fGB"
],
[
"104b-q4_0",
"59\u202fGB"
],
[
"104b-q8_0",
"110\u202fGB"
],
[
"104b-08-2024-fp16",
"208\u202fGB"
],
[
"104b-08-2024-q2_K",
"39\u202fGB"
],
[
"104b-08-2024-q3_K_S",
"46\u202fGB"
],
[
"104b-08-2024-q3_K_M",
"51\u202fGB"
],
[
"104b-08-2024-q3_K_L",
"55\u202fGB"
],
[
"104b-08-2024-q4_0",
"59\u202fGB"
],
[
"104b-08-2024-q4_1",
"66\u202fGB"
],
[
"104b-08-2024-q4_K_S",
"60\u202fGB"
],
[
"104b-08-2024-q4_K_M",
"63\u202fGB"
],
[
"104b-08-2024-q5_0",
"72\u202fGB"
],
[
"104b-08-2024-q5_1",
"78\u202fGB"
],
[
"104b-08-2024-q5_K_S",
"72\u202fGB"
],
[
"104b-08-2024-q5_K_M",
"74\u202fGB"
],
[
"104b-08-2024-q6_K",
"85\u202fGB"
],
[
"104b-08-2024-q8_0",
"110\u202fGB"
]
],
"image": false,
"author": "Cohere"
},
"llava": {
"url": "https://ollama.com/library/llava",
"description": "\ud83c\udf0b LLaVA is a novel end-to-end trained large multimodal model that combines a vision encoder and Vicuna for general-purpose visual and language understanding. Updated to version 1.6.",
"tags": [
[
"latest",
"4.7\u202fGB"
],
[
"7b",
"4.7\u202fGB"
],
[
"13b",
"8.0\u202fGB"
],
[
"34b",
"20\u202fGB"
],
[
"v1.6",
"4.7\u202fGB"
],
[
"7b-v1.6",
"4.7\u202fGB"
],
[
"13b-v1.6",
"8.0\u202fGB"
],
[
"34b-v1.6",
"20\u202fGB"
],
[
"7b-v1.5-fp16",
"14\u202fGB"
],
[
"7b-v1.5-q2_K",
"3.5\u202fGB"
],
[
"7b-v1.5-q3_K_S",
"3.6\u202fGB"
],
[
"7b-v1.5-q3_K_M",
"3.9\u202fGB"
],
[
"7b-v1.5-q3_K_L",
"4.2\u202fGB"
],
[
"7b-v1.5-q4_0",
"4.5\u202fGB"
],
[
"7b-v1.5-q4_1",
"4.9\u202fGB"
],
[
"7b-v1.5-q4_K_S",
"4.5\u202fGB"
],
[
"7b-v1.5-q4_K_M",
"4.7\u202fGB"
],
[
"7b-v1.5-q5_0",
"5.3\u202fGB"
],
[
"7b-v1.5-q5_1",
"5.7\u202fGB"
],
[
"7b-v1.5-q5_K_S",
"5.3\u202fGB"
],
[
"7b-v1.5-q5_K_M",
"5.4\u202fGB"
],
[
"7b-v1.5-q6_K",
"6.2\u202fGB"
],
[
"7b-v1.5-q8_0",
"7.8\u202fGB"
],
[
"7b-v1.6-mistral-fp16",
"15\u202fGB"
],
[
"7b-v1.6-mistral-q2_K",
"3.3\u202fGB"
],
[
"7b-v1.6-mistral-q3_K_S",
"3.8\u202fGB"
],
[
"7b-v1.6-mistral-q3_K_M",
"4.1\u202fGB"
],
[
"7b-v1.6-mistral-q3_K_L",
"4.4\u202fGB"
],
[
"7b-v1.6-mistral-q4_0",
"4.7\u202fGB"
],
[
"7b-v1.6-mistral-q4_1",
"5.2\u202fGB"
],
[
"7b-v1.6-mistral-q4_K_S",
"4.8\u202fGB"
],
[
"7b-v1.6-mistral-q4_K_M",
"5.0\u202fGB"
],
[
"7b-v1.6-mistral-q5_0",
"5.6\u202fGB"
],
[
"7b-v1.6-mistral-q5_1",
"6.1\u202fGB"
],
[
"7b-v1.6-mistral-q5_K_S",
"5.6\u202fGB"
],
[
"7b-v1.6-mistral-q5_K_M",
"5.8\u202fGB"
],
[
"7b-v1.6-mistral-q6_K",
"6.6\u202fGB"
],
[
"7b-v1.6-mistral-q8_0",
"8.3\u202fGB"
],
[
"7b-v1.6-vicuna-fp16",
"14\u202fGB"
],
[
"7b-v1.6-vicuna-q2_K",
"3.2\u202fGB"
],
[
"7b-v1.6-vicuna-q3_K_S",
"3.6\u202fGB"
],
[
"7b-v1.6-vicuna-q3_K_M",
"3.9\u202fGB"
],
[
"7b-v1.6-vicuna-q3_K_L",
"4.2\u202fGB"
],
[
"7b-v1.6-vicuna-q4_0",
"4.5\u202fGB"
],
[
"7b-v1.6-vicuna-q4_1",
"4.9\u202fGB"
],
[
"7b-v1.6-vicuna-q4_K_S",
"4.5\u202fGB"
],
</