diff --git a/src/available_models.json b/src/available_models.json index 084baa8..9558675 100644 --- a/src/available_models.json +++ b/src/available_models.json @@ -5,143 +5,383 @@ "tags": [ [ "latest", - "4.7 GB" - ], - [ - "405b", - "231 GB" - ], - [ - "70b", - "40 GB" + "4.7\u202fGB" ], [ "8b", - "4.7 GB" + "4.7\u202fGB" ], [ - "70b-instruct-q2_k", - "26 GB" + "70b", + "40\u202fGB" ], [ - "70b-instruct-fp16", - "141 GB" - ], - [ - "70b-instruct-q2_K", - "26 GB" - ], - [ - "70b-instruct-q3_K_L", - "37 GB" - ], - [ - "70b-instruct-q3_K_M", - "34 GB" - ], - [ - "70b-instruct-q3_K_S", - "31 GB" - ], - [ - "70b-instruct-q4_0", - "40 GB" - ], - [ - "70b-instruct-q4_1", - "44 GB" - ], - [ - "70b-instruct-q4_K_M", - "43 GB" - ], - [ - "70b-instruct-q4_K_S", - "40 GB" - ], - [ - "70b-instruct-q5_0", - "49 GB" - ], - [ - "70b-instruct-q5_1", - "53 GB" - ], - [ - "70b-instruct-q5_K_M", - "50 GB" - ], - [ - "70b-instruct-q5_K_S", - "49 GB" - ], - [ - "70b-instruct-q6_K", - "58 GB" - ], - [ - "70b-instruct-q8_0", - "75 GB" + "405b", + "229\u202fGB" ], [ "8b-instruct-fp16", - "16 GB" + "16\u202fGB" ], [ "8b-instruct-q2_K", - "3.2 GB" - ], - [ - "8b-instruct-q3_K_L", - "4.3 GB" - ], - [ - "8b-instruct-q3_K_M", - "4.0 GB" + "3.2\u202fGB" ], [ "8b-instruct-q3_K_S", - "3.7 GB" + "3.7\u202fGB" + ], + [ + "8b-instruct-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-instruct-q3_K_L", + "4.3\u202fGB" ], [ "8b-instruct-q4_0", - "4.7 GB" + "4.7\u202fGB" ], [ "8b-instruct-q4_1", - "5.1 GB" - ], - [ - "8b-instruct-q4_K_M", - "4.9 GB" + "5.1\u202fGB" ], [ "8b-instruct-q4_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "8b-instruct-q4_K_M", + "4.9\u202fGB" ], [ "8b-instruct-q5_0", - "5.6 GB" + "5.6\u202fGB" ], [ "8b-instruct-q5_1", - "6.1 GB" - ], - [ - "8b-instruct-q5_K_M", - "5.7 GB" + "6.1\u202fGB" ], [ "8b-instruct-q5_K_S", - "5.6 GB" + "5.6\u202fGB" + ], + [ + "8b-instruct-q5_K_M", + "5.7\u202fGB" ], [ "8b-instruct-q6_K", - "6.6 GB" + "6.6\u202fGB" ], [ "8b-instruct-q8_0", - "8.5 GB" + "8.5\u202fGB" + ], + [ + "8b-text-fp16", + "16\u202fGB" + ], + [ + "8b-text-q2_K", + "3.2\u202fGB" + ], + [ + "8b-text-q3_K_S", + "3.7\u202fGB" + ], + [ + "8b-text-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-text-q3_K_L", + "4.3\u202fGB" + ], + [ + "8b-text-q4_0", + "4.7\u202fGB" + ], + [ + "8b-text-q4_1", + "5.1\u202fGB" + ], + [ + "8b-text-q4_K_S", + "4.7\u202fGB" + ], + [ + "8b-text-q4_K_M", + "4.9\u202fGB" + ], + [ + "8b-text-q5_0", + "5.6\u202fGB" + ], + [ + "8b-text-q5_1", + "6.1\u202fGB" + ], + [ + "8b-text-q5_K_S", + "5.6\u202fGB" + ], + [ + "8b-text-q5_K_M", + "5.7\u202fGB" + ], + [ + "8b-text-q6_K", + "6.6\u202fGB" + ], + [ + "8b-text-q8_0", + "8.5\u202fGB" + ], + [ + "70b-instruct-fp16", + "141\u202fGB" + ], + [ + "70b-instruct-q2_k", + "26\u202fGB" + ], + [ + "70b-instruct-q2_K", + "26\u202fGB" + ], + [ + "70b-instruct-q3_K_S", + "31\u202fGB" + ], + [ + "70b-instruct-q3_K_M", + "34\u202fGB" + ], + [ + "70b-instruct-q3_K_L", + "37\u202fGB" + ], + [ + "70b-instruct-q4_0", + "40\u202fGB" + ], + [ + "70b-instruct-q4_1", + "44\u202fGB" + ], + [ + "70b-instruct-q4_K_S", + "40\u202fGB" + ], + [ + "70b-instruct-q4_K_M", + "43\u202fGB" + ], + [ + "70b-instruct-q5_0", + "49\u202fGB" + ], + [ + "70b-instruct-q5_1", + "53\u202fGB" + ], + [ + "70b-instruct-q5_K_S", + "49\u202fGB" + ], + [ + "70b-instruct-q5_K_M", + "50\u202fGB" + ], + [ + "70b-instruct-q6_K", + "58\u202fGB" + ], + [ + "70b-instruct-q8_0", + "75\u202fGB" + ], + [ + "70b-text-fp16", + "141\u202fGB" + ], + [ + "70b-text-q2_K", + "26\u202fGB" + ], + [ + "70b-text-q3_K_S", + 
"31\u202fGB" + ], + [ + "70b-text-q3_K_M", + "34\u202fGB" + ], + [ + "70b-text-q3_K_L", + "37\u202fGB" + ], + [ + "70b-text-q4_0", + "40\u202fGB" + ], + [ + "70b-text-q4_1", + "44\u202fGB" + ], + [ + "70b-text-q4_K_S", + "40\u202fGB" + ], + [ + "70b-text-q4_K_M", + "43\u202fGB" + ], + [ + "70b-text-q5_0", + "49\u202fGB" + ], + [ + "70b-text-q5_1", + "53\u202fGB" + ], + [ + "70b-text-q5_K_S", + "49\u202fGB" + ], + [ + "70b-text-q5_K_M", + "50\u202fGB" + ], + [ + "70b-text-q6_K", + "58\u202fGB" + ], + [ + "70b-text-q8_0", + "75\u202fGB" + ], + [ + "405b-instruct-fp16", + "812\u202fGB" + ], + [ + "405b-instruct-q2_K", + "149\u202fGB" + ], + [ + "405b-instruct-q3_K_S", + "175\u202fGB" + ], + [ + "405b-instruct-q3_K_M", + "195\u202fGB" + ], + [ + "405b-instruct-q3_K_L", + "213\u202fGB" + ], + [ + "405b-instruct-q4_0", + "229\u202fGB" + ], + [ + "405b-instruct-q4_1", + "254\u202fGB" + ], + [ + "405b-instruct-q4_K_S", + "231\u202fGB" + ], + [ + "405b-instruct-q4_K_M", + "243\u202fGB" + ], + [ + "405b-instruct-q5_0", + "279\u202fGB" + ], + [ + "405b-instruct-q5_1", + "305\u202fGB" + ], + [ + "405b-instruct-q5_K_S", + "279\u202fGB" + ], + [ + "405b-instruct-q5_K_M", + "287\u202fGB" + ], + [ + "405b-instruct-q6_K", + "333\u202fGB" + ], + [ + "405b-instruct-q8_0", + "431\u202fGB" + ], + [ + "405b-text-fp16", + "812\u202fGB" + ], + [ + "405b-text-q2_K", + "149\u202fGB" + ], + [ + "405b-text-q3_K_S", + "175\u202fGB" + ], + [ + "405b-text-q3_K_M", + "195\u202fGB" + ], + [ + "405b-text-q3_K_L", + "213\u202fGB" + ], + [ + "405b-text-q4_0", + "229\u202fGB" + ], + [ + "405b-text-q4_1", + "254\u202fGB" + ], + [ + "405b-text-q4_K_S", + "231\u202fGB" + ], + [ + "405b-text-q4_K_M", + "243\u202fGB" + ], + [ + "405b-text-q5_0", + "279\u202fGB" + ], + [ + "405b-text-q5_1", + "305\u202fGB" + ], + [ + "405b-text-q5_K_S", + "279\u202fGB" + ], + [ + "405b-text-q5_K_M", + "287\u202fGB" + ], + [ + "405b-text-q6_K", + "333\u202fGB" + ], + [ + "405b-text-q8_0", + "431\u202fGB" ] ], "image": false, @@ -149,259 +389,383 @@ }, "gemma2": { "url": "https://ollama.com/library/gemma2", - "description": "Google Gemma 2 is now available in 2 sizes, 9B and 27B.", + "description": "Google Gemma 2 is a high-performing and efficient model by now available in three sizes: 2B, 9B, and 27B.", "tags": [ [ "latest", - "5.4 GB" + "5.4\u202fGB" ], [ - "27b", - "16 GB" + "2b", + "1.6\u202fGB" ], [ "9b", - "5.4 GB" + "5.4\u202fGB" ], [ - "27b-instruct-fp16", - "54 GB" + "27b", + "16\u202fGB" ], [ - "27b-instruct-q2_K", - "10 GB" + "2b-instruct-fp16", + "5.2\u202fGB" ], [ - "27b-instruct-q3_K_L", - "15 GB" + "2b-instruct-q2_K", + "1.2\u202fGB" ], [ - "27b-instruct-q3_K_M", - "13 GB" + "2b-instruct-q3_K_S", + "1.4\u202fGB" ], [ - "27b-instruct-q3_K_S", - "12 GB" + "2b-instruct-q3_K_M", + "1.5\u202fGB" ], [ - "27b-instruct-q4_0", - "16 GB" + "2b-instruct-q3_K_L", + "1.6\u202fGB" ], [ - "27b-instruct-q4_1", - "17 GB" + "2b-instruct-q4_0", + "1.6\u202fGB" ], [ - "27b-instruct-q4_K_M", - "17 GB" + "2b-instruct-q4_1", + "1.8\u202fGB" ], [ - "27b-instruct-q4_K_S", - "16 GB" + "2b-instruct-q4_K_S", + "1.6\u202fGB" ], [ - "27b-instruct-q5_0", - "19 GB" + "2b-instruct-q4_K_M", + "1.7\u202fGB" ], [ - "27b-instruct-q5_1", - "21 GB" + "2b-instruct-q5_0", + "1.9\u202fGB" ], [ - "27b-instruct-q5_K_M", - "19 GB" + "2b-instruct-q5_1", + "2.0\u202fGB" ], [ - "27b-instruct-q5_K_S", - "19 GB" + "2b-instruct-q5_K_S", + "1.9\u202fGB" ], [ - "27b-instruct-q6_K", - "22 GB" + "2b-instruct-q5_K_M", + "1.9\u202fGB" ], [ - "27b-instruct-q8_0", - "29 GB" + 
"2b-instruct-q6_K", + "2.2\u202fGB" ], [ - "27b-text-fp16", - "54 GB" + "2b-instruct-q8_0", + "2.8\u202fGB" ], [ - "27b-text-q2_K", - "10 GB" + "2b-text-fp16", + "5.2\u202fGB" ], [ - "27b-text-q3_K_L", - "15 GB" + "2b-text-q2_K", + "1.2\u202fGB" ], [ - "27b-text-q3_K_M", - "13 GB" + "2b-text-q3_K_S", + "1.4\u202fGB" ], [ - "27b-text-q3_K_S", - "12 GB" + "2b-text-q3_K_M", + "1.5\u202fGB" ], [ - "27b-text-q4_0", - "16 GB" + "2b-text-q3_K_L", + "1.6\u202fGB" ], [ - "27b-text-q4_1", - "17 GB" + "2b-text-q4_0", + "1.6\u202fGB" ], [ - "27b-text-q4_K_M", - "17 GB" + "2b-text-q4_1", + "1.8\u202fGB" ], [ - "27b-text-q4_K_S", - "16 GB" + "2b-text-q4_K_S", + "1.6\u202fGB" ], [ - "27b-text-q5_0", - "19 GB" + "2b-text-q4_K_M", + "1.7\u202fGB" ], [ - "27b-text-q5_1", - "21 GB" + "2b-text-q5_0", + "1.9\u202fGB" ], [ - "27b-text-q5_K_M", - "19 GB" + "2b-text-q5_1", + "2.0\u202fGB" ], [ - "27b-text-q5_K_S", - "19 GB" + "2b-text-q5_K_S", + "1.9\u202fGB" ], [ - "27b-text-q6_K", - "22 GB" + "2b-text-q5_K_M", + "1.9\u202fGB" ], [ - "27b-text-q8_0", - "29 GB" + "2b-text-q6_K", + "2.2\u202fGB" + ], + [ + "2b-text-q8_0", + "2.8\u202fGB" ], [ "9b-instruct-fp16", - "18 GB" + "18\u202fGB" ], [ "9b-instruct-q2_K", - "3.8 GB" - ], - [ - "9b-instruct-q3_K_L", - "5.1 GB" - ], - [ - "9b-instruct-q3_K_M", - "4.8 GB" + "3.8\u202fGB" ], [ "9b-instruct-q3_K_S", - "4.3 GB" + "4.3\u202fGB" + ], + [ + "9b-instruct-q3_K_M", + "4.8\u202fGB" + ], + [ + "9b-instruct-q3_K_L", + "5.1\u202fGB" ], [ "9b-instruct-q4_0", - "5.4 GB" + "5.4\u202fGB" ], [ "9b-instruct-q4_1", - "6.0 GB" - ], - [ - "9b-instruct-q4_K_M", - "5.8 GB" + "6.0\u202fGB" ], [ "9b-instruct-q4_K_S", - "5.5 GB" + "5.5\u202fGB" + ], + [ + "9b-instruct-q4_K_M", + "5.8\u202fGB" ], [ "9b-instruct-q5_0", - "6.5 GB" + "6.5\u202fGB" ], [ "9b-instruct-q5_1", - "7.0 GB" - ], - [ - "9b-instruct-q5_K_M", - "6.6 GB" + "7.0\u202fGB" ], [ "9b-instruct-q5_K_S", - "6.5 GB" + "6.5\u202fGB" + ], + [ + "9b-instruct-q5_K_M", + "6.6\u202fGB" ], [ "9b-instruct-q6_K", - "7.6 GB" + "7.6\u202fGB" ], [ "9b-instruct-q8_0", - "9.8 GB" + "9.8\u202fGB" ], [ "9b-text-fp16", - "18 GB" + "18\u202fGB" ], [ "9b-text-q2_K", - "3.8 GB" - ], - [ - "9b-text-q3_K_L", - "5.1 GB" - ], - [ - "9b-text-q3_K_M", - "4.8 GB" + "3.8\u202fGB" ], [ "9b-text-q3_K_S", - "4.3 GB" + "4.3\u202fGB" + ], + [ + "9b-text-q3_K_M", + "4.8\u202fGB" + ], + [ + "9b-text-q3_K_L", + "5.1\u202fGB" ], [ "9b-text-q4_0", - "5.4 GB" + "5.4\u202fGB" ], [ "9b-text-q4_1", - "6.0 GB" - ], - [ - "9b-text-q4_K_M", - "5.8 GB" + "6.0\u202fGB" ], [ "9b-text-q4_K_S", - "5.5 GB" + "5.5\u202fGB" + ], + [ + "9b-text-q4_K_M", + "5.8\u202fGB" ], [ "9b-text-q5_0", - "6.5 GB" + "6.5\u202fGB" ], [ "9b-text-q5_1", - "7.0 GB" - ], - [ - "9b-text-q5_K_M", - "6.6 GB" + "7.0\u202fGB" ], [ "9b-text-q5_K_S", - "6.5 GB" + "6.5\u202fGB" + ], + [ + "9b-text-q5_K_M", + "6.6\u202fGB" ], [ "9b-text-q6_K", - "7.6 GB" + "7.6\u202fGB" ], [ "9b-text-q8_0", - "9.8 GB" + "9.8\u202fGB" + ], + [ + "27b-instruct-fp16", + "54\u202fGB" + ], + [ + "27b-instruct-q2_K", + "10\u202fGB" + ], + [ + "27b-instruct-q3_K_S", + "12\u202fGB" + ], + [ + "27b-instruct-q3_K_M", + "13\u202fGB" + ], + [ + "27b-instruct-q3_K_L", + "15\u202fGB" + ], + [ + "27b-instruct-q4_0", + "16\u202fGB" + ], + [ + "27b-instruct-q4_1", + "17\u202fGB" + ], + [ + "27b-instruct-q4_K_S", + "16\u202fGB" + ], + [ + "27b-instruct-q4_K_M", + "17\u202fGB" + ], + [ + "27b-instruct-q5_0", + "19\u202fGB" + ], + [ + "27b-instruct-q5_1", + "21\u202fGB" + ], + [ + "27b-instruct-q5_K_S", + "19\u202fGB" + ], + [ + 
"27b-instruct-q5_K_M", + "19\u202fGB" + ], + [ + "27b-instruct-q6_K", + "22\u202fGB" + ], + [ + "27b-instruct-q8_0", + "29\u202fGB" + ], + [ + "27b-text-fp16", + "54\u202fGB" + ], + [ + "27b-text-q2_K", + "10\u202fGB" + ], + [ + "27b-text-q3_K_S", + "12\u202fGB" + ], + [ + "27b-text-q3_K_M", + "13\u202fGB" + ], + [ + "27b-text-q3_K_L", + "15\u202fGB" + ], + [ + "27b-text-q4_0", + "16\u202fGB" + ], + [ + "27b-text-q4_1", + "17\u202fGB" + ], + [ + "27b-text-q4_K_S", + "16\u202fGB" + ], + [ + "27b-text-q4_K_M", + "17\u202fGB" + ], + [ + "27b-text-q5_0", + "19\u202fGB" + ], + [ + "27b-text-q5_1", + "21\u202fGB" + ], + [ + "27b-text-q5_K_S", + "19\u202fGB" + ], + [ + "27b-text-q5_K_M", + "19\u202fGB" + ], + [ + "27b-text-q6_K", + "22\u202fGB" + ], + [ + "27b-text-q8_0", + "29\u202fGB" ] ], "image": false, @@ -413,71 +777,71 @@ "tags": [ [ "latest", - "7.1 GB" + "7.1\u202fGB" ], [ "12b", - "7.1 GB" + "7.1\u202fGB" ], [ "12b-instruct-2407-fp16", - "25 GB" + "25\u202fGB" ], [ "12b-instruct-2407-q2_K", - "4.8 GB" - ], - [ - "12b-instruct-2407-q3_K_L", - "6.6 GB" - ], - [ - "12b-instruct-2407-q3_K_M", - "6.1 GB" + "4.8\u202fGB" ], [ "12b-instruct-2407-q3_K_S", - "5.5 GB" + "5.5\u202fGB" + ], + [ + "12b-instruct-2407-q3_K_M", + "6.1\u202fGB" + ], + [ + "12b-instruct-2407-q3_K_L", + "6.6\u202fGB" ], [ "12b-instruct-2407-q4_0", - "7.1 GB" + "7.1\u202fGB" ], [ "12b-instruct-2407-q4_1", - "7.8 GB" - ], - [ - "12b-instruct-2407-q4_K_M", - "7.5 GB" + "7.8\u202fGB" ], [ "12b-instruct-2407-q4_K_S", - "7.1 GB" + "7.1\u202fGB" + ], + [ + "12b-instruct-2407-q4_K_M", + "7.5\u202fGB" ], [ "12b-instruct-2407-q5_0", - "8.5 GB" + "8.5\u202fGB" ], [ "12b-instruct-2407-q5_1", - "9.2 GB" - ], - [ - "12b-instruct-2407-q5_K_M", - "8.7 GB" + "9.2\u202fGB" ], [ "12b-instruct-2407-q5_K_S", - "8.5 GB" + "8.5\u202fGB" + ], + [ + "12b-instruct-2407-q5_K_M", + "8.7\u202fGB" ], [ "12b-instruct-2407-q6_K", - "10 GB" + "10\u202fGB" ], [ "12b-instruct-2407-q8_0", - "13 GB" + "13\u202fGB" ] ], "image": false, @@ -489,71 +853,71 @@ "tags": [ [ "latest", - "69 GB" + "69\u202fGB" ], [ "123b", - "69 GB" + "69\u202fGB" ], [ "123b-instruct-2407-fp16", - "245 GB" + "245\u202fGB" ], [ "123b-instruct-2407-q2_K", - "45 GB" - ], - [ - "123b-instruct-2407-q3_K_L", - "65 GB" - ], - [ - "123b-instruct-2407-q3_K_M", - "59 GB" + "45\u202fGB" ], [ "123b-instruct-2407-q3_K_S", - "53 GB" + "53\u202fGB" + ], + [ + "123b-instruct-2407-q3_K_M", + "59\u202fGB" + ], + [ + "123b-instruct-2407-q3_K_L", + "65\u202fGB" ], [ "123b-instruct-2407-q4_0", - "69 GB" + "69\u202fGB" ], [ "123b-instruct-2407-q4_1", - "77 GB" - ], - [ - "123b-instruct-2407-q4_K_M", - "73 GB" + "77\u202fGB" ], [ "123b-instruct-2407-q4_K_S", - "70 GB" + "70\u202fGB" + ], + [ + "123b-instruct-2407-q4_K_M", + "73\u202fGB" ], [ "123b-instruct-2407-q5_0", - "84 GB" + "84\u202fGB" ], [ "123b-instruct-2407-q5_1", - "92 GB" - ], - [ - "123b-instruct-2407-q5_K_M", - "86 GB" + "92\u202fGB" ], [ "123b-instruct-2407-q5_K_S", - "84 GB" + "84\u202fGB" + ], + [ + "123b-instruct-2407-q5_K_M", + "86\u202fGB" ], [ "123b-instruct-2407-q6_K", - "101 GB" + "101\u202fGB" ], [ "123b-instruct-2407-q8_0", - "130 GB" + "130\u202fGB" ] ], "image": false, @@ -565,391 +929,391 @@ "tags": [ [ "latest", - "4.4 GB" - ], - [ - "72b", - "41 GB" - ], - [ - "7b", - "4.4 GB" - ], - [ - "1.5b", - "935 MB" + "4.4\u202fGB" ], [ "0.5b", - "352 MB" + "352\u202fMB" ], [ - "72b-instruct", - "41 GB" + "1.5b", + "935\u202fMB" ], [ - "72b-instruct-fp16", - "145 GB" + "7b", + "4.4\u202fGB" ], [ - "72b-instruct-q2_K", - "30 GB" - ], - 
[ - "72b-instruct-q3_K_L", - "40 GB" - ], - [ - "72b-instruct-q3_K_M", - "38 GB" - ], - [ - "72b-instruct-q3_K_S", - "34 GB" - ], - [ - "72b-instruct-q4_0", - "41 GB" - ], - [ - "72b-instruct-q4_1", - "46 GB" - ], - [ - "72b-instruct-q4_K_M", - "47 GB" - ], - [ - "72b-instruct-q4_K_S", - "44 GB" - ], - [ - "72b-instruct-q5_0", - "50 GB" - ], - [ - "72b-instruct-q5_1", - "55 GB" - ], - [ - "72b-instruct-q5_K_M", - "54 GB" - ], - [ - "72b-instruct-q5_K_S", - "51 GB" - ], - [ - "72b-instruct-q6_K", - "64 GB" - ], - [ - "72b-instruct-q8_0", - "77 GB" - ], - [ - "72b-text", - "41 GB" - ], - [ - "72b-text-fp16", - "145 GB" - ], - [ - "72b-text-q2_K", - "30 GB" - ], - [ - "72b-text-q3_K_L", - "40 GB" - ], - [ - "72b-text-q3_K_M", - "38 GB" - ], - [ - "72b-text-q3_K_S", - "34 GB" - ], - [ - "72b-text-q4_0", - "41 GB" - ], - [ - "72b-text-q4_1", - "46 GB" - ], - [ - "72b-text-q4_K_M", - "47 GB" - ], - [ - "72b-text-q4_K_S", - "44 GB" - ], - [ - "72b-text-q5_0", - "50 GB" - ], - [ - "72b-text-q5_1", - "55 GB" - ], - [ - "72b-text-q5_K_M", - "54 GB" - ], - [ - "72b-text-q5_K_S", - "51 GB" - ], - [ - "72b-text-q6_K", - "64 GB" - ], - [ - "72b-text-q8_0", - "77 GB" - ], - [ - "7b-instruct", - "4.4 GB" - ], - [ - "7b-instruct-fp16", - "15 GB" - ], - [ - "7b-instruct-q2_K", - "3.0 GB" - ], - [ - "7b-instruct-q3_K_L", - "4.1 GB" - ], - [ - "7b-instruct-q3_K_M", - "3.8 GB" - ], - [ - "7b-instruct-q3_K_S", - "3.5 GB" - ], - [ - "7b-instruct-q4_0", - "4.4 GB" - ], - [ - "7b-instruct-q4_1", - "4.9 GB" - ], - [ - "7b-instruct-q4_K_M", - "4.7 GB" - ], - [ - "7b-instruct-q4_K_S", - "4.5 GB" - ], - [ - "7b-instruct-q5_0", - "5.3 GB" - ], - [ - "7b-instruct-q5_1", - "5.8 GB" - ], - [ - "7b-instruct-q5_K_M", - "5.4 GB" - ], - [ - "7b-instruct-q5_K_S", - "5.3 GB" - ], - [ - "7b-instruct-q6_K", - "6.3 GB" - ], - [ - "7b-instruct-q8_0", - "8.1 GB" - ], - [ - "7b-text", - "4.4 GB" - ], - [ - "7b-text-q2_K", - "3.0 GB" - ], - [ - "7b-text-q3_K_L", - "4.1 GB" - ], - [ - "7b-text-q3_K_M", - "3.8 GB" - ], - [ - "7b-text-q3_K_S", - "3.5 GB" - ], - [ - "7b-text-q4_0", - "4.4 GB" - ], - [ - "7b-text-q4_1", - "4.9 GB" - ], - [ - "7b-text-q4_K_M", - "4.7 GB" - ], - [ - "7b-text-q4_K_S", - "4.5 GB" - ], - [ - "7b-text-q5_0", - "5.3 GB" - ], - [ - "7b-text-q5_1", - "5.8 GB" - ], - [ - "7b-text-q8_0", - "8.1 GB" - ], - [ - "1.5b-instruct", - "935 MB" - ], - [ - "1.5b-instruct-fp16", - "3.1 GB" - ], - [ - "1.5b-instruct-q2_K", - "676 MB" - ], - [ - "1.5b-instruct-q3_K_L", - "880 MB" - ], - [ - "1.5b-instruct-q3_K_M", - "824 MB" - ], - [ - "1.5b-instruct-q3_K_S", - "761 MB" - ], - [ - "1.5b-instruct-q4_0", - "935 MB" - ], - [ - "1.5b-instruct-q4_1", - "1.0 GB" - ], - [ - "1.5b-instruct-q4_K_M", - "986 MB" - ], - [ - "1.5b-instruct-q4_K_S", - "940 MB" - ], - [ - "1.5b-instruct-q5_0", - "1.1 GB" - ], - [ - "1.5b-instruct-q5_1", - "1.2 GB" - ], - [ - "1.5b-instruct-q5_K_M", - "1.1 GB" - ], - [ - "1.5b-instruct-q5_K_S", - "1.1 GB" - ], - [ - "1.5b-instruct-q6_K", - "1.3 GB" - ], - [ - "1.5b-instruct-q8_0", - "1.6 GB" + "72b", + "41\u202fGB" ], [ "0.5b-instruct", - "352 MB" + "352\u202fMB" + ], + [ + "1.5b-instruct", + "935\u202fMB" + ], + [ + "7b-instruct", + "4.4\u202fGB" + ], + [ + "7b-text", + "4.4\u202fGB" + ], + [ + "72b-instruct", + "41\u202fGB" + ], + [ + "72b-text", + "41\u202fGB" ], [ "0.5b-instruct-fp16", - "994 MB" + "994\u202fMB" ], [ "0.5b-instruct-q2_K", - "339 MB" - ], - [ - "0.5b-instruct-q3_K_L", - "369 MB" - ], - [ - "0.5b-instruct-q3_K_M", - "355 MB" + "339\u202fMB" ], [ "0.5b-instruct-q3_K_S", - "338 MB" + 
"338\u202fMB" + ], + [ + "0.5b-instruct-q3_K_M", + "355\u202fMB" + ], + [ + "0.5b-instruct-q3_K_L", + "369\u202fMB" ], [ "0.5b-instruct-q4_0", - "352 MB" + "352\u202fMB" ], [ "0.5b-instruct-q4_1", - "375 MB" - ], - [ - "0.5b-instruct-q4_K_M", - "398 MB" + "375\u202fMB" ], [ "0.5b-instruct-q4_K_S", - "385 MB" + "385\u202fMB" + ], + [ + "0.5b-instruct-q4_K_M", + "398\u202fMB" ], [ "0.5b-instruct-q5_0", - "397 MB" + "397\u202fMB" ], [ "0.5b-instruct-q5_1", - "419 MB" - ], - [ - "0.5b-instruct-q5_K_M", - "420 MB" + "419\u202fMB" ], [ "0.5b-instruct-q5_K_S", - "413 MB" + "413\u202fMB" + ], + [ + "0.5b-instruct-q5_K_M", + "420\u202fMB" ], [ "0.5b-instruct-q6_K", - "506 MB" + "506\u202fMB" ], [ "0.5b-instruct-q8_0", - "531 MB" + "531\u202fMB" + ], + [ + "1.5b-instruct-fp16", + "3.1\u202fGB" + ], + [ + "1.5b-instruct-q2_K", + "676\u202fMB" + ], + [ + "1.5b-instruct-q3_K_S", + "761\u202fMB" + ], + [ + "1.5b-instruct-q3_K_M", + "824\u202fMB" + ], + [ + "1.5b-instruct-q3_K_L", + "880\u202fMB" + ], + [ + "1.5b-instruct-q4_0", + "935\u202fMB" + ], + [ + "1.5b-instruct-q4_1", + "1.0\u202fGB" + ], + [ + "1.5b-instruct-q4_K_S", + "940\u202fMB" + ], + [ + "1.5b-instruct-q4_K_M", + "986\u202fMB" + ], + [ + "1.5b-instruct-q5_0", + "1.1\u202fGB" + ], + [ + "1.5b-instruct-q5_1", + "1.2\u202fGB" + ], + [ + "1.5b-instruct-q5_K_S", + "1.1\u202fGB" + ], + [ + "1.5b-instruct-q5_K_M", + "1.1\u202fGB" + ], + [ + "1.5b-instruct-q6_K", + "1.3\u202fGB" + ], + [ + "1.5b-instruct-q8_0", + "1.6\u202fGB" + ], + [ + "7b-instruct-fp16", + "15\u202fGB" + ], + [ + "7b-instruct-q2_K", + "3.0\u202fGB" + ], + [ + "7b-instruct-q3_K_S", + "3.5\u202fGB" + ], + [ + "7b-instruct-q3_K_M", + "3.8\u202fGB" + ], + [ + "7b-instruct-q3_K_L", + "4.1\u202fGB" + ], + [ + "7b-instruct-q4_0", + "4.4\u202fGB" + ], + [ + "7b-instruct-q4_1", + "4.9\u202fGB" + ], + [ + "7b-instruct-q4_K_S", + "4.5\u202fGB" + ], + [ + "7b-instruct-q4_K_M", + "4.7\u202fGB" + ], + [ + "7b-instruct-q5_0", + "5.3\u202fGB" + ], + [ + "7b-instruct-q5_1", + "5.8\u202fGB" + ], + [ + "7b-instruct-q5_K_S", + "5.3\u202fGB" + ], + [ + "7b-instruct-q5_K_M", + "5.4\u202fGB" + ], + [ + "7b-instruct-q6_K", + "6.3\u202fGB" + ], + [ + "7b-instruct-q8_0", + "8.1\u202fGB" + ], + [ + "7b-text-q2_K", + "3.0\u202fGB" + ], + [ + "7b-text-q3_K_S", + "3.5\u202fGB" + ], + [ + "7b-text-q3_K_M", + "3.8\u202fGB" + ], + [ + "7b-text-q3_K_L", + "4.1\u202fGB" + ], + [ + "7b-text-q4_0", + "4.4\u202fGB" + ], + [ + "7b-text-q4_1", + "4.9\u202fGB" + ], + [ + "7b-text-q4_K_S", + "4.5\u202fGB" + ], + [ + "7b-text-q4_K_M", + "4.7\u202fGB" + ], + [ + "7b-text-q5_0", + "5.3\u202fGB" + ], + [ + "7b-text-q5_1", + "5.8\u202fGB" + ], + [ + "7b-text-q8_0", + "8.1\u202fGB" + ], + [ + "72b-instruct-fp16", + "145\u202fGB" + ], + [ + "72b-instruct-q2_K", + "30\u202fGB" + ], + [ + "72b-instruct-q3_K_S", + "34\u202fGB" + ], + [ + "72b-instruct-q3_K_M", + "38\u202fGB" + ], + [ + "72b-instruct-q3_K_L", + "40\u202fGB" + ], + [ + "72b-instruct-q4_0", + "41\u202fGB" + ], + [ + "72b-instruct-q4_1", + "46\u202fGB" + ], + [ + "72b-instruct-q4_K_S", + "44\u202fGB" + ], + [ + "72b-instruct-q4_K_M", + "47\u202fGB" + ], + [ + "72b-instruct-q5_0", + "50\u202fGB" + ], + [ + "72b-instruct-q5_1", + "55\u202fGB" + ], + [ + "72b-instruct-q5_K_S", + "51\u202fGB" + ], + [ + "72b-instruct-q5_K_M", + "54\u202fGB" + ], + [ + "72b-instruct-q6_K", + "64\u202fGB" + ], + [ + "72b-instruct-q8_0", + "77\u202fGB" + ], + [ + "72b-text-fp16", + "145\u202fGB" + ], + [ + "72b-text-q2_K", + "30\u202fGB" + ], + [ + "72b-text-q3_K_S", + "34\u202fGB" + ], + 
[ + "72b-text-q3_K_M", + "38\u202fGB" + ], + [ + "72b-text-q3_K_L", + "40\u202fGB" + ], + [ + "72b-text-q4_0", + "41\u202fGB" + ], + [ + "72b-text-q4_1", + "46\u202fGB" + ], + [ + "72b-text-q4_K_S", + "44\u202fGB" + ], + [ + "72b-text-q4_K_M", + "47\u202fGB" + ], + [ + "72b-text-q5_0", + "50\u202fGB" + ], + [ + "72b-text-q5_1", + "55\u202fGB" + ], + [ + "72b-text-q5_K_S", + "51\u202fGB" + ], + [ + "72b-text-q5_K_M", + "54\u202fGB" + ], + [ + "72b-text-q6_K", + "64\u202fGB" + ], + [ + "72b-text-q8_0", + "77\u202fGB" ] ], "image": false, @@ -961,203 +1325,203 @@ "tags": [ [ "latest", - "8.9 GB" - ], - [ - "236b", - "133 GB" + "8.9\u202fGB" ], [ "16b", - "8.9 GB" + "8.9\u202fGB" + ], + [ + "236b", + "133\u202fGB" ], [ "lite", - "8.9 GB" - ], - [ - "236b-instruct-q4_k_m", - "142 GB" - ], - [ - "236b-instruct-fp16", - "472 GB" - ], - [ - "236b-instruct-q2_K", - "86 GB" - ], - [ - "236b-instruct-q3_K_L", - "122 GB" - ], - [ - "236b-instruct-q3_K_M", - "113 GB" - ], - [ - "236b-instruct-q3_K_S", - "102 GB" - ], - [ - "236b-instruct-q4_0", - "133 GB" - ], - [ - "236b-instruct-q4_1", - "148 GB" - ], - [ - "236b-instruct-q4_K_M", - "142 GB" - ], - [ - "236b-instruct-q4_K_S", - "134 GB" - ], - [ - "236b-instruct-q5_0", - "162 GB" - ], - [ - "236b-instruct-q5_1", - "177 GB" - ], - [ - "236b-instruct-q5_K_M", - "167 GB" - ], - [ - "236b-instruct-q5_K_S", - "162 GB" - ], - [ - "236b-instruct-q6_K", - "194 GB" - ], - [ - "236b-instruct-q8_0", - "251 GB" + "8.9\u202fGB" ], [ "16b-lite-base-fp16", - "31 GB" + "31\u202fGB" ], [ "16b-lite-base-q2_K", - "6.4 GB" - ], - [ - "16b-lite-base-q3_K_L", - "8.5 GB" - ], - [ - "16b-lite-base-q3_K_M", - "8.1 GB" + "6.4\u202fGB" ], [ "16b-lite-base-q3_K_S", - "7.5 GB" + "7.5\u202fGB" + ], + [ + "16b-lite-base-q3_K_M", + "8.1\u202fGB" + ], + [ + "16b-lite-base-q3_K_L", + "8.5\u202fGB" ], [ "16b-lite-base-q4_0", - "8.9 GB" + "8.9\u202fGB" ], [ "16b-lite-base-q4_1", - "9.9 GB" - ], - [ - "16b-lite-base-q4_K_M", - "10 GB" + "9.9\u202fGB" ], [ "16b-lite-base-q4_K_S", - "9.5 GB" + "9.5\u202fGB" + ], + [ + "16b-lite-base-q4_K_M", + "10\u202fGB" ], [ "16b-lite-base-q5_0", - "11 GB" + "11\u202fGB" ], [ "16b-lite-base-q5_1", - "12 GB" - ], - [ - "16b-lite-base-q5_K_M", - "12 GB" + "12\u202fGB" ], [ "16b-lite-base-q5_K_S", - "11 GB" + "11\u202fGB" + ], + [ + "16b-lite-base-q5_K_M", + "12\u202fGB" ], [ "16b-lite-base-q6_K", - "14 GB" + "14\u202fGB" ], [ "16b-lite-base-q8_0", - "17 GB" + "17\u202fGB" ], [ "16b-lite-instruct-fp16", - "31 GB" + "31\u202fGB" ], [ "16b-lite-instruct-q2_K", - "6.4 GB" - ], - [ - "16b-lite-instruct-q3_K_L", - "8.5 GB" - ], - [ - "16b-lite-instruct-q3_K_M", - "8.1 GB" + "6.4\u202fGB" ], [ "16b-lite-instruct-q3_K_S", - "7.5 GB" + "7.5\u202fGB" + ], + [ + "16b-lite-instruct-q3_K_M", + "8.1\u202fGB" + ], + [ + "16b-lite-instruct-q3_K_L", + "8.5\u202fGB" ], [ "16b-lite-instruct-q4_0", - "8.9 GB" + "8.9\u202fGB" ], [ "16b-lite-instruct-q4_1", - "9.9 GB" - ], - [ - "16b-lite-instruct-q4_K_M", - "10 GB" + "9.9\u202fGB" ], [ "16b-lite-instruct-q4_K_S", - "9.5 GB" + "9.5\u202fGB" + ], + [ + "16b-lite-instruct-q4_K_M", + "10\u202fGB" ], [ "16b-lite-instruct-q5_0", - "11 GB" + "11\u202fGB" ], [ "16b-lite-instruct-q5_1", - "12 GB" - ], - [ - "16b-lite-instruct-q5_K_M", - "12 GB" + "12\u202fGB" ], [ "16b-lite-instruct-q5_K_S", - "11 GB" + "11\u202fGB" + ], + [ + "16b-lite-instruct-q5_K_M", + "12\u202fGB" ], [ "16b-lite-instruct-q6_K", - "14 GB" + "14\u202fGB" ], [ "16b-lite-instruct-q8_0", - "17 GB" + "17\u202fGB" + ], + [ + "236b-instruct-fp16", + "472\u202fGB" + ], + 
[ + "236b-instruct-q2_K", + "86\u202fGB" + ], + [ + "236b-instruct-q3_K_S", + "102\u202fGB" + ], + [ + "236b-instruct-q3_K_M", + "113\u202fGB" + ], + [ + "236b-instruct-q3_K_L", + "122\u202fGB" + ], + [ + "236b-instruct-q4_0", + "133\u202fGB" + ], + [ + "236b-instruct-q4_1", + "148\u202fGB" + ], + [ + "236b-instruct-q4_K_S", + "134\u202fGB" + ], + [ + "236b-instruct-q4_K_M", + "142\u202fGB" + ], + [ + "236b-instruct-q4_k_m", + "142\u202fGB" + ], + [ + "236b-instruct-q5_0", + "162\u202fGB" + ], + [ + "236b-instruct-q5_1", + "177\u202fGB" + ], + [ + "236b-instruct-q5_K_S", + "162\u202fGB" + ], + [ + "236b-instruct-q5_K_M", + "167\u202fGB" + ], + [ + "236b-instruct-q6_K", + "194\u202fGB" + ], + [ + "236b-instruct-q8_0", + "251\u202fGB" ] ], "image": false, @@ -1169,295 +1533,291 @@ "tags": [ [ "latest", - "2.2 GB" - ], - [ - "14b", - "7.9 GB" + "2.2\u202fGB" ], [ "3.8b", - "2.2 GB" + "2.2\u202fGB" + ], + [ + "14b", + "7.9\u202fGB" ], [ "instruct", - "2.2 GB" + "2.2\u202fGB" ], [ "medium", - "7.9 GB" - ], - [ - "mini", - "2.2 GB" - ], - [ - "14b-instruct", - "7.9 GB" - ], - [ - "14b-medium-128k-instruct-f16", - "28 GB" - ], - [ - "14b-medium-4k-instruct-f16", - "28 GB" - ], - [ - "14b-medium-128k-instruct-q2_K", - "5.1 GB" - ], - [ - "14b-medium-128k-instruct-q3_K_L", - "7.5 GB" - ], - [ - "14b-medium-128k-instruct-q3_K_M", - "6.9 GB" - ], - [ - "14b-medium-128k-instruct-q3_K_S", - "6.1 GB" - ], - [ - "14b-medium-128k-instruct-q4_0", - "7.9 GB" - ], - [ - "14b-medium-128k-instruct-q4_1", - "8.8 GB" - ], - [ - "14b-medium-128k-instruct-q4_K_M", - "8.6 GB" - ], - [ - "14b-medium-128k-instruct-q4_K_S", - "8.0 GB" - ], - [ - "14b-medium-128k-instruct-q5_0", - "9.6 GB" - ], - [ - "14b-medium-128k-instruct-q5_1", - "10 GB" - ], - [ - "14b-medium-128k-instruct-q5_K_M", - "10 GB" - ], - [ - "14b-medium-128k-instruct-q5_K_S", - "9.6 GB" - ], - [ - "14b-medium-128k-instruct-q6_K", - "11 GB" - ], - [ - "14b-medium-4k-instruct-q2_K", - "5.1 GB" - ], - [ - "14b-medium-4k-instruct-q3_K_L", - "7.5 GB" - ], - [ - "14b-medium-4k-instruct-q3_K_M", - "6.9 GB" - ], - [ - "14b-medium-4k-instruct-q3_K_S", - "6.1 GB" - ], - [ - "14b-medium-4k-instruct-q4_0", - "7.9 GB" - ], - [ - "14b-medium-4k-instruct-q4_1", - "8.8 GB" - ], - [ - "14b-medium-4k-instruct-q4_K_M", - "8.6 GB" - ], - [ - "14b-medium-4k-instruct-q4_K_S", - "8.0 GB" - ], - [ - "14b-medium-4k-instruct-q5_0", - "9.6 GB" - ], - [ - "14b-medium-4k-instruct-q5_1", - "10 GB" - ], - [ - "14b-medium-4k-instruct-q5_K_M", - "10 GB" - ], - [ - "14b-medium-4k-instruct-q5_K_S", - "9.6 GB" - ], - [ - "14b-medium-4k-instruct-q6_K", - "11 GB" - ], - [ - "14b-medium-4k-instruct-q8_0", - "15 GB" - ], - [ - "3.8b-instruct", - "2.2 GB" - ], - [ - "3.8b-mini-4k-instruct-f16", - "7.6 GB" - ], - [ - "3.8b-mini-128k-instruct-f16", - "7.6 GB" - ], - [ - "3.8b-mini-128k-instruct-fp16", - "7.6 GB" - ], - [ - "3.8b-mini-128k-instruct-q2_K", - "1.4 GB" - ], - [ - "3.8b-mini-128k-instruct-q3_K_L", - "2.1 GB" - ], - [ - "3.8b-mini-128k-instruct-q3_K_M", - "2.0 GB" - ], - [ - "3.8b-mini-128k-instruct-q3_K_S", - "1.7 GB" - ], - [ - "3.8b-mini-128k-instruct-q4_0", - "2.2 GB" - ], - [ - "3.8b-mini-128k-instruct-q4_1", - "2.4 GB" - ], - [ - "3.8b-mini-128k-instruct-q4_K_M", - "2.4 GB" - ], - [ - "3.8b-mini-128k-instruct-q4_K_S", - "2.2 GB" - ], - [ - "3.8b-mini-128k-instruct-q5_0", - "2.6 GB" - ], - [ - "3.8b-mini-128k-instruct-q5_1", - "2.9 GB" - ], - [ - "3.8b-mini-128k-instruct-q5_K_M", - "2.8 GB" - ], - [ - "3.8b-mini-128k-instruct-q5_K_S", - "2.6 GB" - ], - [ - 
"3.8b-mini-128k-instruct-q6_K", - "3.1 GB" - ], - [ - "3.8b-mini-128k-instruct-q8_0", - "4.1 GB" - ], - [ - "3.8b-mini-4k-instruct-fp16", - "7.6 GB" - ], - [ - "3.8b-mini-4k-instruct-q2_K", - "1.4 GB" - ], - [ - "3.8b-mini-4k-instruct-q3_K_L", - "2.1 GB" - ], - [ - "3.8b-mini-4k-instruct-q3_K_M", - "2.0 GB" - ], - [ - "3.8b-mini-4k-instruct-q3_K_S", - "1.7 GB" - ], - [ - "3.8b-mini-4k-instruct-q4_0", - "2.2 GB" - ], - [ - "3.8b-mini-4k-instruct-q4_1", - "2.4 GB" - ], - [ - "3.8b-mini-4k-instruct-q4_K_M", - "2.4 GB" - ], - [ - "3.8b-mini-4k-instruct-q4_K_S", - "2.2 GB" - ], - [ - "3.8b-mini-4k-instruct-q5_0", - "2.6 GB" - ], - [ - "3.8b-mini-4k-instruct-q5_1", - "2.9 GB" - ], - [ - "3.8b-mini-4k-instruct-q5_K_M", - "2.8 GB" - ], - [ - "3.8b-mini-4k-instruct-q5_K_S", - "2.6 GB" - ], - [ - "3.8b-mini-4k-instruct-q6_K", - "3.1 GB" - ], - [ - "3.8b-mini-4k-instruct-q8_0", - "4.1 GB" - ], - [ - "3.8b-mini-instruct-4k-fp16", - "7.6 GB" - ], - [ - "mini-128k", - "2.2 GB" + "7.9\u202fGB" ], [ "medium-128k", - "7.9 GB" + "7.9\u202fGB" + ], + [ + "medium-4k", + "7.9\u202fGB" + ], + [ + "mini", + "2.2\u202fGB" + ], + [ + "mini-128k", + "2.2\u202fGB" ], [ "mini-4k", - "2.4 GB" + "2.4\u202fGB" + ], + [ + "3.8b-instruct", + "2.2\u202fGB" + ], + [ + "14b-instruct", + "7.9\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-fp16", + "7.6\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q2_K", + "1.4\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q3_K_S", + "1.7\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q3_K_M", + "2.0\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q3_K_L", + "2.1\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q4_0", + "2.2\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q4_1", + "2.4\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q4_K_S", + "2.2\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q4_K_M", + "2.4\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q5_0", + "2.6\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q5_1", + "2.9\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q5_K_S", + "2.6\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q5_K_M", + "2.8\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q6_K", + "3.1\u202fGB" + ], + [ + "3.8b-mini-128k-instruct-q8_0", + "4.1\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-fp16", + "7.6\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q2_K", + "1.4\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q3_K_S", + "1.7\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q3_K_M", + "2.0\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q3_K_L", + "2.1\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q4_0", + "2.2\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q4_1", + "2.4\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q4_K_S", + "2.2\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q4_K_M", + "2.4\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q5_0", + "2.6\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q5_1", + "2.9\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q5_K_S", + "2.6\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q5_K_M", + "2.8\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q6_K", + "3.1\u202fGB" + ], + [ + "3.8b-mini-4k-instruct-q8_0", + "4.1\u202fGB" + ], + [ + "14b-medium-128k-instruct-fp16", + "28\u202fGB" + ], + [ + "14b-medium-128k-instruct-q2_K", + "5.1\u202fGB" + ], + [ + "14b-medium-128k-instruct-q3_K_S", + "6.1\u202fGB" + ], + [ + "14b-medium-128k-instruct-q3_K_M", + "6.9\u202fGB" + ], + [ + "14b-medium-128k-instruct-q3_K_L", + "7.5\u202fGB" + ], + [ + "14b-medium-128k-instruct-q4_0", + "7.9\u202fGB" + ], + [ + "14b-medium-128k-instruct-q4_1", + "8.8\u202fGB" + ], + [ + "14b-medium-128k-instruct-q4_K_S", + "8.0\u202fGB" + ], + [ + 
"14b-medium-128k-instruct-q4_K_M", + "8.6\u202fGB" + ], + [ + "14b-medium-128k-instruct-q5_0", + "9.6\u202fGB" + ], + [ + "14b-medium-128k-instruct-q5_1", + "10\u202fGB" + ], + [ + "14b-medium-128k-instruct-q5_K_S", + "9.6\u202fGB" + ], + [ + "14b-medium-128k-instruct-q5_K_M", + "10\u202fGB" + ], + [ + "14b-medium-128k-instruct-q6_K", + "11\u202fGB" + ], + [ + "14b-medium-128k-instruct-q8_0", + "15\u202fGB" + ], + [ + "14b-medium-4k-instruct-fp16", + "28\u202fGB" + ], + [ + "14b-medium-4k-instruct-q2_K", + "5.1\u202fGB" + ], + [ + "14b-medium-4k-instruct-q3_K_S", + "6.1\u202fGB" + ], + [ + "14b-medium-4k-instruct-q3_K_M", + "6.9\u202fGB" + ], + [ + "14b-medium-4k-instruct-q3_K_L", + "7.5\u202fGB" + ], + [ + "14b-medium-4k-instruct-q4_0", + "7.9\u202fGB" + ], + [ + "14b-medium-4k-instruct-q4_1", + "8.8\u202fGB" + ], + [ + "14b-medium-4k-instruct-q4_K_S", + "8.0\u202fGB" + ], + [ + "14b-medium-4k-instruct-q4_K_M", + "8.6\u202fGB" + ], + [ + "14b-medium-4k-instruct-q5_0", + "9.6\u202fGB" + ], + [ + "14b-medium-4k-instruct-q5_1", + "10\u202fGB" + ], + [ + "14b-medium-4k-instruct-q5_K_S", + "9.6\u202fGB" + ], + [ + "14b-medium-4k-instruct-q5_K_M", + "10\u202fGB" + ], + [ + "14b-medium-4k-instruct-q6_K", + "11\u202fGB" + ], + [ + "14b-medium-4k-instruct-q8_0", + "15\u202fGB" ] ], "image": false, @@ -1469,339 +1829,339 @@ "tags": [ [ "latest", - "4.1 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" + "4.1\u202fGB" ], [ "instruct", - "4.1 GB" + "4.1\u202fGB" ], [ "text", - "4.1 GB" + "4.1\u202fGB" ], [ "v0.1", - "4.1 GB" + "4.1\u202fGB" ], [ "v0.2", - "4.1 GB" + "4.1\u202fGB" ], [ "v0.3", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-instruct", - "4.1 GB" - ], - [ - "7b-instruct-v0.2-fp16", - "14 GB" - ], - [ - "7b-instruct-v0.2-q2_K", - "3.1 GB" - ], - [ - "7b-instruct-v0.2-q3_K_L", - "3.8 GB" - ], - [ - "7b-instruct-v0.2-q3_K_M", - "3.5 GB" - ], - [ - "7b-instruct-v0.2-q3_K_S", - "3.2 GB" - ], - [ - "7b-instruct-v0.2-q4_0", - "4.1 GB" - ], - [ - "7b-instruct-v0.2-q4_1", - "4.6 GB" - ], - [ - "7b-instruct-v0.2-q4_K_M", - "4.4 GB" - ], - [ - "7b-instruct-v0.2-q4_K_S", - "4.1 GB" - ], - [ - "7b-instruct-v0.2-q5_0", - "5.0 GB" - ], - [ - "7b-instruct-v0.2-q5_1", - "5.4 GB" - ], - [ - "7b-instruct-v0.2-q5_K_M", - "5.1 GB" - ], - [ - "7b-instruct-v0.2-q5_K_S", - "5.0 GB" - ], - [ - "7b-instruct-v0.2-q6_K", - "5.9 GB" - ], - [ - "7b-instruct-v0.2-q8_0", - "7.7 GB" - ], - [ - "7b-instruct-v0.3-fp16", - "14 GB" - ], - [ - "7b-instruct-v0.3-q2_K", - "2.7 GB" - ], - [ - "7b-instruct-v0.3-q3_K_L", - "3.8 GB" - ], - [ - "7b-instruct-v0.3-q3_K_M", - "3.5 GB" - ], - [ - "7b-instruct-v0.3-q3_K_S", - "3.2 GB" - ], - [ - "7b-instruct-v0.3-q4_0", - "4.1 GB" - ], - [ - "7b-instruct-v0.3-q4_1", - "4.6 GB" - ], - [ - "7b-instruct-q4_0", - "4.1 GB" - ], - [ - "7b-instruct-v0.3-q5_K_M", - "5.1 GB" - ], - [ - "7b-instruct-v0.3-q8_0", - "7.7 GB" - ], - [ - "7b-instruct-v0.3-q4_K_S", - "4.1 GB" - ], - [ - "7b-instruct-q3_K_M", - "3.5 GB" - ], - [ - "7b-instruct-fp16", - "14 GB" - ], - [ - "7b-instruct-q3_K_L", - "3.8 GB" - ], - [ - "7b-instruct-v0.3-q5_0", - "5.0 GB" - ], - [ - "7b-instruct-q2_K", - "3.1 GB" - ], - [ - "7b-instruct-q3_K_S", - "3.2 GB" - ], - [ - "7b-instruct-v0.3-q6_K", - "5.9 GB" - ], - [ - "7b-instruct-v0.3-q5_K_S", - "5.0 GB" - ], - [ - "7b-instruct-v0.3-q5_1", - "5.4 GB" - ], - [ - "7b-instruct-v0.3-q4_K_M", - "4.4 GB" - ], - [ - "7b-instruct-q6_K", - "5.9 GB" - ], - [ - "7b-instruct-q5_K_S", - "5.0 GB" - ], - [ - "7b-instruct-q4_K_S", - "4.1 GB" - ], - [ - "7b-instruct-q5_K_M", - "5.1 GB" - ], - [ - 
"7b-instruct-q4_1", - "4.6 GB" - ], - [ - "7b-instruct-q5_0", - "5.0 GB" - ], - [ - "7b-instruct-q4_K_M", - "4.4 GB" - ], - [ - "7b-instruct-q5_1", - "5.4 GB" - ], - [ - "7b-instruct-q8_0", - "7.7 GB" + "4.1\u202fGB" ], [ "7b-text", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-instruct-fp16", + "14\u202fGB" + ], + [ + "7b-instruct-q2_K", + "3.1\u202fGB" + ], + [ + "7b-instruct-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-instruct-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-instruct-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-instruct-q4_0", + "4.1\u202fGB" + ], + [ + "7b-instruct-q4_1", + "4.6\u202fGB" + ], + [ + "7b-instruct-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-instruct-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-instruct-q5_0", + "5.0\u202fGB" + ], + [ + "7b-instruct-q5_1", + "5.4\u202fGB" + ], + [ + "7b-instruct-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-instruct-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-instruct-q6_K", + "5.9\u202fGB" + ], + [ + "7b-instruct-q8_0", + "7.7\u202fGB" + ], + [ + "7b-instruct-v0.2-fp16", + "14\u202fGB" + ], + [ + "7b-instruct-v0.2-q2_K", + "3.1\u202fGB" + ], + [ + "7b-instruct-v0.2-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-instruct-v0.2-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-instruct-v0.2-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-instruct-v0.2-q4_0", + "4.1\u202fGB" + ], + [ + "7b-instruct-v0.2-q4_1", + "4.6\u202fGB" + ], + [ + "7b-instruct-v0.2-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-instruct-v0.2-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-instruct-v0.2-q5_0", + "5.0\u202fGB" + ], + [ + "7b-instruct-v0.2-q5_1", + "5.4\u202fGB" + ], + [ + "7b-instruct-v0.2-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-instruct-v0.2-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-instruct-v0.2-q6_K", + "5.9\u202fGB" + ], + [ + "7b-instruct-v0.2-q8_0", + "7.7\u202fGB" + ], + [ + "7b-instruct-v0.3-fp16", + "14\u202fGB" + ], + [ + "7b-instruct-v0.3-q2_K", + "2.7\u202fGB" + ], + [ + "7b-instruct-v0.3-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-instruct-v0.3-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-instruct-v0.3-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-instruct-v0.3-q4_0", + "4.1\u202fGB" + ], + [ + "7b-instruct-v0.3-q4_1", + "4.6\u202fGB" + ], + [ + "7b-instruct-v0.3-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-instruct-v0.3-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-instruct-v0.3-q5_0", + "5.0\u202fGB" + ], + [ + "7b-instruct-v0.3-q5_1", + "5.4\u202fGB" + ], + [ + "7b-instruct-v0.3-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-instruct-v0.3-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-instruct-v0.3-q6_K", + "5.9\u202fGB" + ], + [ + "7b-instruct-v0.3-q8_0", + "7.7\u202fGB" ], [ "7b-text-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-text-q2_K", - "3.1 GB" - ], - [ - "7b-text-q3_K_L", - "3.8 GB" - ], - [ - "7b-text-q3_K_M", - "3.5 GB" - ], - [ - "7b-text-v0.2-q3_K_M", - "3.5 GB" - ], - [ - "7b-text-q8_0", - "7.7 GB" - ], - [ - "7b-text-q5_K_S", - "5.0 GB" + "3.1\u202fGB" ], [ "7b-text-q3_K_S", - "3.2 GB" + "3.2\u202fGB" ], [ - "7b-text-q5_0", - "5.0 GB" + "7b-text-q3_K_M", + "3.5\u202fGB" ], [ - "7b-text-q5_K_M", - "5.1 GB" - ], - [ - "7b-text-q4_K_M", - "4.4 GB" - ], - [ - "7b-text-q4_1", - "4.6 GB" - ], - [ - "7b-text-v0.2-q3_K_L", - "3.8 GB" - ], - [ - "7b-text-q6_K", - "5.9 GB" + "7b-text-q3_K_L", + "3.8\u202fGB" ], [ "7b-text-q4_0", - "4.1 GB" + "4.1\u202fGB" ], [ - "7b-text-q5_1", - "5.4 GB" - ], - [ - "7b-text-v0.2-q2_K", - "2.7 GB" - ], - [ - "7b-text-v0.2-fp16", - "14 GB" + "7b-text-q4_1", + "4.6\u202fGB" ], [ "7b-text-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-text-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-text-q5_0", + "5.0\u202fGB" + 
], + [ + "7b-text-q5_1", + "5.4\u202fGB" + ], + [ + "7b-text-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-text-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-text-q6_K", + "5.9\u202fGB" + ], + [ + "7b-text-q8_0", + "7.7\u202fGB" + ], + [ + "7b-text-v0.2-fp16", + "14\u202fGB" + ], + [ + "7b-text-v0.2-q2_K", + "2.7\u202fGB" ], [ "7b-text-v0.2-q3_K_S", - "3.2 GB" + "3.2\u202fGB" + ], + [ + "7b-text-v0.2-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-text-v0.2-q3_K_L", + "3.8\u202fGB" ], [ "7b-text-v0.2-q4_0", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-text-v0.2-q4_1", - "4.6 GB" - ], - [ - "7b-text-v0.2-q4_K_M", - "4.4 GB" + "4.6\u202fGB" ], [ "7b-text-v0.2-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-text-v0.2-q4_K_M", + "4.4\u202fGB" ], [ "7b-text-v0.2-q5_0", - "5.0 GB" + "5.0\u202fGB" ], [ "7b-text-v0.2-q5_1", - "5.4 GB" - ], - [ - "7b-text-v0.2-q5_K_M", - "5.1 GB" + "5.4\u202fGB" ], [ "7b-text-v0.2-q5_K_S", - "5.0 GB" + "5.0\u202fGB" + ], + [ + "7b-text-v0.2-q5_K_M", + "5.1\u202fGB" ], [ "7b-text-v0.2-q6_K", - "5.9 GB" + "5.9\u202fGB" ], [ "7b-text-v0.2-q8_0", - "7.7 GB" + "7.7\u202fGB" ] ], "image": false, @@ -1813,279 +2173,279 @@ "tags": [ [ "latest", - "26 GB" + "26\u202fGB" ], [ "8x7b", - "26 GB" + "26\u202fGB" ], [ "8x22b", - "80 GB" + "80\u202fGB" ], [ "instruct", - "26 GB" + "26\u202fGB" ], [ "text", - "26 GB" + "26\u202fGB" ], [ "v0.1", - "80 GB" - ], - [ - "8x22b-instruct", - "80 GB" - ], - [ - "8x22b-instruct-v0.1-fp16", - "281 GB" - ], - [ - "8x22b-instruct-v0.1-q2_K", - "52 GB" - ], - [ - "8x22b-instruct-v0.1-q3_K_L", - "73 GB" - ], - [ - "8x22b-instruct-v0.1-q3_K_M", - "68 GB" - ], - [ - "8x22b-instruct-v0.1-q3_K_S", - "62 GB" - ], - [ - "8x22b-instruct-v0.1-q4_0", - "80 GB" - ], - [ - "8x22b-instruct-v0.1-q4_1", - "88 GB" - ], - [ - "8x22b-instruct-v0.1-q4_K_M", - "86 GB" - ], - [ - "8x22b-instruct-v0.1-q4_K_S", - "80 GB" - ], - [ - "8x22b-instruct-v0.1-q5_0", - "97 GB" - ], - [ - "8x22b-instruct-v0.1-q5_1", - "106 GB" - ], - [ - "8x22b-instruct-v0.1-q5_K_M", - "100 GB" - ], - [ - "8x22b-instruct-v0.1-q5_K_S", - "97 GB" - ], - [ - "8x22b-instruct-v0.1-q6_K", - "116 GB" - ], - [ - "8x22b-instruct-v0.1-q8_0", - "149 GB" - ], - [ - "8x7b-instruct-v0.1-fp16", - "93 GB" - ], - [ - "8x7b-instruct-v0.1-q2_K", - "16 GB" - ], - [ - "8x7b-instruct-v0.1-q3_K_L", - "20 GB" - ], - [ - "8x7b-instruct-v0.1-q3_K_M", - "20 GB" - ], - [ - "8x7b-instruct-v0.1-q3_K_S", - "20 GB" - ], - [ - "8x7b-instruct-v0.1-q4_0", - "26 GB" - ], - [ - "8x7b-instruct-v0.1-q4_1", - "29 GB" - ], - [ - "8x7b-instruct-v0.1-q4_K_M", - "26 GB" - ], - [ - "8x7b-instruct-v0.1-q4_K_S", - "26 GB" - ], - [ - "8x7b-instruct-v0.1-q5_0", - "32 GB" - ], - [ - "8x7b-instruct-v0.1-q5_1", - "35 GB" - ], - [ - "8x7b-instruct-v0.1-q5_K_M", - "32 GB" - ], - [ - "8x7b-instruct-v0.1-q5_K_S", - "32 GB" - ], - [ - "8x7b-instruct-v0.1-q6_K", - "38 GB" - ], - [ - "8x7b-instruct-v0.1-q8_0", - "50 GB" - ], - [ - "8x22b-text", - "80 GB" - ], - [ - "8x22b-text-v0.1-fp16", - "281 GB" - ], - [ - "8x22b-text-v0.1-q2_K", - "52 GB" - ], - [ - "8x22b-text-v0.1-q3_K_L", - "73 GB" - ], - [ - "8x22b-text-v0.1-q3_K_M", - "68 GB" - ], - [ - "8x22b-text-v0.1-q3_K_S", - "61 GB" - ], - [ - "8x22b-text-v0.1-q4_0", - "80 GB" - ], - [ - "8x22b-text-v0.1-q4_1", - "88 GB" - ], - [ - "8x22b-text-v0.1-q4_K_M", - "86 GB" - ], - [ - "8x22b-text-v0.1-q4_K_S", - "80 GB" - ], - [ - "8x22b-text-v0.1-q5_0", - "97 GB" - ], - [ - "8x22b-text-v0.1-q5_1", - "106 GB" - ], - [ - "8x22b-text-v0.1-q5_K_M", - "100 GB" - ], - [ - "8x22b-text-v0.1-q5_K_S", - "97 GB" - ], - [ - 
"8x22b-text-v0.1-q6_K", - "116 GB" - ], - [ - "8x22b-text-v0.1-q8_0", - "149 GB" - ], - [ - "8x7b-text-v0.1-fp16", - "93 GB" - ], - [ - "8x7b-text-v0.1-q2_K", - "16 GB" - ], - [ - "8x7b-text-v0.1-q3_K_L", - "20 GB" - ], - [ - "8x7b-text-v0.1-q3_K_M", - "20 GB" - ], - [ - "8x7b-text-v0.1-q3_K_S", - "20 GB" - ], - [ - "8x7b-text-v0.1-q4_0", - "26 GB" - ], - [ - "8x7b-text-v0.1-q4_1", - "29 GB" - ], - [ - "8x7b-text-v0.1-q4_K_M", - "26 GB" - ], - [ - "8x7b-text-v0.1-q4_K_S", - "26 GB" - ], - [ - "8x7b-text-v0.1-q5_0", - "32 GB" - ], - [ - "8x7b-text-v0.1-q5_1", - "35 GB" - ], - [ - "8x7b-text-v0.1-q5_K_M", - "32 GB" - ], - [ - "8x7b-text-v0.1-q5_K_S", - "32 GB" - ], - [ - "8x7b-text-v0.1-q6_K", - "38 GB" - ], - [ - "8x7b-text-v0.1-q8_0", - "50 GB" + "80\u202fGB" ], [ "v0.1-instruct", - "80 GB" + "80\u202fGB" + ], + [ + "8x22b-instruct", + "80\u202fGB" + ], + [ + "8x22b-text", + "80\u202fGB" + ], + [ + "8x7b-instruct-v0.1-fp16", + "93\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q2_K", + "16\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q3_K_S", + "20\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q3_K_M", + "20\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q3_K_L", + "20\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q4_0", + "26\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q4_1", + "29\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q4_K_S", + "26\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q4_K_M", + "26\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q5_0", + "32\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q5_1", + "35\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q5_K_S", + "32\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q5_K_M", + "32\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q6_K", + "38\u202fGB" + ], + [ + "8x7b-instruct-v0.1-q8_0", + "50\u202fGB" + ], + [ + "8x7b-text-v0.1-fp16", + "93\u202fGB" + ], + [ + "8x7b-text-v0.1-q2_K", + "16\u202fGB" + ], + [ + "8x7b-text-v0.1-q3_K_S", + "20\u202fGB" + ], + [ + "8x7b-text-v0.1-q3_K_M", + "20\u202fGB" + ], + [ + "8x7b-text-v0.1-q3_K_L", + "20\u202fGB" + ], + [ + "8x7b-text-v0.1-q4_0", + "26\u202fGB" + ], + [ + "8x7b-text-v0.1-q4_1", + "29\u202fGB" + ], + [ + "8x7b-text-v0.1-q4_K_S", + "26\u202fGB" + ], + [ + "8x7b-text-v0.1-q4_K_M", + "26\u202fGB" + ], + [ + "8x7b-text-v0.1-q5_0", + "32\u202fGB" + ], + [ + "8x7b-text-v0.1-q5_1", + "35\u202fGB" + ], + [ + "8x7b-text-v0.1-q5_K_S", + "32\u202fGB" + ], + [ + "8x7b-text-v0.1-q5_K_M", + "32\u202fGB" + ], + [ + "8x7b-text-v0.1-q6_K", + "38\u202fGB" + ], + [ + "8x7b-text-v0.1-q8_0", + "50\u202fGB" + ], + [ + "8x22b-instruct-v0.1-fp16", + "281\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q2_K", + "52\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q3_K_S", + "62\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q3_K_M", + "68\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q3_K_L", + "73\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q4_0", + "80\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q4_1", + "88\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q4_K_S", + "80\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q4_K_M", + "86\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q5_0", + "97\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q5_1", + "106\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q5_K_S", + "97\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q5_K_M", + "100\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q6_K", + "116\u202fGB" + ], + [ + "8x22b-instruct-v0.1-q8_0", + "149\u202fGB" + ], + [ + "8x22b-text-v0.1-fp16", + "281\u202fGB" + ], + [ + "8x22b-text-v0.1-q2_K", + "52\u202fGB" + ], + [ + "8x22b-text-v0.1-q3_K_S", + "61\u202fGB" + ], + [ + "8x22b-text-v0.1-q3_K_M", + "68\u202fGB" + ], + [ + "8x22b-text-v0.1-q3_K_L", + "73\u202fGB" + ], + [ 
+ "8x22b-text-v0.1-q4_0", + "80\u202fGB" + ], + [ + "8x22b-text-v0.1-q4_1", + "88\u202fGB" + ], + [ + "8x22b-text-v0.1-q4_K_S", + "80\u202fGB" + ], + [ + "8x22b-text-v0.1-q4_K_M", + "86\u202fGB" + ], + [ + "8x22b-text-v0.1-q5_0", + "97\u202fGB" + ], + [ + "8x22b-text-v0.1-q5_1", + "106\u202fGB" + ], + [ + "8x22b-text-v0.1-q5_K_S", + "97\u202fGB" + ], + [ + "8x22b-text-v0.1-q5_K_M", + "100\u202fGB" + ], + [ + "8x22b-text-v0.1-q6_K", + "116\u202fGB" + ], + [ + "8x22b-text-v0.1-q8_0", + "149\u202fGB" ] ], "image": false, @@ -2097,343 +2457,343 @@ "tags": [ [ "latest", - "5.0 GB" - ], - [ - "7b", - "5.0 GB" + "5.0\u202fGB" ], [ "2b", - "1.6 GB" + "1.6\u202fGB" + ], + [ + "7b", + "5.0\u202fGB" ], [ "code", - "1.6 GB" + "1.6\u202fGB" ], [ "instruct", - "5.0 GB" - ], - [ - "7b-code", - "5.0 GB" - ], - [ - "7b-code-fp16", - "17 GB" - ], - [ - "7b-code-q2_K", - "3.5 GB" - ], - [ - "7b-code-q3_K_L", - "4.7 GB" - ], - [ - "7b-code-q3_K_M", - "4.4 GB" - ], - [ - "7b-code-q3_K_S", - "4.0 GB" - ], - [ - "7b-code-q4_0", - "5.0 GB" - ], - [ - "7b-code-q4_1", - "5.5 GB" - ], - [ - "7b-code-q4_K_M", - "5.3 GB" - ], - [ - "7b-code-q4_K_S", - "5.0 GB" - ], - [ - "7b-code-q5_0", - "6.0 GB" - ], - [ - "7b-code-q5_1", - "6.5 GB" - ], - [ - "7b-code-q5_K_M", - "6.1 GB" - ], - [ - "7b-code-q5_K_S", - "6.0 GB" - ], - [ - "7b-code-q6_K", - "7.0 GB" - ], - [ - "7b-code-q8_0", - "9.1 GB" - ], - [ - "7b-instruct", - "5.0 GB" - ], - [ - "7b-instruct-v1.1-fp16", - "17 GB" - ], - [ - "7b-instruct-v1.1-q2_K", - "3.5 GB" - ], - [ - "7b-instruct-v1.1-q3_K_L", - "4.7 GB" - ], - [ - "7b-instruct-q3_K_S", - "4.0 GB" - ], - [ - "7b-instruct-v1.1-q6_K", - "7.0 GB" - ], - [ - "7b-instruct-v1.1-q5_K_M", - "6.1 GB" - ], - [ - "7b-instruct-v1.1-q8_0", - "9.1 GB" - ], - [ - "7b-instruct-v1.1-q4_0", - "5.0 GB" - ], - [ - "7b-instruct-v1.1-q5_K_S", - "6.0 GB" - ], - [ - "7b-instruct-fp16", - "17 GB" - ], - [ - "7b-instruct-v1.1-q5_1", - "6.5 GB" - ], - [ - "7b-instruct-v1.1-q4_1", - "5.5 GB" - ], - [ - "7b-instruct-v1.1-q5_0", - "6.0 GB" - ], - [ - "7b-instruct-v1.1-q3_K_M", - "4.4 GB" - ], - [ - "7b-instruct-v1.1-q4_K_S", - "5.0 GB" - ], - [ - "7b-instruct-v1.1-q4_K_M", - "5.3 GB" - ], - [ - "7b-instruct-q3_K_M", - "4.4 GB" - ], - [ - "7b-instruct-v1.1-q3_K_S", - "4.0 GB" - ], - [ - "7b-instruct-q2_K", - "3.5 GB" - ], - [ - "7b-instruct-q3_K_L", - "4.7 GB" - ], - [ - "7b-instruct-q4_0", - "5.0 GB" - ], - [ - "7b-instruct-q4_1", - "5.5 GB" - ], - [ - "7b-instruct-q4_K_M", - "5.3 GB" - ], - [ - "7b-instruct-q4_K_S", - "5.0 GB" - ], - [ - "7b-instruct-q5_0", - "6.0 GB" - ], - [ - "7b-instruct-q5_1", - "6.5 GB" - ], - [ - "7b-instruct-q5_K_M", - "6.1 GB" - ], - [ - "7b-instruct-q5_K_S", - "6.0 GB" - ], - [ - "7b-instruct-q6_K", - "7.0 GB" - ], - [ - "7b-instruct-q8_0", - "9.1 GB" - ], - [ - "7b-v1.1", - "5.0 GB" + "5.0\u202fGB" ], [ "2b-code", - "1.6 GB" - ], - [ - "2b-code-fp16", - "5.0 GB" - ], - [ - "2b-code-q2_K", - "1.2 GB" - ], - [ - "2b-code-q3_K_L", - "1.5 GB" - ], - [ - "2b-code-q3_K_M", - "1.4 GB" - ], - [ - "2b-code-q3_K_S", - "1.3 GB" - ], - [ - "2b-code-q4_0", - "1.6 GB" - ], - [ - "2b-code-q4_1", - "1.7 GB" - ], - [ - "2b-code-q4_K_M", - "1.6 GB" - ], - [ - "2b-code-q4_K_S", - "1.6 GB" - ], - [ - "2b-code-q5_0", - "1.8 GB" - ], - [ - "2b-code-v1.1-q4_1", - "1.7 GB" - ], - [ - "2b-code-v1.1-fp16", - "5.0 GB" - ], - [ - "2b-code-v1.1-q3_K_S", - "1.3 GB" - ], - [ - "2b-code-q5_1", - "1.9 GB" - ], - [ - "2b-code-v1.1-q3_K_L", - "1.5 GB" - ], - [ - "2b-code-v1.1-q3_K_M", - "1.4 GB" - ], - [ - "2b-code-q5_K_S", - "1.8 GB" - ], - [ 
- "2b-code-q8_0", - "2.7 GB" - ], - [ - "2b-code-v1.1-q4_0", - "1.6 GB" - ], - [ - "2b-code-v1.1-q2_K", - "1.2 GB" - ], - [ - "2b-code-q6_K", - "2.1 GB" - ], - [ - "2b-code-q5_K_M", - "1.8 GB" - ], - [ - "2b-code-v1.1-q4_K_M", - "1.6 GB" - ], - [ - "2b-code-v1.1-q4_K_S", - "1.6 GB" - ], - [ - "2b-code-v1.1-q5_0", - "1.8 GB" - ], - [ - "2b-code-v1.1-q5_1", - "1.9 GB" - ], - [ - "2b-code-v1.1-q5_K_M", - "1.8 GB" - ], - [ - "2b-code-v1.1-q5_K_S", - "1.8 GB" - ], - [ - "2b-code-v1.1-q6_K", - "2.1 GB" - ], - [ - "2b-code-v1.1-q8_0", - "2.7 GB" + "1.6\u202fGB" ], [ "2b-v1.1", - "1.6 GB" + "1.6\u202fGB" + ], + [ + "7b-code", + "5.0\u202fGB" + ], + [ + "7b-instruct", + "5.0\u202fGB" + ], + [ + "7b-v1.1", + "5.0\u202fGB" + ], + [ + "2b-code-fp16", + "5.0\u202fGB" + ], + [ + "2b-code-q2_K", + "1.2\u202fGB" + ], + [ + "2b-code-q3_K_S", + "1.3\u202fGB" + ], + [ + "2b-code-q3_K_M", + "1.4\u202fGB" + ], + [ + "2b-code-q3_K_L", + "1.5\u202fGB" + ], + [ + "2b-code-q4_0", + "1.6\u202fGB" + ], + [ + "2b-code-q4_1", + "1.7\u202fGB" + ], + [ + "2b-code-q4_K_S", + "1.6\u202fGB" + ], + [ + "2b-code-q4_K_M", + "1.6\u202fGB" + ], + [ + "2b-code-q5_0", + "1.8\u202fGB" + ], + [ + "2b-code-q5_1", + "1.9\u202fGB" + ], + [ + "2b-code-q5_K_S", + "1.8\u202fGB" + ], + [ + "2b-code-q5_K_M", + "1.8\u202fGB" + ], + [ + "2b-code-q6_K", + "2.1\u202fGB" + ], + [ + "2b-code-q8_0", + "2.7\u202fGB" + ], + [ + "2b-code-v1.1-fp16", + "5.0\u202fGB" + ], + [ + "2b-code-v1.1-q2_K", + "1.2\u202fGB" + ], + [ + "2b-code-v1.1-q3_K_S", + "1.3\u202fGB" + ], + [ + "2b-code-v1.1-q3_K_M", + "1.4\u202fGB" + ], + [ + "2b-code-v1.1-q3_K_L", + "1.5\u202fGB" + ], + [ + "2b-code-v1.1-q4_0", + "1.6\u202fGB" + ], + [ + "2b-code-v1.1-q4_1", + "1.7\u202fGB" + ], + [ + "2b-code-v1.1-q4_K_S", + "1.6\u202fGB" + ], + [ + "2b-code-v1.1-q4_K_M", + "1.6\u202fGB" + ], + [ + "2b-code-v1.1-q5_0", + "1.8\u202fGB" + ], + [ + "2b-code-v1.1-q5_1", + "1.9\u202fGB" + ], + [ + "2b-code-v1.1-q5_K_S", + "1.8\u202fGB" + ], + [ + "2b-code-v1.1-q5_K_M", + "1.8\u202fGB" + ], + [ + "2b-code-v1.1-q6_K", + "2.1\u202fGB" + ], + [ + "2b-code-v1.1-q8_0", + "2.7\u202fGB" + ], + [ + "7b-code-fp16", + "17\u202fGB" + ], + [ + "7b-code-q2_K", + "3.5\u202fGB" + ], + [ + "7b-code-q3_K_S", + "4.0\u202fGB" + ], + [ + "7b-code-q3_K_M", + "4.4\u202fGB" + ], + [ + "7b-code-q3_K_L", + "4.7\u202fGB" + ], + [ + "7b-code-q4_0", + "5.0\u202fGB" + ], + [ + "7b-code-q4_1", + "5.5\u202fGB" + ], + [ + "7b-code-q4_K_S", + "5.0\u202fGB" + ], + [ + "7b-code-q4_K_M", + "5.3\u202fGB" + ], + [ + "7b-code-q5_0", + "6.0\u202fGB" + ], + [ + "7b-code-q5_1", + "6.5\u202fGB" + ], + [ + "7b-code-q5_K_S", + "6.0\u202fGB" + ], + [ + "7b-code-q5_K_M", + "6.1\u202fGB" + ], + [ + "7b-code-q6_K", + "7.0\u202fGB" + ], + [ + "7b-code-q8_0", + "9.1\u202fGB" + ], + [ + "7b-instruct-fp16", + "17\u202fGB" + ], + [ + "7b-instruct-q2_K", + "3.5\u202fGB" + ], + [ + "7b-instruct-q3_K_S", + "4.0\u202fGB" + ], + [ + "7b-instruct-q3_K_M", + "4.4\u202fGB" + ], + [ + "7b-instruct-q3_K_L", + "4.7\u202fGB" + ], + [ + "7b-instruct-q4_0", + "5.0\u202fGB" + ], + [ + "7b-instruct-q4_1", + "5.5\u202fGB" + ], + [ + "7b-instruct-q4_K_S", + "5.0\u202fGB" + ], + [ + "7b-instruct-q4_K_M", + "5.3\u202fGB" + ], + [ + "7b-instruct-q5_0", + "6.0\u202fGB" + ], + [ + "7b-instruct-q5_1", + "6.5\u202fGB" + ], + [ + "7b-instruct-q5_K_S", + "6.0\u202fGB" + ], + [ + "7b-instruct-q5_K_M", + "6.1\u202fGB" + ], + [ + "7b-instruct-q6_K", + "7.0\u202fGB" + ], + [ + "7b-instruct-q8_0", + "9.1\u202fGB" + ], + [ + "7b-instruct-v1.1-fp16", + "17\u202fGB" + ], + [ + 
"7b-instruct-v1.1-q2_K", + "3.5\u202fGB" + ], + [ + "7b-instruct-v1.1-q3_K_S", + "4.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q3_K_M", + "4.4\u202fGB" + ], + [ + "7b-instruct-v1.1-q3_K_L", + "4.7\u202fGB" + ], + [ + "7b-instruct-v1.1-q4_0", + "5.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q4_1", + "5.5\u202fGB" + ], + [ + "7b-instruct-v1.1-q4_K_S", + "5.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q4_K_M", + "5.3\u202fGB" + ], + [ + "7b-instruct-v1.1-q5_0", + "6.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q5_1", + "6.5\u202fGB" + ], + [ + "7b-instruct-v1.1-q5_K_S", + "6.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q5_K_M", + "6.1\u202fGB" + ], + [ + "7b-instruct-v1.1-q6_K", + "7.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q8_0", + "9.1\u202fGB" ] ], "image": false, @@ -2445,71 +2805,131 @@ "tags": [ [ "latest", - "20 GB" + "19\u202fGB" ], [ "35b", - "20 GB" + "19\u202fGB" ], [ "v0.1", - "20 GB" + "20\u202fGB" + ], + [ + "35b-08-2024-fp16", + "65\u202fGB" + ], + [ + "35b-08-2024-q2_K", + "13\u202fGB" + ], + [ + "35b-08-2024-q3_K_S", + "15\u202fGB" + ], + [ + "35b-08-2024-q3_K_M", + "16\u202fGB" + ], + [ + "35b-08-2024-q3_K_L", + "18\u202fGB" + ], + [ + "35b-08-2024-q4_0", + "19\u202fGB" + ], + [ + "35b-08-2024-q4_1", + "21\u202fGB" + ], + [ + "35b-08-2024-q4_K_S", + "19\u202fGB" + ], + [ + "35b-08-2024-q4_K_M", + "20\u202fGB" + ], + [ + "35b-08-2024-q5_0", + "22\u202fGB" + ], + [ + "35b-08-2024-q5_1", + "24\u202fGB" + ], + [ + "35b-08-2024-q5_K_S", + "22\u202fGB" + ], + [ + "35b-08-2024-q5_K_M", + "23\u202fGB" + ], + [ + "35b-08-2024-q6_K", + "27\u202fGB" + ], + [ + "35b-08-2024-q8_0", + "34\u202fGB" ], [ "35b-v0.1-fp16", - "70 GB" + "70\u202fGB" ], [ "35b-v0.1-q2_K", - "14 GB" - ], - [ - "35b-v0.1-q3_K_L", - "19 GB" - ], - [ - "35b-v0.1-q3_K_M", - "18 GB" + "14\u202fGB" ], [ "35b-v0.1-q3_K_S", - "16 GB" + "16\u202fGB" + ], + [ + "35b-v0.1-q3_K_M", + "18\u202fGB" + ], + [ + "35b-v0.1-q3_K_L", + "19\u202fGB" ], [ "35b-v0.1-q4_0", - "20 GB" + "20\u202fGB" ], [ "35b-v0.1-q4_1", - "22 GB" - ], - [ - "35b-v0.1-q4_K_M", - "22 GB" + "22\u202fGB" ], [ "35b-v0.1-q4_K_S", - "20 GB" + "20\u202fGB" + ], + [ + "35b-v0.1-q4_K_M", + "22\u202fGB" ], [ "35b-v0.1-q5_1", - "26 GB" - ], - [ - "35b-v0.1-q5_K_M", - "25 GB" + "26\u202fGB" ], [ "35b-v0.1-q5_K_S", - "24 GB" + "24\u202fGB" + ], + [ + "35b-v0.1-q5_K_M", + "25\u202fGB" ], [ "35b-v0.1-q6_K", - "29 GB" + "29\u202fGB" ], [ "35b-v0.1-q8_0", - "37 GB" + "37\u202fGB" ] ], "image": false, @@ -2521,27 +2941,87 @@ "tags": [ [ "latest", - "59 GB" + "59\u202fGB" ], [ "104b", - "59 GB" + "59\u202fGB" ], [ "104b-fp16", - "208 GB" + "208\u202fGB" ], [ "104b-q2_K", - "39 GB" + "39\u202fGB" ], [ "104b-q4_0", - "59 GB" + "59\u202fGB" ], [ "104b-q8_0", - "110 GB" + "110\u202fGB" + ], + [ + "104b-08-2024-fp16", + "208\u202fGB" + ], + [ + "104b-08-2024-q2_K", + "39\u202fGB" + ], + [ + "104b-08-2024-q3_K_S", + "46\u202fGB" + ], + [ + "104b-08-2024-q3_K_M", + "51\u202fGB" + ], + [ + "104b-08-2024-q3_K_L", + "55\u202fGB" + ], + [ + "104b-08-2024-q4_0", + "59\u202fGB" + ], + [ + "104b-08-2024-q4_1", + "66\u202fGB" + ], + [ + "104b-08-2024-q4_K_S", + "60\u202fGB" + ], + [ + "104b-08-2024-q4_K_M", + "63\u202fGB" + ], + [ + "104b-08-2024-q5_0", + "72\u202fGB" + ], + [ + "104b-08-2024-q5_1", + "78\u202fGB" + ], + [ + "104b-08-2024-q5_K_S", + "72\u202fGB" + ], + [ + "104b-08-2024-q5_K_M", + "74\u202fGB" + ], + [ + "104b-08-2024-q6_K", + "85\u202fGB" + ], + [ + "104b-08-2024-q8_0", + "110\u202fGB" ] ], "image": false, @@ -2553,395 +3033,395 @@ "tags": [ [ "latest", - "4.7 GB" - ], - [ - "34b", - "20 GB" 
- ], - [ - "13b", - "8.0 GB" + "4.7\u202fGB" ], [ "7b", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "13b", + "8.0\u202fGB" + ], + [ + "34b", + "20\u202fGB" ], [ "v1.6", - "4.7 GB" - ], - [ - "34b-v1.6", - "20 GB" - ], - [ - "34b-v1.6-fp16", - "69 GB" - ], - [ - "34b-v1.6-q2_K", - "14 GB" - ], - [ - "34b-v1.6-q3_K_L", - "19 GB" - ], - [ - "34b-v1.6-q3_K_M", - "17 GB" - ], - [ - "34b-v1.6-q3_K_S", - "16 GB" - ], - [ - "34b-v1.6-q4_0", - "20 GB" - ], - [ - "34b-v1.6-q4_1", - "22 GB" - ], - [ - "34b-v1.6-q4_K_M", - "21 GB" - ], - [ - "34b-v1.6-q4_K_S", - "20 GB" - ], - [ - "34b-v1.6-q5_0", - "24 GB" - ], - [ - "34b-v1.6-q5_1", - "27 GB" - ], - [ - "34b-v1.6-q5_K_M", - "25 GB" - ], - [ - "34b-v1.6-q5_K_S", - "24 GB" - ], - [ - "34b-v1.6-q6_K", - "29 GB" - ], - [ - "34b-v1.6-q8_0", - "37 GB" - ], - [ - "13b-v1.5-fp16", - "27 GB" - ], - [ - "13b-v1.5-q2_K", - "6.1 GB" - ], - [ - "13b-v1.5-q3_K_L", - "7.6 GB" - ], - [ - "13b-v1.5-q3_K_M", - "7.0 GB" - ], - [ - "13b-v1.5-q3_K_S", - "6.3 GB" - ], - [ - "13b-v1.5-q4_0", - "8.0 GB" - ], - [ - "13b-v1.5-q4_1", - "8.8 GB" - ], - [ - "13b-v1.5-q4_K_M", - "8.5 GB" - ], - [ - "13b-v1.5-q4_K_S", - "8.1 GB" - ], - [ - "13b-v1.5-q5_0", - "9.6 GB" - ], - [ - "13b-v1.5-q5_1", - "10 GB" - ], - [ - "13b-v1.5-q5_K_M", - "9.9 GB" - ], - [ - "13b-v1.5-q5_K_S", - "9.6 GB" - ], - [ - "13b-v1.5-q6_K", - "11 GB" - ], - [ - "13b-v1.5-q8_0", - "14 GB" - ], - [ - "13b-v1.6", - "8.0 GB" - ], - [ - "13b-v1.6-vicuna-fp16", - "27 GB" - ], - [ - "13b-v1.6-vicuna-q2_K", - "5.5 GB" - ], - [ - "13b-v1.6-vicuna-q3_K_L", - "7.6 GB" - ], - [ - "13b-v1.6-vicuna-q3_K_M", - "7.0 GB" - ], - [ - "13b-v1.6-vicuna-q3_K_S", - "6.3 GB" - ], - [ - "13b-v1.6-vicuna-q4_0", - "8.0 GB" - ], - [ - "13b-v1.6-vicuna-q4_1", - "8.8 GB" - ], - [ - "13b-v1.6-vicuna-q4_K_M", - "8.5 GB" - ], - [ - "13b-v1.6-vicuna-q4_K_S", - "8.1 GB" - ], - [ - "13b-v1.6-vicuna-q5_0", - "9.6 GB" - ], - [ - "13b-v1.6-vicuna-q5_1", - "10 GB" - ], - [ - "13b-v1.6-vicuna-q5_K_M", - "9.9 GB" - ], - [ - "13b-v1.6-vicuna-q5_K_S", - "9.6 GB" - ], - [ - "13b-v1.6-vicuna-q6_K", - "11 GB" - ], - [ - "13b-v1.6-vicuna-q8_0", - "14 GB" - ], - [ - "7b-v1.5-fp16", - "14 GB" - ], - [ - "7b-v1.5-q2_K", - "3.5 GB" - ], - [ - "7b-v1.5-q3_K_L", - "4.2 GB" - ], - [ - "7b-v1.5-q3_K_M", - "3.9 GB" - ], - [ - "7b-v1.5-q3_K_S", - "3.6 GB" - ], - [ - "7b-v1.5-q4_0", - "4.5 GB" - ], - [ - "7b-v1.5-q4_1", - "4.9 GB" - ], - [ - "7b-v1.5-q4_K_M", - "4.7 GB" - ], - [ - "7b-v1.5-q4_K_S", - "4.5 GB" - ], - [ - "7b-v1.5-q5_0", - "5.3 GB" - ], - [ - "7b-v1.5-q5_1", - "5.7 GB" - ], - [ - "7b-v1.5-q5_K_M", - "5.4 GB" - ], - [ - "7b-v1.5-q5_K_S", - "5.3 GB" - ], - [ - "7b-v1.5-q6_K", - "6.2 GB" - ], - [ - "7b-v1.5-q8_0", - "7.8 GB" + "4.7\u202fGB" ], [ "7b-v1.6", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "13b-v1.6", + "8.0\u202fGB" + ], + [ + "34b-v1.6", + "20\u202fGB" + ], + [ + "7b-v1.5-fp16", + "14\u202fGB" + ], + [ + "7b-v1.5-q2_K", + "3.5\u202fGB" + ], + [ + "7b-v1.5-q3_K_S", + "3.6\u202fGB" + ], + [ + "7b-v1.5-q3_K_M", + "3.9\u202fGB" + ], + [ + "7b-v1.5-q3_K_L", + "4.2\u202fGB" + ], + [ + "7b-v1.5-q4_0", + "4.5\u202fGB" + ], + [ + "7b-v1.5-q4_1", + "4.9\u202fGB" + ], + [ + "7b-v1.5-q4_K_S", + "4.5\u202fGB" + ], + [ + "7b-v1.5-q4_K_M", + "4.7\u202fGB" + ], + [ + "7b-v1.5-q5_0", + "5.3\u202fGB" + ], + [ + "7b-v1.5-q5_1", + "5.7\u202fGB" + ], + [ + "7b-v1.5-q5_K_S", + "5.3\u202fGB" + ], + [ + "7b-v1.5-q5_K_M", + "5.4\u202fGB" + ], + [ + "7b-v1.5-q6_K", + "6.2\u202fGB" + ], + [ + "7b-v1.5-q8_0", + "7.8\u202fGB" ], [ "7b-v1.6-mistral-fp16", - "15 GB" + "15\u202fGB" ], 
[ "7b-v1.6-mistral-q2_K", - "3.3 GB" - ], - [ - "7b-v1.6-mistral-q3_K_L", - "4.4 GB" - ], - [ - "7b-v1.6-mistral-q3_K_M", - "4.1 GB" + "3.3\u202fGB" ], [ "7b-v1.6-mistral-q3_K_S", - "3.8 GB" + "3.8\u202fGB" + ], + [ + "7b-v1.6-mistral-q3_K_M", + "4.1\u202fGB" + ], + [ + "7b-v1.6-mistral-q3_K_L", + "4.4\u202fGB" ], [ "7b-v1.6-mistral-q4_0", - "4.7 GB" + "4.7\u202fGB" ], [ "7b-v1.6-mistral-q4_1", - "5.2 GB" - ], - [ - "7b-v1.6-mistral-q4_K_M", - "5.0 GB" + "5.2\u202fGB" ], [ "7b-v1.6-mistral-q4_K_S", - "4.8 GB" + "4.8\u202fGB" + ], + [ + "7b-v1.6-mistral-q4_K_M", + "5.0\u202fGB" ], [ "7b-v1.6-mistral-q5_0", - "5.6 GB" + "5.6\u202fGB" ], [ "7b-v1.6-mistral-q5_1", - "6.1 GB" - ], - [ - "7b-v1.6-mistral-q5_K_M", - "5.8 GB" + "6.1\u202fGB" ], [ "7b-v1.6-mistral-q5_K_S", - "5.6 GB" + "5.6\u202fGB" + ], + [ + "7b-v1.6-mistral-q5_K_M", + "5.8\u202fGB" ], [ "7b-v1.6-mistral-q6_K", - "6.6 GB" + "6.6\u202fGB" ], [ "7b-v1.6-mistral-q8_0", - "8.3 GB" + "8.3\u202fGB" ], [ "7b-v1.6-vicuna-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-v1.6-vicuna-q2_K", - "3.2 GB" - ], - [ - "7b-v1.6-vicuna-q3_K_L", - "4.2 GB" - ], - [ - "7b-v1.6-vicuna-q3_K_M", - "3.9 GB" + "3.2\u202fGB" ], [ "7b-v1.6-vicuna-q3_K_S", - "3.6 GB" + "3.6\u202fGB" + ], + [ + "7b-v1.6-vicuna-q3_K_M", + "3.9\u202fGB" + ], + [ + "7b-v1.6-vicuna-q3_K_L", + "4.2\u202fGB" ], [ "7b-v1.6-vicuna-q4_0", - "4.5 GB" + "4.5\u202fGB" ], [ "7b-v1.6-vicuna-q4_1", - "4.9 GB" - ], - [ - "7b-v1.6-vicuna-q4_K_M", - "4.7 GB" + "4.9\u202fGB" ], [ "7b-v1.6-vicuna-q4_K_S", - "4.5 GB" + "4.5\u202fGB" + ], + [ + "7b-v1.6-vicuna-q4_K_M", + "4.7\u202fGB" ], [ "7b-v1.6-vicuna-q5_0", - "5.3 GB" + "5.3\u202fGB" ], [ "7b-v1.6-vicuna-q5_1", - "5.7 GB" - ], - [ - "7b-v1.6-vicuna-q5_K_M", - "5.4 GB" + "5.7\u202fGB" ], [ "7b-v1.6-vicuna-q5_K_S", - "5.3 GB" + "5.3\u202fGB" + ], + [ + "7b-v1.6-vicuna-q5_K_M", + "5.4\u202fGB" ], [ "7b-v1.6-vicuna-q6_K", - "6.2 GB" + "6.2\u202fGB" ], [ "7b-v1.6-vicuna-q8_0", - "7.8 GB" + "7.8\u202fGB" + ], + [ + "13b-v1.5-fp16", + "27\u202fGB" + ], + [ + "13b-v1.5-q2_K", + "6.1\u202fGB" + ], + [ + "13b-v1.5-q3_K_S", + "6.3\u202fGB" + ], + [ + "13b-v1.5-q3_K_M", + "7.0\u202fGB" + ], + [ + "13b-v1.5-q3_K_L", + "7.6\u202fGB" + ], + [ + "13b-v1.5-q4_0", + "8.0\u202fGB" + ], + [ + "13b-v1.5-q4_1", + "8.8\u202fGB" + ], + [ + "13b-v1.5-q4_K_S", + "8.1\u202fGB" + ], + [ + "13b-v1.5-q4_K_M", + "8.5\u202fGB" + ], + [ + "13b-v1.5-q5_0", + "9.6\u202fGB" + ], + [ + "13b-v1.5-q5_1", + "10\u202fGB" + ], + [ + "13b-v1.5-q5_K_S", + "9.6\u202fGB" + ], + [ + "13b-v1.5-q5_K_M", + "9.9\u202fGB" + ], + [ + "13b-v1.5-q6_K", + "11\u202fGB" + ], + [ + "13b-v1.5-q8_0", + "14\u202fGB" + ], + [ + "13b-v1.6-vicuna-fp16", + "27\u202fGB" + ], + [ + "13b-v1.6-vicuna-q2_K", + "5.5\u202fGB" + ], + [ + "13b-v1.6-vicuna-q3_K_S", + "6.3\u202fGB" + ], + [ + "13b-v1.6-vicuna-q3_K_M", + "7.0\u202fGB" + ], + [ + "13b-v1.6-vicuna-q3_K_L", + "7.6\u202fGB" + ], + [ + "13b-v1.6-vicuna-q4_0", + "8.0\u202fGB" + ], + [ + "13b-v1.6-vicuna-q4_1", + "8.8\u202fGB" + ], + [ + "13b-v1.6-vicuna-q4_K_S", + "8.1\u202fGB" + ], + [ + "13b-v1.6-vicuna-q4_K_M", + "8.5\u202fGB" + ], + [ + "13b-v1.6-vicuna-q5_0", + "9.6\u202fGB" + ], + [ + "13b-v1.6-vicuna-q5_1", + "10\u202fGB" + ], + [ + "13b-v1.6-vicuna-q5_K_S", + "9.6\u202fGB" + ], + [ + "13b-v1.6-vicuna-q5_K_M", + "9.9\u202fGB" + ], + [ + "13b-v1.6-vicuna-q6_K", + "11\u202fGB" + ], + [ + "13b-v1.6-vicuna-q8_0", + "14\u202fGB" + ], + [ + "34b-v1.6-fp16", + "69\u202fGB" + ], + [ + "34b-v1.6-q2_K", + "14\u202fGB" + ], + [ + "34b-v1.6-q3_K_S", + "16\u202fGB" + ], + 
[ + "34b-v1.6-q3_K_M", + "17\u202fGB" + ], + [ + "34b-v1.6-q3_K_L", + "19\u202fGB" + ], + [ + "34b-v1.6-q4_0", + "20\u202fGB" + ], + [ + "34b-v1.6-q4_1", + "22\u202fGB" + ], + [ + "34b-v1.6-q4_K_S", + "20\u202fGB" + ], + [ + "34b-v1.6-q4_K_M", + "21\u202fGB" + ], + [ + "34b-v1.6-q5_0", + "24\u202fGB" + ], + [ + "34b-v1.6-q5_1", + "27\u202fGB" + ], + [ + "34b-v1.6-q5_K_S", + "24\u202fGB" + ], + [ + "34b-v1.6-q5_K_M", + "25\u202fGB" + ], + [ + "34b-v1.6-q6_K", + "29\u202fGB" + ], + [ + "34b-v1.6-q8_0", + "37\u202fGB" ] ], "image": true, @@ -2953,275 +3433,275 @@ "tags": [ [ "latest", - "4.7 GB" - ], - [ - "70b", - "40 GB" + "4.7\u202fGB" ], [ "8b", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "70b", + "40\u202fGB" ], [ "instruct", - "4.7 GB" + "4.7\u202fGB" ], [ "text", - "4.7 GB" - ], - [ - "70b-instruct", - "40 GB" - ], - [ - "70b-instruct-fp16", - "141 GB" - ], - [ - "70b-instruct-q2_K", - "26 GB" - ], - [ - "70b-instruct-q3_K_L", - "37 GB" - ], - [ - "70b-instruct-q3_K_M", - "34 GB" - ], - [ - "70b-instruct-q3_K_S", - "31 GB" - ], - [ - "70b-instruct-q4_0", - "40 GB" - ], - [ - "70b-instruct-q4_1", - "44 GB" - ], - [ - "70b-instruct-q4_K_M", - "43 GB" - ], - [ - "70b-instruct-q4_K_S", - "40 GB" - ], - [ - "70b-instruct-q5_0", - "49 GB" - ], - [ - "70b-instruct-q5_1", - "53 GB" - ], - [ - "70b-instruct-q5_K_M", - "50 GB" - ], - [ - "70b-instruct-q5_K_S", - "49 GB" - ], - [ - "70b-instruct-q6_K", - "58 GB" - ], - [ - "70b-instruct-q8_0", - "75 GB" - ], - [ - "70b-text", - "40 GB" - ], - [ - "70b-text-fp16", - "141 GB" - ], - [ - "70b-text-q2_K", - "26 GB" - ], - [ - "70b-text-q3_K_L", - "37 GB" - ], - [ - "70b-text-q3_K_M", - "34 GB" - ], - [ - "70b-text-q3_K_S", - "31 GB" - ], - [ - "70b-text-q4_0", - "40 GB" - ], - [ - "70b-text-q4_1", - "44 GB" - ], - [ - "70b-text-q4_K_M", - "43 GB" - ], - [ - "70b-text-q4_K_S", - "40 GB" - ], - [ - "70b-text-q5_0", - "49 GB" - ], - [ - "70b-text-q5_1", - "53 GB" - ], - [ - "70b-text-q5_K_M", - "50 GB" - ], - [ - "70b-text-q5_K_S", - "49 GB" - ], - [ - "70b-text-q6_K", - "58 GB" - ], - [ - "70b-text-q8_0", - "75 GB" - ], - [ - "8b-instruct-fp16", - "16 GB" - ], - [ - "8b-instruct-q2_K", - "3.2 GB" - ], - [ - "8b-instruct-q3_K_L", - "4.3 GB" - ], - [ - "8b-instruct-q3_K_M", - "4.0 GB" - ], - [ - "8b-instruct-q3_K_S", - "3.7 GB" - ], - [ - "8b-instruct-q4_0", - "4.7 GB" - ], - [ - "8b-instruct-q4_1", - "5.1 GB" - ], - [ - "8b-instruct-q4_K_M", - "4.9 GB" - ], - [ - "8b-instruct-q4_K_S", - "4.7 GB" - ], - [ - "8b-instruct-q5_0", - "5.6 GB" - ], - [ - "8b-instruct-q5_1", - "6.1 GB" - ], - [ - "8b-instruct-q5_K_M", - "5.7 GB" - ], - [ - "8b-instruct-q5_K_S", - "5.6 GB" - ], - [ - "8b-instruct-q6_K", - "6.6 GB" - ], - [ - "8b-instruct-q8_0", - "8.5 GB" + "4.7\u202fGB" ], [ "8b-text", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "70b-instruct", + "40\u202fGB" + ], + [ + "70b-text", + "40\u202fGB" + ], + [ + "8b-instruct-fp16", + "16\u202fGB" + ], + [ + "8b-instruct-q2_K", + "3.2\u202fGB" + ], + [ + "8b-instruct-q3_K_S", + "3.7\u202fGB" + ], + [ + "8b-instruct-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-instruct-q3_K_L", + "4.3\u202fGB" + ], + [ + "8b-instruct-q4_0", + "4.7\u202fGB" + ], + [ + "8b-instruct-q4_1", + "5.1\u202fGB" + ], + [ + "8b-instruct-q4_K_S", + "4.7\u202fGB" + ], + [ + "8b-instruct-q4_K_M", + "4.9\u202fGB" + ], + [ + "8b-instruct-q5_0", + "5.6\u202fGB" + ], + [ + "8b-instruct-q5_1", + "6.1\u202fGB" + ], + [ + "8b-instruct-q5_K_S", + "5.6\u202fGB" + ], + [ + "8b-instruct-q5_K_M", + "5.7\u202fGB" + ], + [ + "8b-instruct-q6_K", + "6.6\u202fGB" + ], + [ + 
"8b-instruct-q8_0", + "8.5\u202fGB" ], [ "8b-text-fp16", - "16 GB" + "16\u202fGB" ], [ "8b-text-q2_K", - "3.2 GB" - ], - [ - "8b-text-q3_K_L", - "4.3 GB" - ], - [ - "8b-text-q3_K_M", - "4.0 GB" + "3.2\u202fGB" ], [ "8b-text-q3_K_S", - "3.7 GB" + "3.7\u202fGB" + ], + [ + "8b-text-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-text-q3_K_L", + "4.3\u202fGB" ], [ "8b-text-q4_0", - "4.7 GB" + "4.7\u202fGB" ], [ "8b-text-q4_1", - "5.1 GB" - ], - [ - "8b-text-q4_K_M", - "4.9 GB" + "5.1\u202fGB" ], [ "8b-text-q4_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "8b-text-q4_K_M", + "4.9\u202fGB" ], [ "8b-text-q5_0", - "5.6 GB" + "5.6\u202fGB" ], [ "8b-text-q5_1", - "6.1 GB" - ], - [ - "8b-text-q5_K_M", - "5.7 GB" + "6.1\u202fGB" ], [ "8b-text-q5_K_S", - "5.6 GB" + "5.6\u202fGB" + ], + [ + "8b-text-q5_K_M", + "5.7\u202fGB" ], [ "8b-text-q6_K", - "6.6 GB" + "6.6\u202fGB" ], [ "8b-text-q8_0", - "8.5 GB" + "8.5\u202fGB" + ], + [ + "70b-instruct-fp16", + "141\u202fGB" + ], + [ + "70b-instruct-q2_K", + "26\u202fGB" + ], + [ + "70b-instruct-q3_K_S", + "31\u202fGB" + ], + [ + "70b-instruct-q3_K_M", + "34\u202fGB" + ], + [ + "70b-instruct-q3_K_L", + "37\u202fGB" + ], + [ + "70b-instruct-q4_0", + "40\u202fGB" + ], + [ + "70b-instruct-q4_1", + "44\u202fGB" + ], + [ + "70b-instruct-q4_K_S", + "40\u202fGB" + ], + [ + "70b-instruct-q4_K_M", + "43\u202fGB" + ], + [ + "70b-instruct-q5_0", + "49\u202fGB" + ], + [ + "70b-instruct-q5_1", + "53\u202fGB" + ], + [ + "70b-instruct-q5_K_S", + "49\u202fGB" + ], + [ + "70b-instruct-q5_K_M", + "50\u202fGB" + ], + [ + "70b-instruct-q6_K", + "58\u202fGB" + ], + [ + "70b-instruct-q8_0", + "75\u202fGB" + ], + [ + "70b-text-fp16", + "141\u202fGB" + ], + [ + "70b-text-q2_K", + "26\u202fGB" + ], + [ + "70b-text-q3_K_S", + "31\u202fGB" + ], + [ + "70b-text-q3_K_M", + "34\u202fGB" + ], + [ + "70b-text-q3_K_L", + "37\u202fGB" + ], + [ + "70b-text-q4_0", + "40\u202fGB" + ], + [ + "70b-text-q4_1", + "44\u202fGB" + ], + [ + "70b-text-q4_K_S", + "40\u202fGB" + ], + [ + "70b-text-q4_K_M", + "43\u202fGB" + ], + [ + "70b-text-q5_0", + "49\u202fGB" + ], + [ + "70b-text-q5_1", + "53\u202fGB" + ], + [ + "70b-text-q5_K_S", + "49\u202fGB" + ], + [ + "70b-text-q5_K_M", + "50\u202fGB" + ], + [ + "70b-text-q6_K", + "58\u202fGB" + ], + [ + "70b-text-q8_0", + "75\u202fGB" ] ], "image": false, @@ -3233,411 +3713,411 @@ "tags": [ [ "latest", - "5.0 GB" - ], - [ - "7b", - "5.0 GB" + "5.0\u202fGB" ], [ "2b", - "1.7 GB" + "1.7\u202fGB" + ], + [ + "7b", + "5.0\u202fGB" ], [ "instruct", - "5.0 GB" + "5.0\u202fGB" ], [ "text", - "5.2 GB" + "5.2\u202fGB" ], [ "v1.1", - "5.0 GB" - ], - [ - "7b-instruct", - "5.0 GB" - ], - [ - "7b-instruct-v1.1-fp16", - "17 GB" - ], - [ - "7b-instruct-v1.1-q2_K", - "3.5 GB" - ], - [ - "7b-instruct-v1.1-q3_K_L", - "4.7 GB" - ], - [ - "7b-instruct-v1.1-q3_K_M", - "4.4 GB" - ], - [ - "7b-instruct-v1.1-q3_K_S", - "4.0 GB" - ], - [ - "7b-instruct-v1.1-q4_0", - "5.0 GB" - ], - [ - "7b-instruct-v1.1-q4_1", - "5.5 GB" - ], - [ - "7b-instruct-v1.1-q4_K_M", - "5.3 GB" - ], - [ - "7b-instruct-v1.1-q4_K_S", - "5.0 GB" - ], - [ - "7b-instruct-v1.1-q5_0", - "6.0 GB" - ], - [ - "7b-instruct-v1.1-q5_1", - "6.5 GB" - ], - [ - "7b-instruct-v1.1-q5_K_M", - "6.1 GB" - ], - [ - "7b-instruct-v1.1-q5_K_S", - "6.0 GB" - ], - [ - "7b-instruct-v1.1-q6_K", - "7.0 GB" - ], - [ - "7b-instruct-q4_0", - "5.2 GB" - ], - [ - "7b-instruct-q2_K", - "3.7 GB" - ], - [ - "7b-instruct-q3_K_S", - "4.2 GB" - ], - [ - "7b-instruct-fp16", - "17 GB" - ], - [ - "7b-instruct-q3_K_L", - "4.9 GB" - ], - [ - "7b-instruct-v1.1-q8_0", - "9.1 
GB" - ], - [ - "7b-instruct-q3_K_M", - "4.6 GB" - ], - [ - "7b-instruct-q4_1", - "5.7 GB" - ], - [ - "7b-instruct-q4_K_M", - "5.5 GB" - ], - [ - "7b-instruct-q4_K_S", - "5.2 GB" - ], - [ - "7b-instruct-q5_0", - "6.2 GB" - ], - [ - "7b-instruct-q5_1", - "6.7 GB" - ], - [ - "7b-instruct-q5_K_M", - "6.3 GB" - ], - [ - "7b-instruct-q5_K_S", - "6.2 GB" - ], - [ - "7b-instruct-q6_K", - "7.2 GB" - ], - [ - "7b-instruct-q8_0", - "9.1 GB" - ], - [ - "7b-text", - "5.2 GB" - ], - [ - "7b-text-fp16", - "16 GB" - ], - [ - "7b-text-q2_K", - "3.7 GB" - ], - [ - "7b-text-q3_K_L", - "4.9 GB" - ], - [ - "7b-text-q3_K_M", - "4.6 GB" - ], - [ - "7b-text-q3_K_S", - "4.2 GB" - ], - [ - "7b-text-q4_0", - "5.2 GB" - ], - [ - "7b-text-q4_1", - "5.7 GB" - ], - [ - "7b-text-q4_K_M", - "5.5 GB" - ], - [ - "7b-text-q4_K_S", - "5.2 GB" - ], - [ - "7b-text-q5_0", - "6.2 GB" - ], - [ - "7b-text-q5_1", - "6.7 GB" - ], - [ - "7b-text-q5_K_M", - "6.3 GB" - ], - [ - "7b-text-q5_K_S", - "6.2 GB" - ], - [ - "7b-text-q6_K", - "7.2 GB" - ], - [ - "7b-text-q8_0", - "9.1 GB" - ], - [ - "7b-v1.1", - "5.0 GB" + "5.0\u202fGB" ], [ "2b-instruct", - "1.6 GB" - ], - [ - "2b-instruct-fp16", - "4.5 GB" - ], - [ - "2b-instruct-q2_K", - "1.3 GB" - ], - [ - "2b-instruct-q3_K_L", - "1.6 GB" - ], - [ - "2b-instruct-q3_K_M", - "1.5 GB" - ], - [ - "2b-instruct-q3_K_S", - "1.4 GB" - ], - [ - "2b-instruct-q4_0", - "1.7 GB" - ], - [ - "2b-instruct-q4_1", - "1.8 GB" - ], - [ - "2b-instruct-q4_K_M", - "1.8 GB" - ], - [ - "2b-instruct-q4_K_S", - "1.7 GB" - ], - [ - "2b-instruct-q5_0", - "1.9 GB" - ], - [ - "2b-instruct-q5_1", - "2.1 GB" - ], - [ - "2b-instruct-q5_K_M", - "2.0 GB" - ], - [ - "2b-instruct-q5_K_S", - "1.9 GB" - ], - [ - "2b-instruct-q6_K", - "2.2 GB" - ], - [ - "2b-instruct-q8_0", - "2.7 GB" - ], - [ - "2b-instruct-v1.1-q5_K_M", - "1.8 GB" - ], - [ - "2b-instruct-v1.1-q5_1", - "1.9 GB" - ], - [ - "2b-instruct-v1.1-q2_K", - "1.2 GB" - ], - [ - "2b-instruct-v1.1-q3_K_M", - "1.4 GB" - ], - [ - "2b-instruct-v1.1-q4_0", - "1.6 GB" - ], - [ - "2b-instruct-v1.1-q3_K_L", - "1.5 GB" - ], - [ - "2b-instruct-v1.1-q3_K_S", - "1.3 GB" - ], - [ - "2b-instruct-v1.1-q4_K_S", - "1.6 GB" - ], - [ - "2b-instruct-v1.1-fp16", - "5.0 GB" - ], - [ - "2b-instruct-v1.1-q4_K_M", - "1.6 GB" - ], - [ - "2b-instruct-v1.1-q5_0", - "1.8 GB" - ], - [ - "2b-instruct-v1.1-q4_1", - "1.7 GB" - ], - [ - "2b-instruct-v1.1-q5_K_S", - "1.8 GB" - ], - [ - "2b-instruct-v1.1-q6_K", - "2.1 GB" - ], - [ - "2b-instruct-v1.1-q8_0", - "2.7 GB" + "1.6\u202fGB" ], [ "2b-text", - "1.7 GB" - ], - [ - "2b-text-fp16", - "4.5 GB" - ], - [ - "2b-text-q2_K", - "1.3 GB" - ], - [ - "2b-text-q3_K_L", - "1.6 GB" - ], - [ - "2b-text-q3_K_M", - "1.5 GB" - ], - [ - "2b-text-q3_K_S", - "1.4 GB" - ], - [ - "2b-text-q4_0", - "1.7 GB" - ], - [ - "2b-text-q4_1", - "1.8 GB" - ], - [ - "2b-text-q4_K_M", - "1.8 GB" - ], - [ - "2b-text-q4_K_S", - "1.7 GB" - ], - [ - "2b-text-q5_0", - "1.9 GB" - ], - [ - "2b-text-q5_1", - "2.1 GB" - ], - [ - "2b-text-q5_K_M", - "2.0 GB" - ], - [ - "2b-text-q5_K_S", - "1.9 GB" - ], - [ - "2b-text-q6_K", - "2.2 GB" - ], - [ - "2b-text-q8_0", - "2.7 GB" + "1.7\u202fGB" ], [ "2b-v1.1", - "1.6 GB" + "1.6\u202fGB" + ], + [ + "7b-instruct", + "5.0\u202fGB" + ], + [ + "7b-text", + "5.2\u202fGB" + ], + [ + "7b-v1.1", + "5.0\u202fGB" + ], + [ + "2b-instruct-fp16", + "4.5\u202fGB" + ], + [ + "2b-instruct-q2_K", + "1.3\u202fGB" + ], + [ + "2b-instruct-q3_K_S", + "1.4\u202fGB" + ], + [ + "2b-instruct-q3_K_M", + "1.5\u202fGB" + ], + [ + "2b-instruct-q3_K_L", + "1.6\u202fGB" + ], + [ + 
"2b-instruct-q4_0", + "1.7\u202fGB" + ], + [ + "2b-instruct-q4_1", + "1.8\u202fGB" + ], + [ + "2b-instruct-q4_K_S", + "1.7\u202fGB" + ], + [ + "2b-instruct-q4_K_M", + "1.8\u202fGB" + ], + [ + "2b-instruct-q5_0", + "1.9\u202fGB" + ], + [ + "2b-instruct-q5_1", + "2.1\u202fGB" + ], + [ + "2b-instruct-q5_K_S", + "1.9\u202fGB" + ], + [ + "2b-instruct-q5_K_M", + "2.0\u202fGB" + ], + [ + "2b-instruct-q6_K", + "2.2\u202fGB" + ], + [ + "2b-instruct-q8_0", + "2.7\u202fGB" + ], + [ + "2b-instruct-v1.1-fp16", + "5.0\u202fGB" + ], + [ + "2b-instruct-v1.1-q2_K", + "1.2\u202fGB" + ], + [ + "2b-instruct-v1.1-q3_K_S", + "1.3\u202fGB" + ], + [ + "2b-instruct-v1.1-q3_K_M", + "1.4\u202fGB" + ], + [ + "2b-instruct-v1.1-q3_K_L", + "1.5\u202fGB" + ], + [ + "2b-instruct-v1.1-q4_0", + "1.6\u202fGB" + ], + [ + "2b-instruct-v1.1-q4_1", + "1.7\u202fGB" + ], + [ + "2b-instruct-v1.1-q4_K_S", + "1.6\u202fGB" + ], + [ + "2b-instruct-v1.1-q4_K_M", + "1.6\u202fGB" + ], + [ + "2b-instruct-v1.1-q5_0", + "1.8\u202fGB" + ], + [ + "2b-instruct-v1.1-q5_1", + "1.9\u202fGB" + ], + [ + "2b-instruct-v1.1-q5_K_S", + "1.8\u202fGB" + ], + [ + "2b-instruct-v1.1-q5_K_M", + "1.8\u202fGB" + ], + [ + "2b-instruct-v1.1-q6_K", + "2.1\u202fGB" + ], + [ + "2b-instruct-v1.1-q8_0", + "2.7\u202fGB" + ], + [ + "2b-text-fp16", + "4.5\u202fGB" + ], + [ + "2b-text-q2_K", + "1.3\u202fGB" + ], + [ + "2b-text-q3_K_S", + "1.4\u202fGB" + ], + [ + "2b-text-q3_K_M", + "1.5\u202fGB" + ], + [ + "2b-text-q3_K_L", + "1.6\u202fGB" + ], + [ + "2b-text-q4_0", + "1.7\u202fGB" + ], + [ + "2b-text-q4_1", + "1.8\u202fGB" + ], + [ + "2b-text-q4_K_S", + "1.7\u202fGB" + ], + [ + "2b-text-q4_K_M", + "1.8\u202fGB" + ], + [ + "2b-text-q5_0", + "1.9\u202fGB" + ], + [ + "2b-text-q5_1", + "2.1\u202fGB" + ], + [ + "2b-text-q5_K_S", + "1.9\u202fGB" + ], + [ + "2b-text-q5_K_M", + "2.0\u202fGB" + ], + [ + "2b-text-q6_K", + "2.2\u202fGB" + ], + [ + "2b-text-q8_0", + "2.7\u202fGB" + ], + [ + "7b-instruct-fp16", + "17\u202fGB" + ], + [ + "7b-instruct-q2_K", + "3.7\u202fGB" + ], + [ + "7b-instruct-q3_K_S", + "4.2\u202fGB" + ], + [ + "7b-instruct-q3_K_M", + "4.6\u202fGB" + ], + [ + "7b-instruct-q3_K_L", + "4.9\u202fGB" + ], + [ + "7b-instruct-q4_0", + "5.2\u202fGB" + ], + [ + "7b-instruct-q4_1", + "5.7\u202fGB" + ], + [ + "7b-instruct-q4_K_S", + "5.2\u202fGB" + ], + [ + "7b-instruct-q4_K_M", + "5.5\u202fGB" + ], + [ + "7b-instruct-q5_0", + "6.2\u202fGB" + ], + [ + "7b-instruct-q5_1", + "6.7\u202fGB" + ], + [ + "7b-instruct-q5_K_S", + "6.2\u202fGB" + ], + [ + "7b-instruct-q5_K_M", + "6.3\u202fGB" + ], + [ + "7b-instruct-q6_K", + "7.2\u202fGB" + ], + [ + "7b-instruct-q8_0", + "9.1\u202fGB" + ], + [ + "7b-instruct-v1.1-fp16", + "17\u202fGB" + ], + [ + "7b-instruct-v1.1-q2_K", + "3.5\u202fGB" + ], + [ + "7b-instruct-v1.1-q3_K_S", + "4.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q3_K_M", + "4.4\u202fGB" + ], + [ + "7b-instruct-v1.1-q3_K_L", + "4.7\u202fGB" + ], + [ + "7b-instruct-v1.1-q4_0", + "5.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q4_1", + "5.5\u202fGB" + ], + [ + "7b-instruct-v1.1-q4_K_S", + "5.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q4_K_M", + "5.3\u202fGB" + ], + [ + "7b-instruct-v1.1-q5_0", + "6.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q5_1", + "6.5\u202fGB" + ], + [ + "7b-instruct-v1.1-q5_K_S", + "6.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q5_K_M", + "6.1\u202fGB" + ], + [ + "7b-instruct-v1.1-q6_K", + "7.0\u202fGB" + ], + [ + "7b-instruct-v1.1-q8_0", + "9.1\u202fGB" + ], + [ + "7b-text-fp16", + "16\u202fGB" + ], + [ + "7b-text-q2_K", + "3.7\u202fGB" + ], + [ + "7b-text-q3_K_S", + 
"4.2\u202fGB" + ], + [ + "7b-text-q3_K_M", + "4.6\u202fGB" + ], + [ + "7b-text-q3_K_L", + "4.9\u202fGB" + ], + [ + "7b-text-q4_0", + "5.2\u202fGB" + ], + [ + "7b-text-q4_1", + "5.7\u202fGB" + ], + [ + "7b-text-q4_K_S", + "5.2\u202fGB" + ], + [ + "7b-text-q4_K_M", + "5.5\u202fGB" + ], + [ + "7b-text-q5_0", + "6.2\u202fGB" + ], + [ + "7b-text-q5_1", + "6.7\u202fGB" + ], + [ + "7b-text-q5_K_S", + "6.2\u202fGB" + ], + [ + "7b-text-q5_K_M", + "6.3\u202fGB" + ], + [ + "7b-text-q6_K", + "7.2\u202fGB" + ], + [ + "7b-text-q8_0", + "9.1\u202fGB" ] ], "image": false, @@ -3649,1519 +4129,1519 @@ "tags": [ [ "latest", - "2.3 GB" - ], - [ - "110b", - "63 GB" - ], - [ - "72b", - "41 GB" - ], - [ - "32b", - "18 GB" - ], - [ - "14b", - "8.2 GB" - ], - [ - "7b", - "4.5 GB" - ], - [ - "4b", - "2.3 GB" - ], - [ - "1.8b", - "1.1 GB" + "2.3\u202fGB" ], [ "0.5b", - "395 MB" + "395\u202fMB" ], [ - "110b-chat", - "63 GB" + "1.8b", + "1.1\u202fGB" ], [ - "110b-chat-v1.5-fp16", - "222 GB" + "4b", + "2.3\u202fGB" ], [ - "110b-chat-v1.5-q2_K", - "41 GB" + "7b", + "4.5\u202fGB" ], [ - "110b-chat-v1.5-q3_K_L", - "58 GB" + "14b", + "8.2\u202fGB" ], [ - "110b-chat-v1.5-q3_K_M", - "54 GB" + "32b", + "18\u202fGB" ], [ - "110b-chat-v1.5-q3_K_S", - "48 GB" + "72b", + "41\u202fGB" ], [ - "110b-chat-v1.5-q4_0", - "63 GB" - ], - [ - "110b-chat-v1.5-q4_1", - "70 GB" - ], - [ - "110b-chat-v1.5-q4_K_M", - "67 GB" - ], - [ - "110b-chat-v1.5-q4_K_S", - "63 GB" - ], - [ - "110b-chat-v1.5-q5_0", - "77 GB" - ], - [ - "110b-chat-v1.5-q5_1", - "84 GB" - ], - [ - "110b-chat-v1.5-q5_K_M", - "79 GB" - ], - [ - "110b-chat-v1.5-q5_K_S", - "77 GB" - ], - [ - "110b-chat-v1.5-q6_K", - "91 GB" - ], - [ - "110b-chat-v1.5-q8_0", - "118 GB" - ], - [ - "110b-text-v1.5-fp16", - "222 GB" - ], - [ - "110b-text-v1.5-q2_K", - "41 GB" - ], - [ - "110b-text-v1.5-q3_K_L", - "58 GB" - ], - [ - "110b-text-v1.5-q3_K_M", - "54 GB" - ], - [ - "110b-text-v1.5-q3_K_S", - "48 GB" - ], - [ - "110b-text-v1.5-q4_0", - "63 GB" - ], - [ - "110b-text-v1.5-q4_1", - "70 GB" - ], - [ - "110b-text-v1.5-q4_K_M", - "67 GB" - ], - [ - "110b-text-v1.5-q4_K_S", - "63 GB" - ], - [ - "110b-text-v1.5-q5_0", - "77 GB" - ], - [ - "110b-text-v1.5-q5_1", - "84 GB" - ], - [ - "110b-text-v1.5-q5_K_M", - "79 GB" - ], - [ - "110b-text-v1.5-q5_K_S", - "77 GB" - ], - [ - "110b-text-v1.5-q6_K", - "91 GB" - ], - [ - "110b-text-v1.5-q8_0", - "118 GB" - ], - [ - "72b-chat", - "41 GB" - ], - [ - "72b-chat-v1.5-fp16", - "145 GB" - ], - [ - "72b-chat-v1.5-q2_K", - "28 GB" - ], - [ - "72b-chat-v1.5-q3_K_L", - "38 GB" - ], - [ - "72b-chat-v1.5-q3_K_M", - "36 GB" - ], - [ - "72b-chat-v1.5-q3_K_S", - "33 GB" - ], - [ - "72b-chat-v1.5-q4_0", - "41 GB" - ], - [ - "72b-chat-v1.5-q4_1", - "45 GB" - ], - [ - "72b-chat-v1.5-q4_K_M", - "44 GB" - ], - [ - "72b-chat-v1.5-q4_K_S", - "42 GB" - ], - [ - "72b-chat-q4_K_M", - "45 GB" - ], - [ - "72b-chat-v1.5-q6_K", - "59 GB" - ], - [ - "72b-chat-v1.5-q5_K_S", - "50 GB" - ], - [ - "72b-chat-q4_0", - "41 GB" - ], - [ - "72b-chat-q3_K_S", - "32 GB" - ], - [ - "72b-chat-q3_K_L", - "39 GB" - ], - [ - "72b-chat-q3_K_M", - "37 GB" - ], - [ - "72b-chat-v1.5-q5_0", - "50 GB" - ], - [ - "72b-chat-fp16", - "145 GB" - ], - [ - "72b-chat-v1.5-q5_1", - "54 GB" - ], - [ - "72b-chat-q2_K", - "27 GB" - ], - [ - "72b-chat-v1.5-q8_0", - "77 GB" - ], - [ - "72b-chat-q4_1", - "45 GB" - ], - [ - "72b-chat-v1.5-q5_K_M", - "51 GB" - ], - [ - "72b-chat-q4_K_S", - "41 GB" - ], - [ - "72b-chat-q5_0", - "50 GB" - ], - [ - "72b-chat-q5_1", - "54 GB" - ], - [ - "72b-chat-q5_K_M", - "53 GB" - ], - 
[ - "72b-chat-q5_K_S", - "50 GB" - ], - [ - "72b-chat-q6_K", - "59 GB" - ], - [ - "72b-chat-q8_0", - "77 GB" - ], - [ - "72b-text", - "63 GB" - ], - [ - "72b-text-fp16", - "145 GB" - ], - [ - "72b-text-q2_K", - "27 GB" - ], - [ - "72b-text-q3_K_L", - "39 GB" - ], - [ - "72b-text-q3_K_M", - "37 GB" - ], - [ - "72b-text-q3_K_S", - "32 GB" - ], - [ - "72b-text-q4_0", - "41 GB" - ], - [ - "72b-text-q4_1", - "45 GB" - ], - [ - "72b-text-q4_K_M", - "45 GB" - ], - [ - "72b-text-q4_K_S", - "41 GB" - ], - [ - "72b-text-q5_0", - "50 GB" - ], - [ - "72b-text-q5_1", - "54 GB" - ], - [ - "72b-text-q5_K_M", - "53 GB" - ], - [ - "72b-text-q5_K_S", - "50 GB" - ], - [ - "72b-text-v1.5-q5_1", - "54 GB" - ], - [ - "72b-text-v1.5-q4_K_M", - "44 GB" - ], - [ - "72b-text-v1.5-q5_0", - "50 GB" - ], - [ - "72b-text-q6_K", - "59 GB" - ], - [ - "72b-text-v1.5-q2_K", - "28 GB" - ], - [ - "72b-text-v1.5-q3_K_S", - "33 GB" - ], - [ - "72b-text-v1.5-q3_K_M", - "36 GB" - ], - [ - "72b-text-v1.5-q4_1", - "45 GB" - ], - [ - "72b-text-v1.5-q4_0", - "41 GB" - ], - [ - "72b-text-v1.5-q4_K_S", - "42 GB" - ], - [ - "72b-text-v1.5-fp16", - "145 GB" - ], - [ - "72b-text-v1.5-q3_K_L", - "38 GB" - ], - [ - "72b-text-q8_0", - "77 GB" - ], - [ - "72b-text-v1.5-q5_K_M", - "51 GB" - ], - [ - "72b-text-v1.5-q5_K_S", - "50 GB" - ], - [ - "72b-text-v1.5-q6_K", - "59 GB" - ], - [ - "72b-text-v1.5-q8_0", - "77 GB" - ], - [ - "32b-chat", - "18 GB" - ], - [ - "32b-chat-v1.5-fp16", - "65 GB" - ], - [ - "32b-chat-v1.5-q2_K", - "12 GB" - ], - [ - "32b-chat-v1.5-q3_K_L", - "17 GB" - ], - [ - "32b-chat-v1.5-q3_K_M", - "16 GB" - ], - [ - "32b-chat-v1.5-q3_K_S", - "14 GB" - ], - [ - "32b-chat-v1.5-q4_0", - "18 GB" - ], - [ - "32b-chat-v1.5-q4_1", - "20 GB" - ], - [ - "32b-chat-v1.5-q4_K_M", - "20 GB" - ], - [ - "32b-chat-v1.5-q4_K_S", - "19 GB" - ], - [ - "32b-chat-v1.5-q5_0", - "22 GB" - ], - [ - "32b-chat-v1.5-q5_1", - "24 GB" - ], - [ - "32b-chat-v1.5-q5_K_M", - "23 GB" - ], - [ - "32b-chat-v1.5-q5_K_S", - "22 GB" - ], - [ - "32b-chat-v1.5-q6_K", - "27 GB" - ], - [ - "32b-chat-v1.5-q8_0", - "35 GB" - ], - [ - "32b-text", - "18 GB" - ], - [ - "32b-text-v1.5-q2_K", - "12 GB" - ], - [ - "32b-text-v1.5-q3_K_L", - "17 GB" - ], - [ - "32b-text-v1.5-q3_K_M", - "16 GB" - ], - [ - "32b-text-v1.5-q3_K_S", - "14 GB" - ], - [ - "32b-text-v1.5-q4_0", - "18 GB" - ], - [ - "32b-text-v1.5-q4_1", - "20 GB" - ], - [ - "32b-text-v1.5-q4_K_S", - "19 GB" - ], - [ - "32b-text-v1.5-q5_0", - "22 GB" - ], - [ - "32b-text-v1.5-q5_1", - "24 GB" - ], - [ - "32b-text-v1.5-q8_0", - "35 GB" - ], - [ - "14b-chat", - "8.2 GB" - ], - [ - "14b-chat-fp16", - "28 GB" - ], - [ - "14b-chat-q2_K", - "6.0 GB" - ], - [ - "14b-chat-q3_K_L", - "8.0 GB" - ], - [ - "14b-chat-q3_K_M", - "7.7 GB" - ], - [ - "14b-chat-q3_K_S", - "6.9 GB" - ], - [ - "14b-chat-q4_0", - "8.2 GB" - ], - [ - "14b-chat-q4_1", - "9.0 GB" - ], - [ - "14b-chat-q4_K_M", - "9.4 GB" - ], - [ - "14b-chat-q4_K_S", - "8.6 GB" - ], - [ - "14b-chat-q5_0", - "9.9 GB" - ], - [ - "14b-chat-q5_1", - "11 GB" - ], - [ - "14b-chat-v1.5-q2_K", - "6.1 GB" - ], - [ - "14b-chat-q8_0", - "15 GB" - ], - [ - "14b-chat-q5_K_M", - "11 GB" - ], - [ - "14b-chat-v1.5-fp16", - "28 GB" - ], - [ - "14b-chat-q5_K_S", - "10 GB" - ], - [ - "14b-chat-q6_K", - "12 GB" - ], - [ - "14b-chat-v1.5-q5_K_S", - "10 GB" - ], - [ - "14b-chat-v1.5-q4_1", - "9.0 GB" - ], - [ - "14b-chat-v1.5-q5_1", - "11 GB" - ], - [ - "14b-chat-v1.5-q5_K_M", - "11 GB" - ], - [ - "14b-chat-v1.5-q4_0", - "8.2 GB" - ], - [ - "14b-chat-v1.5-q3_K_S", - "6.9 GB" - ], - [ - 
"14b-chat-v1.5-q5_0", - "9.9 GB" - ], - [ - "14b-chat-v1.5-q3_K_L", - "7.8 GB" - ], - [ - "14b-chat-v1.5-q3_K_M", - "7.4 GB" - ], - [ - "14b-chat-v1.5-q4_K_S", - "8.6 GB" - ], - [ - "14b-chat-v1.5-q4_K_M", - "9.2 GB" - ], - [ - "14b-chat-v1.5-q6_K", - "12 GB" - ], - [ - "14b-chat-v1.5-q8_0", - "15 GB" - ], - [ - "14b-text", - "8.2 GB" - ], - [ - "14b-text-fp16", - "28 GB" - ], - [ - "14b-text-q2_K", - "6.0 GB" - ], - [ - "14b-text-q3_K_L", - "8.0 GB" - ], - [ - "14b-text-q3_K_M", - "7.7 GB" - ], - [ - "14b-text-q3_K_S", - "6.9 GB" - ], - [ - "14b-text-q4_0", - "8.2 GB" - ], - [ - "14b-text-q4_1", - "9.0 GB" - ], - [ - "14b-text-q4_K_M", - "9.4 GB" - ], - [ - "14b-text-q4_K_S", - "8.6 GB" - ], - [ - "14b-text-q5_0", - "9.9 GB" - ], - [ - "14b-text-q5_1", - "11 GB" - ], - [ - "14b-text-q5_K_M", - "11 GB" - ], - [ - "14b-text-q5_K_S", - "10 GB" - ], - [ - "14b-text-q6_K", - "12 GB" - ], - [ - "14b-text-v1.5-q5_K_M", - "11 GB" - ], - [ - "14b-text-v1.5-q4_K_S", - "8.6 GB" - ], - [ - "14b-text-v1.5-q4_1", - "9.0 GB" - ], - [ - "14b-text-v1.5-q5_1", - "11 GB" - ], - [ - "14b-text-q8_0", - "15 GB" - ], - [ - "14b-text-v1.5-fp16", - "28 GB" - ], - [ - "14b-text-v1.5-q3_K_L", - "7.8 GB" - ], - [ - "14b-text-v1.5-q3_K_S", - "6.9 GB" - ], - [ - "14b-text-v1.5-q4_0", - "8.2 GB" - ], - [ - "14b-text-v1.5-q2_K", - "6.1 GB" - ], - [ - "14b-text-v1.5-q5_0", - "9.9 GB" - ], - [ - "14b-text-v1.5-q3_K_M", - "7.4 GB" - ], - [ - "14b-text-v1.5-q4_K_M", - "9.2 GB" - ], - [ - "14b-text-v1.5-q5_K_S", - "10 GB" - ], - [ - "14b-text-v1.5-q6_K", - "12 GB" - ], - [ - "14b-text-v1.5-q8_0", - "15 GB" - ], - [ - "7b-chat", - "4.5 GB" - ], - [ - "7b-chat-fp16", - "15 GB" - ], - [ - "7b-chat-q2_K", - "3.0 GB" - ], - [ - "7b-chat-q3_K_L", - "4.3 GB" - ], - [ - "7b-chat-q3_K_M", - "4.1 GB" - ], - [ - "7b-chat-q3_K_S", - "3.6 GB" - ], - [ - "7b-chat-q4_0", - "4.5 GB" - ], - [ - "7b-chat-q4_1", - "5.0 GB" - ], - [ - "7b-chat-q4_K_M", - "4.9 GB" - ], - [ - "7b-chat-q4_K_S", - "4.5 GB" - ], - [ - "7b-chat-q5_0", - "5.4 GB" - ], - [ - "7b-chat-q5_1", - "5.8 GB" - ], - [ - "7b-chat-q5_K_M", - "5.7 GB" - ], - [ - "7b-chat-v1.5-q3_K_L", - "4.2 GB" - ], - [ - "7b-chat-q8_0", - "8.2 GB" - ], - [ - "7b-chat-v1.5-fp16", - "15 GB" - ], - [ - "7b-chat-v1.5-q2_K", - "3.1 GB" - ], - [ - "7b-chat-q5_K_S", - "5.4 GB" - ], - [ - "7b-chat-q6_K", - "6.3 GB" - ], - [ - "7b-chat-v1.5-q5_1", - "5.8 GB" - ], - [ - "7b-chat-v1.5-q3_K_S", - "3.6 GB" - ], - [ - "7b-chat-v1.5-q4_K_S", - "4.5 GB" - ], - [ - "7b-chat-v1.5-q4_0", - "4.5 GB" - ], - [ - "7b-chat-v1.5-q4_K_M", - "4.8 GB" - ], - [ - "7b-chat-v1.5-q4_1", - "5.0 GB" - ], - [ - "7b-chat-v1.5-q3_K_M", - "3.9 GB" - ], - [ - "7b-chat-v1.5-q5_0", - "5.4 GB" - ], - [ - "7b-chat-v1.5-q5_K_M", - "5.5 GB" - ], - [ - "7b-chat-v1.5-q5_K_S", - "5.4 GB" - ], - [ - "7b-chat-v1.5-q6_K", - "6.3 GB" - ], - [ - "7b-chat-v1.5-q8_0", - "8.2 GB" - ], - [ - "7b-text", - "4.5 GB" - ], - [ - "7b-text-v1.5-fp16", - "15 GB" - ], - [ - "7b-text-v1.5-q2_K", - "3.1 GB" - ], - [ - "7b-text-v1.5-q3_K_L", - "4.2 GB" - ], - [ - "7b-text-v1.5-q3_K_M", - "3.9 GB" - ], - [ - "7b-text-v1.5-q3_K_S", - "3.6 GB" - ], - [ - "7b-text-v1.5-q4_0", - "4.5 GB" - ], - [ - "7b-text-v1.5-q4_1", - "5.0 GB" - ], - [ - "7b-text-v1.5-q4_K_M", - "4.8 GB" - ], - [ - "7b-text-v1.5-q4_K_S", - "4.5 GB" - ], - [ - "7b-text-v1.5-q5_0", - "5.4 GB" - ], - [ - "7b-text-v1.5-q5_1", - "5.8 GB" - ], - [ - "7b-text-v1.5-q5_K_M", - "5.5 GB" - ], - [ - "7b-text-v1.5-q5_K_S", - "5.4 GB" - ], - [ - "7b-text-v1.5-q6_K", - "6.3 GB" - ], - [ - "7b-text-v1.5-q8_0", - 
"8.2 GB" - ], - [ - "4b-chat", - "2.3 GB" - ], - [ - "4b-text", - "2.3 GB" - ], - [ - "1.8b-chat", - "1.1 GB" - ], - [ - "7b-fp16", - "15 GB" - ], - [ - "7b-q2_K", - "3.0 GB" - ], - [ - "7b-q3_K_L", - "4.3 GB" - ], - [ - "7b-q3_K_M", - "4.1 GB" - ], - [ - "7b-q3_K_S", - "3.6 GB" - ], - [ - "7b-q4_0", - "4.5 GB" - ], - [ - "7b-q4_1", - "5.0 GB" - ], - [ - "7b-q4_K_M", - "4.9 GB" - ], - [ - "7b-q4_K_S", - "4.5 GB" - ], - [ - "7b-q5_0", - "5.4 GB" - ], - [ - "7b-q5_1", - "5.8 GB" - ], - [ - "7b-q5_K_M", - "5.7 GB" - ], - [ - "7b-q5_K_S", - "5.4 GB" - ], - [ - "7b-q6_K", - "6.3 GB" - ], - [ - "7b-q8_0", - "8.2 GB" - ], - [ - "4b-chat-v1.5-fp16", - "7.9 GB" - ], - [ - "4b-chat-v1.5-q2_K", - "1.6 GB" - ], - [ - "4b-chat-v1.5-q3_K_L", - "2.2 GB" - ], - [ - "4b-chat-v1.5-q3_K_M", - "2.0 GB" - ], - [ - "4b-chat-v1.5-q3_K_S", - "1.9 GB" - ], - [ - "4b-chat-v1.5-q4_0", - "2.3 GB" - ], - [ - "4b-chat-v1.5-q4_1", - "2.6 GB" - ], - [ - "4b-chat-v1.5-q4_K_M", - "2.5 GB" - ], - [ - "4b-chat-v1.5-q4_K_S", - "2.3 GB" - ], - [ - "4b-chat-v1.5-q5_0", - "2.8 GB" - ], - [ - "4b-chat-v1.5-q5_1", - "3.0 GB" - ], - [ - "4b-chat-v1.5-q5_K_M", - "2.8 GB" - ], - [ - "4b-chat-v1.5-q5_K_S", - "2.8 GB" - ], - [ - "4b-chat-v1.5-q6_K", - "3.2 GB" - ], - [ - "4b-chat-v1.5-q8_0", - "4.2 GB" - ], - [ - "4b-text-v1.5-fp16", - "7.9 GB" - ], - [ - "4b-text-v1.5-q2_K", - "1.6 GB" - ], - [ - "4b-text-v1.5-q3_K_L", - "2.2 GB" - ], - [ - "4b-text-v1.5-q3_K_M", - "2.0 GB" - ], - [ - "4b-text-v1.5-q3_K_S", - "1.9 GB" - ], - [ - "4b-text-v1.5-q4_0", - "2.3 GB" - ], - [ - "4b-text-v1.5-q4_1", - "2.6 GB" - ], - [ - "4b-text-v1.5-q4_K_M", - "2.5 GB" - ], - [ - "4b-text-v1.5-q4_K_S", - "2.3 GB" - ], - [ - "4b-text-v1.5-q5_0", - "2.8 GB" - ], - [ - "4b-text-v1.5-q5_1", - "3.0 GB" - ], - [ - "4b-text-v1.5-q5_K_M", - "2.8 GB" - ], - [ - "4b-text-v1.5-q5_K_S", - "2.8 GB" - ], - [ - "4b-text-v1.5-q6_K", - "3.2 GB" - ], - [ - "4b-text-v1.5-q8_0", - "4.2 GB" - ], - [ - "1.8b-chat-v1.5-fp16", - "3.7 GB" - ], - [ - "1.8b-chat-v1.5-q2_K", - "863 MB" - ], - [ - "1.8b-chat-v1.5-q3_K_L", - "1.1 GB" - ], - [ - "1.8b-chat-v1.5-q3_K_M", - "1.0 GB" - ], - [ - "1.8b-chat-v1.5-q3_K_S", - "970 MB" - ], - [ - "1.8b-chat-v1.5-q4_0", - "1.1 GB" - ], - [ - "1.8b-chat-v1.5-q4_1", - "1.2 GB" - ], - [ - "1.8b-chat-v1.5-q4_K_M", - "1.2 GB" - ], - [ - "1.8b-chat-v1.5-q4_K_S", - "1.2 GB" - ], - [ - "1.8b-chat-v1.5-q5_0", - "1.3 GB" - ], - [ - "1.8b-chat-v1.5-q5_1", - "1.4 GB" - ], - [ - "1.8b-chat-v1.5-q5_K_M", - "1.4 GB" - ], - [ - "1.8b-chat-v1.5-q5_K_S", - "1.3 GB" - ], - [ - "1.8b-chat-v1.5-q6_K", - "1.6 GB" - ], - [ - "1.8b-chat-q6_K", - "1.6 GB" - ], - [ - "1.8b-chat-q2_K", - "853 MB" - ], - [ - "1.8b-chat-v1.5-q8_0", - "2.0 GB" - ], - [ - "1.8b-chat-q3_K_L", - "1.1 GB" - ], - [ - "1.8b-chat-q5_0", - "1.3 GB" - ], - [ - "1.8b-chat-q4_K_S", - "1.2 GB" - ], - [ - "1.8b-chat-q5_K_M", - "1.4 GB" - ], - [ - "1.8b-chat-q4_K_M", - "1.2 GB" - ], - [ - "1.8b-chat-q5_K_S", - "1.3 GB" - ], - [ - "1.8b-chat-fp16", - "3.7 GB" - ], - [ - "1.8b-chat-q4_1", - "1.2 GB" - ], - [ - "1.8b-chat-q5_1", - "1.4 GB" - ], - [ - "1.8b-chat-q3_K_M", - "1.0 GB" - ], - [ - "1.8b-chat-q4_0", - "1.1 GB" - ], - [ - "1.8b-chat-q3_K_S", - "970 MB" - ], - [ - "1.8b-chat-q8_0", - "2.0 GB" - ], - [ - "1.8b-text", - "1.1 GB" - ], - [ - "1.8b-text-v1.5-fp16", - "3.7 GB" - ], - [ - "1.8b-text-v1.5-q2_K", - "863 MB" - ], - [ - "1.8b-text-q5_1", - "1.4 GB" - ], - [ - "1.8b-text-v1.5-q4_0", - "1.1 GB" - ], - [ - "1.8b-text-v1.5-q8_0", - "2.0 GB" - ], - [ - "1.8b-text-q4_0", - "1.1 GB" - ], - [ - 
"1.8b-text-v1.5-q6_K", - "1.6 GB" - ], - [ - "1.8b-text-v1.5-q5_1", - "1.4 GB" - ], - [ - "1.8b-text-q4_K_M", - "1.2 GB" - ], - [ - "1.8b-text-q3_K_S", - "970 MB" - ], - [ - "1.8b-text-q4_K_S", - "1.2 GB" - ], - [ - "1.8b-text-q2_K", - "853 MB" - ], - [ - "1.8b-text-v1.5-q3_K_L", - "1.1 GB" - ], - [ - "1.8b-text-q4_1", - "1.2 GB" - ], - [ - "1.8b-text-v1.5-q3_K_M", - "1.0 GB" - ], - [ - "1.8b-text-v1.5-q4_1", - "1.2 GB" - ], - [ - "1.8b-text-v1.5-q4_K_S", - "1.2 GB" - ], - [ - "1.8b-text-q3_K_M", - "1.0 GB" - ], - [ - "1.8b-text-v1.5-q3_K_S", - "970 MB" - ], - [ - "1.8b-text-v1.5-q4_K_M", - "1.2 GB" - ], - [ - "1.8b-text-fp16", - "3.7 GB" - ], - [ - "1.8b-text-v1.5-q5_K_M", - "1.4 GB" - ], - [ - "1.8b-text-q5_0", - "1.3 GB" - ], - [ - "1.8b-text-v1.5-q5_K_S", - "1.3 GB" - ], - [ - "1.8b-text-q3_K_L", - "1.1 GB" - ], - [ - "1.8b-text-v1.5-q5_0", - "1.3 GB" - ], - [ - "1.8b-text-q5_K_M", - "1.4 GB" - ], - [ - "1.8b-text-q5_K_S", - "1.3 GB" - ], - [ - "1.8b-text-q6_K", - "1.6 GB" - ], - [ - "1.8b-text-q8_0", - "2.0 GB" + "110b", + "63\u202fGB" ], [ "0.5b-chat", - "395 MB" - ], - [ - "0.5b-chat-v1.5-fp16", - "1.2 GB" - ], - [ - "0.5b-chat-v1.5-q2_K", - "298 MB" - ], - [ - "0.5b-chat-v1.5-q3_K_L", - "364 MB" - ], - [ - "0.5b-chat-v1.5-q3_K_M", - "350 MB" - ], - [ - "0.5b-chat-v1.5-q3_K_S", - "333 MB" - ], - [ - "0.5b-chat-v1.5-q4_0", - "395 MB" - ], - [ - "0.5b-chat-v1.5-q4_1", - "424 MB" - ], - [ - "0.5b-chat-v1.5-q4_K_M", - "407 MB" - ], - [ - "0.5b-chat-v1.5-q4_K_S", - "397 MB" - ], - [ - "0.5b-chat-v1.5-q5_0", - "453 MB" - ], - [ - "0.5b-chat-v1.5-q5_1", - "482 MB" - ], - [ - "0.5b-chat-v1.5-q5_K_M", - "459 MB" - ], - [ - "0.5b-chat-v1.5-q5_K_S", - "453 MB" - ], - [ - "0.5b-chat-v1.5-q6_K", - "515 MB" - ], - [ - "0.5b-chat-v1.5-q8_0", - "665 MB" + "395\u202fMB" ], [ "0.5b-text", - "395 MB" + "395\u202fMB" + ], + [ + "1.8b-chat", + "1.1\u202fGB" + ], + [ + "1.8b-text", + "1.1\u202fGB" + ], + [ + "4b-chat", + "2.3\u202fGB" + ], + [ + "4b-text", + "2.3\u202fGB" + ], + [ + "7b-chat", + "4.5\u202fGB" + ], + [ + "7b-text", + "4.5\u202fGB" + ], + [ + "14b-chat", + "8.2\u202fGB" + ], + [ + "14b-text", + "8.2\u202fGB" + ], + [ + "32b-chat", + "18\u202fGB" + ], + [ + "32b-text", + "18\u202fGB" + ], + [ + "72b-chat", + "41\u202fGB" + ], + [ + "72b-text", + "63\u202fGB" + ], + [ + "110b-chat", + "63\u202fGB" + ], + [ + "7b-fp16", + "15\u202fGB" + ], + [ + "7b-q2_K", + "3.0\u202fGB" + ], + [ + "7b-q3_K_S", + "3.6\u202fGB" + ], + [ + "7b-q3_K_M", + "4.1\u202fGB" + ], + [ + "7b-q3_K_L", + "4.3\u202fGB" + ], + [ + "7b-q4_0", + "4.5\u202fGB" + ], + [ + "7b-q4_1", + "5.0\u202fGB" + ], + [ + "7b-q4_K_S", + "4.5\u202fGB" + ], + [ + "7b-q4_K_M", + "4.9\u202fGB" + ], + [ + "7b-q5_0", + "5.4\u202fGB" + ], + [ + "7b-q5_1", + "5.8\u202fGB" + ], + [ + "7b-q5_K_S", + "5.4\u202fGB" + ], + [ + "7b-q5_K_M", + "5.7\u202fGB" + ], + [ + "7b-q6_K", + "6.3\u202fGB" + ], + [ + "7b-q8_0", + "8.2\u202fGB" + ], + [ + "0.5b-chat-v1.5-fp16", + "1.2\u202fGB" + ], + [ + "0.5b-chat-v1.5-q2_K", + "298\u202fMB" + ], + [ + "0.5b-chat-v1.5-q3_K_S", + "333\u202fMB" + ], + [ + "0.5b-chat-v1.5-q3_K_M", + "350\u202fMB" + ], + [ + "0.5b-chat-v1.5-q3_K_L", + "364\u202fMB" + ], + [ + "0.5b-chat-v1.5-q4_0", + "395\u202fMB" + ], + [ + "0.5b-chat-v1.5-q4_1", + "424\u202fMB" + ], + [ + "0.5b-chat-v1.5-q4_K_S", + "397\u202fMB" + ], + [ + "0.5b-chat-v1.5-q4_K_M", + "407\u202fMB" + ], + [ + "0.5b-chat-v1.5-q5_0", + "453\u202fMB" + ], + [ + "0.5b-chat-v1.5-q5_1", + "482\u202fMB" + ], + [ + "0.5b-chat-v1.5-q5_K_S", + "453\u202fMB" + ], + [ + 
"0.5b-chat-v1.5-q5_K_M", + "459\u202fMB" + ], + [ + "0.5b-chat-v1.5-q6_K", + "515\u202fMB" + ], + [ + "0.5b-chat-v1.5-q8_0", + "665\u202fMB" ], [ "0.5b-text-v1.5-fp16", - "1.2 GB" + "1.2\u202fGB" ], [ "0.5b-text-v1.5-q2_K", - "298 MB" - ], - [ - "0.5b-text-v1.5-q3_K_L", - "364 MB" - ], - [ - "0.5b-text-v1.5-q3_K_M", - "350 MB" + "298\u202fMB" ], [ "0.5b-text-v1.5-q3_K_S", - "333 MB" + "333\u202fMB" + ], + [ + "0.5b-text-v1.5-q3_K_M", + "350\u202fMB" + ], + [ + "0.5b-text-v1.5-q3_K_L", + "364\u202fMB" ], [ "0.5b-text-v1.5-q4_0", - "395 MB" + "395\u202fMB" ], [ "0.5b-text-v1.5-q4_1", - "424 MB" - ], - [ - "0.5b-text-v1.5-q4_K_M", - "407 MB" + "424\u202fMB" ], [ "0.5b-text-v1.5-q4_K_S", - "397 MB" + "397\u202fMB" + ], + [ + "0.5b-text-v1.5-q4_K_M", + "407\u202fMB" ], [ "0.5b-text-v1.5-q5_0", - "453 MB" + "453\u202fMB" ], [ "0.5b-text-v1.5-q5_1", - "482 MB" - ], - [ - "0.5b-text-v1.5-q5_K_M", - "459 MB" + "482\u202fMB" ], [ "0.5b-text-v1.5-q5_K_S", - "453 MB" + "453\u202fMB" + ], + [ + "0.5b-text-v1.5-q5_K_M", + "459\u202fMB" ], [ "0.5b-text-v1.5-q6_K", - "515 MB" + "515\u202fMB" ], [ "0.5b-text-v1.5-q8_0", - "665 MB" + "665\u202fMB" + ], + [ + "1.8b-chat-fp16", + "3.7\u202fGB" + ], + [ + "1.8b-chat-q2_K", + "853\u202fMB" + ], + [ + "1.8b-chat-q3_K_S", + "970\u202fMB" + ], + [ + "1.8b-chat-q3_K_M", + "1.0\u202fGB" + ], + [ + "1.8b-chat-q3_K_L", + "1.1\u202fGB" + ], + [ + "1.8b-chat-q4_0", + "1.1\u202fGB" + ], + [ + "1.8b-chat-q4_1", + "1.2\u202fGB" + ], + [ + "1.8b-chat-q4_K_S", + "1.2\u202fGB" + ], + [ + "1.8b-chat-q4_K_M", + "1.2\u202fGB" + ], + [ + "1.8b-chat-q5_0", + "1.3\u202fGB" + ], + [ + "1.8b-chat-q5_1", + "1.4\u202fGB" + ], + [ + "1.8b-chat-q5_K_S", + "1.3\u202fGB" + ], + [ + "1.8b-chat-q5_K_M", + "1.4\u202fGB" + ], + [ + "1.8b-chat-q6_K", + "1.6\u202fGB" + ], + [ + "1.8b-chat-q8_0", + "2.0\u202fGB" + ], + [ + "1.8b-chat-v1.5-fp16", + "3.7\u202fGB" + ], + [ + "1.8b-chat-v1.5-q2_K", + "863\u202fMB" + ], + [ + "1.8b-chat-v1.5-q3_K_S", + "970\u202fMB" + ], + [ + "1.8b-chat-v1.5-q3_K_M", + "1.0\u202fGB" + ], + [ + "1.8b-chat-v1.5-q3_K_L", + "1.1\u202fGB" + ], + [ + "1.8b-chat-v1.5-q4_0", + "1.1\u202fGB" + ], + [ + "1.8b-chat-v1.5-q4_1", + "1.2\u202fGB" + ], + [ + "1.8b-chat-v1.5-q4_K_S", + "1.2\u202fGB" + ], + [ + "1.8b-chat-v1.5-q4_K_M", + "1.2\u202fGB" + ], + [ + "1.8b-chat-v1.5-q5_0", + "1.3\u202fGB" + ], + [ + "1.8b-chat-v1.5-q5_1", + "1.4\u202fGB" + ], + [ + "1.8b-chat-v1.5-q5_K_S", + "1.3\u202fGB" + ], + [ + "1.8b-chat-v1.5-q5_K_M", + "1.4\u202fGB" + ], + [ + "1.8b-chat-v1.5-q6_K", + "1.6\u202fGB" + ], + [ + "1.8b-chat-v1.5-q8_0", + "2.0\u202fGB" + ], + [ + "1.8b-text-fp16", + "3.7\u202fGB" + ], + [ + "1.8b-text-q2_K", + "853\u202fMB" + ], + [ + "1.8b-text-q3_K_S", + "970\u202fMB" + ], + [ + "1.8b-text-q3_K_M", + "1.0\u202fGB" + ], + [ + "1.8b-text-q3_K_L", + "1.1\u202fGB" + ], + [ + "1.8b-text-q4_0", + "1.1\u202fGB" + ], + [ + "1.8b-text-q4_1", + "1.2\u202fGB" + ], + [ + "1.8b-text-q4_K_S", + "1.2\u202fGB" + ], + [ + "1.8b-text-q4_K_M", + "1.2\u202fGB" + ], + [ + "1.8b-text-q5_0", + "1.3\u202fGB" + ], + [ + "1.8b-text-q5_1", + "1.4\u202fGB" + ], + [ + "1.8b-text-q5_K_S", + "1.3\u202fGB" + ], + [ + "1.8b-text-q5_K_M", + "1.4\u202fGB" + ], + [ + "1.8b-text-q6_K", + "1.6\u202fGB" + ], + [ + "1.8b-text-q8_0", + "2.0\u202fGB" + ], + [ + "1.8b-text-v1.5-fp16", + "3.7\u202fGB" + ], + [ + "1.8b-text-v1.5-q2_K", + "863\u202fMB" + ], + [ + "1.8b-text-v1.5-q3_K_S", + "970\u202fMB" + ], + [ + "1.8b-text-v1.5-q3_K_M", + "1.0\u202fGB" + ], + [ + "1.8b-text-v1.5-q3_K_L", + "1.1\u202fGB" + ], + [ 
+ "1.8b-text-v1.5-q4_0", + "1.1\u202fGB" + ], + [ + "1.8b-text-v1.5-q4_1", + "1.2\u202fGB" + ], + [ + "1.8b-text-v1.5-q4_K_S", + "1.2\u202fGB" + ], + [ + "1.8b-text-v1.5-q4_K_M", + "1.2\u202fGB" + ], + [ + "1.8b-text-v1.5-q5_0", + "1.3\u202fGB" + ], + [ + "1.8b-text-v1.5-q5_1", + "1.4\u202fGB" + ], + [ + "1.8b-text-v1.5-q5_K_S", + "1.3\u202fGB" + ], + [ + "1.8b-text-v1.5-q5_K_M", + "1.4\u202fGB" + ], + [ + "1.8b-text-v1.5-q6_K", + "1.6\u202fGB" + ], + [ + "1.8b-text-v1.5-q8_0", + "2.0\u202fGB" + ], + [ + "4b-chat-v1.5-fp16", + "7.9\u202fGB" + ], + [ + "4b-chat-v1.5-q2_K", + "1.6\u202fGB" + ], + [ + "4b-chat-v1.5-q3_K_S", + "1.9\u202fGB" + ], + [ + "4b-chat-v1.5-q3_K_M", + "2.0\u202fGB" + ], + [ + "4b-chat-v1.5-q3_K_L", + "2.2\u202fGB" + ], + [ + "4b-chat-v1.5-q4_0", + "2.3\u202fGB" + ], + [ + "4b-chat-v1.5-q4_1", + "2.6\u202fGB" + ], + [ + "4b-chat-v1.5-q4_K_S", + "2.3\u202fGB" + ], + [ + "4b-chat-v1.5-q4_K_M", + "2.5\u202fGB" + ], + [ + "4b-chat-v1.5-q5_0", + "2.8\u202fGB" + ], + [ + "4b-chat-v1.5-q5_1", + "3.0\u202fGB" + ], + [ + "4b-chat-v1.5-q5_K_S", + "2.8\u202fGB" + ], + [ + "4b-chat-v1.5-q5_K_M", + "2.8\u202fGB" + ], + [ + "4b-chat-v1.5-q6_K", + "3.2\u202fGB" + ], + [ + "4b-chat-v1.5-q8_0", + "4.2\u202fGB" + ], + [ + "4b-text-v1.5-fp16", + "7.9\u202fGB" + ], + [ + "4b-text-v1.5-q2_K", + "1.6\u202fGB" + ], + [ + "4b-text-v1.5-q3_K_S", + "1.9\u202fGB" + ], + [ + "4b-text-v1.5-q3_K_M", + "2.0\u202fGB" + ], + [ + "4b-text-v1.5-q3_K_L", + "2.2\u202fGB" + ], + [ + "4b-text-v1.5-q4_0", + "2.3\u202fGB" + ], + [ + "4b-text-v1.5-q4_1", + "2.6\u202fGB" + ], + [ + "4b-text-v1.5-q4_K_S", + "2.3\u202fGB" + ], + [ + "4b-text-v1.5-q4_K_M", + "2.5\u202fGB" + ], + [ + "4b-text-v1.5-q5_0", + "2.8\u202fGB" + ], + [ + "4b-text-v1.5-q5_1", + "3.0\u202fGB" + ], + [ + "4b-text-v1.5-q5_K_S", + "2.8\u202fGB" + ], + [ + "4b-text-v1.5-q5_K_M", + "2.8\u202fGB" + ], + [ + "4b-text-v1.5-q6_K", + "3.2\u202fGB" + ], + [ + "4b-text-v1.5-q8_0", + "4.2\u202fGB" + ], + [ + "7b-chat-fp16", + "15\u202fGB" + ], + [ + "7b-chat-q2_K", + "3.0\u202fGB" + ], + [ + "7b-chat-q3_K_S", + "3.6\u202fGB" + ], + [ + "7b-chat-q3_K_M", + "4.1\u202fGB" + ], + [ + "7b-chat-q3_K_L", + "4.3\u202fGB" + ], + [ + "7b-chat-q4_0", + "4.5\u202fGB" + ], + [ + "7b-chat-q4_1", + "5.0\u202fGB" + ], + [ + "7b-chat-q4_K_S", + "4.5\u202fGB" + ], + [ + "7b-chat-q4_K_M", + "4.9\u202fGB" + ], + [ + "7b-chat-q5_0", + "5.4\u202fGB" + ], + [ + "7b-chat-q5_1", + "5.8\u202fGB" + ], + [ + "7b-chat-q5_K_S", + "5.4\u202fGB" + ], + [ + "7b-chat-q5_K_M", + "5.7\u202fGB" + ], + [ + "7b-chat-q6_K", + "6.3\u202fGB" + ], + [ + "7b-chat-q8_0", + "8.2\u202fGB" + ], + [ + "7b-chat-v1.5-fp16", + "15\u202fGB" + ], + [ + "7b-chat-v1.5-q2_K", + "3.1\u202fGB" + ], + [ + "7b-chat-v1.5-q3_K_S", + "3.6\u202fGB" + ], + [ + "7b-chat-v1.5-q3_K_M", + "3.9\u202fGB" + ], + [ + "7b-chat-v1.5-q3_K_L", + "4.2\u202fGB" + ], + [ + "7b-chat-v1.5-q4_0", + "4.5\u202fGB" + ], + [ + "7b-chat-v1.5-q4_1", + "5.0\u202fGB" + ], + [ + "7b-chat-v1.5-q4_K_S", + "4.5\u202fGB" + ], + [ + "7b-chat-v1.5-q4_K_M", + "4.8\u202fGB" + ], + [ + "7b-chat-v1.5-q5_0", + "5.4\u202fGB" + ], + [ + "7b-chat-v1.5-q5_1", + "5.8\u202fGB" + ], + [ + "7b-chat-v1.5-q5_K_S", + "5.4\u202fGB" + ], + [ + "7b-chat-v1.5-q5_K_M", + "5.5\u202fGB" + ], + [ + "7b-chat-v1.5-q6_K", + "6.3\u202fGB" + ], + [ + "7b-chat-v1.5-q8_0", + "8.2\u202fGB" + ], + [ + "7b-text-v1.5-fp16", + "15\u202fGB" + ], + [ + "7b-text-v1.5-q2_K", + "3.1\u202fGB" + ], + [ + "7b-text-v1.5-q3_K_S", + "3.6\u202fGB" + ], + [ + "7b-text-v1.5-q3_K_M", + "3.9\u202fGB" + 
], + [ + "7b-text-v1.5-q3_K_L", + "4.2\u202fGB" + ], + [ + "7b-text-v1.5-q4_0", + "4.5\u202fGB" + ], + [ + "7b-text-v1.5-q4_1", + "5.0\u202fGB" + ], + [ + "7b-text-v1.5-q4_K_S", + "4.5\u202fGB" + ], + [ + "7b-text-v1.5-q4_K_M", + "4.8\u202fGB" + ], + [ + "7b-text-v1.5-q5_0", + "5.4\u202fGB" + ], + [ + "7b-text-v1.5-q5_1", + "5.8\u202fGB" + ], + [ + "7b-text-v1.5-q5_K_S", + "5.4\u202fGB" + ], + [ + "7b-text-v1.5-q5_K_M", + "5.5\u202fGB" + ], + [ + "7b-text-v1.5-q6_K", + "6.3\u202fGB" + ], + [ + "7b-text-v1.5-q8_0", + "8.2\u202fGB" + ], + [ + "14b-chat-fp16", + "28\u202fGB" + ], + [ + "14b-chat-q2_K", + "6.0\u202fGB" + ], + [ + "14b-chat-q3_K_S", + "6.9\u202fGB" + ], + [ + "14b-chat-q3_K_M", + "7.7\u202fGB" + ], + [ + "14b-chat-q3_K_L", + "8.0\u202fGB" + ], + [ + "14b-chat-q4_0", + "8.2\u202fGB" + ], + [ + "14b-chat-q4_1", + "9.0\u202fGB" + ], + [ + "14b-chat-q4_K_S", + "8.6\u202fGB" + ], + [ + "14b-chat-q4_K_M", + "9.4\u202fGB" + ], + [ + "14b-chat-q5_0", + "9.9\u202fGB" + ], + [ + "14b-chat-q5_1", + "11\u202fGB" + ], + [ + "14b-chat-q5_K_S", + "10\u202fGB" + ], + [ + "14b-chat-q5_K_M", + "11\u202fGB" + ], + [ + "14b-chat-q6_K", + "12\u202fGB" + ], + [ + "14b-chat-q8_0", + "15\u202fGB" + ], + [ + "14b-chat-v1.5-fp16", + "28\u202fGB" + ], + [ + "14b-chat-v1.5-q2_K", + "6.1\u202fGB" + ], + [ + "14b-chat-v1.5-q3_K_S", + "6.9\u202fGB" + ], + [ + "14b-chat-v1.5-q3_K_M", + "7.4\u202fGB" + ], + [ + "14b-chat-v1.5-q3_K_L", + "7.8\u202fGB" + ], + [ + "14b-chat-v1.5-q4_0", + "8.2\u202fGB" + ], + [ + "14b-chat-v1.5-q4_1", + "9.0\u202fGB" + ], + [ + "14b-chat-v1.5-q4_K_S", + "8.6\u202fGB" + ], + [ + "14b-chat-v1.5-q4_K_M", + "9.2\u202fGB" + ], + [ + "14b-chat-v1.5-q5_0", + "9.9\u202fGB" + ], + [ + "14b-chat-v1.5-q5_1", + "11\u202fGB" + ], + [ + "14b-chat-v1.5-q5_K_S", + "10\u202fGB" + ], + [ + "14b-chat-v1.5-q5_K_M", + "11\u202fGB" + ], + [ + "14b-chat-v1.5-q6_K", + "12\u202fGB" + ], + [ + "14b-chat-v1.5-q8_0", + "15\u202fGB" + ], + [ + "14b-text-fp16", + "28\u202fGB" + ], + [ + "14b-text-q2_K", + "6.0\u202fGB" + ], + [ + "14b-text-q3_K_S", + "6.9\u202fGB" + ], + [ + "14b-text-q3_K_M", + "7.7\u202fGB" + ], + [ + "14b-text-q3_K_L", + "8.0\u202fGB" + ], + [ + "14b-text-q4_0", + "8.2\u202fGB" + ], + [ + "14b-text-q4_1", + "9.0\u202fGB" + ], + [ + "14b-text-q4_K_S", + "8.6\u202fGB" + ], + [ + "14b-text-q4_K_M", + "9.4\u202fGB" + ], + [ + "14b-text-q5_0", + "9.9\u202fGB" + ], + [ + "14b-text-q5_1", + "11\u202fGB" + ], + [ + "14b-text-q5_K_S", + "10\u202fGB" + ], + [ + "14b-text-q5_K_M", + "11\u202fGB" + ], + [ + "14b-text-q6_K", + "12\u202fGB" + ], + [ + "14b-text-q8_0", + "15\u202fGB" + ], + [ + "14b-text-v1.5-fp16", + "28\u202fGB" + ], + [ + "14b-text-v1.5-q2_K", + "6.1\u202fGB" + ], + [ + "14b-text-v1.5-q3_K_S", + "6.9\u202fGB" + ], + [ + "14b-text-v1.5-q3_K_M", + "7.4\u202fGB" + ], + [ + "14b-text-v1.5-q3_K_L", + "7.8\u202fGB" + ], + [ + "14b-text-v1.5-q4_0", + "8.2\u202fGB" + ], + [ + "14b-text-v1.5-q4_1", + "9.0\u202fGB" + ], + [ + "14b-text-v1.5-q4_K_S", + "8.6\u202fGB" + ], + [ + "14b-text-v1.5-q4_K_M", + "9.2\u202fGB" + ], + [ + "14b-text-v1.5-q5_0", + "9.9\u202fGB" + ], + [ + "14b-text-v1.5-q5_1", + "11\u202fGB" + ], + [ + "14b-text-v1.5-q5_K_S", + "10\u202fGB" + ], + [ + "14b-text-v1.5-q5_K_M", + "11\u202fGB" + ], + [ + "14b-text-v1.5-q6_K", + "12\u202fGB" + ], + [ + "14b-text-v1.5-q8_0", + "15\u202fGB" + ], + [ + "32b-chat-v1.5-fp16", + "65\u202fGB" + ], + [ + "32b-chat-v1.5-q2_K", + "12\u202fGB" + ], + [ + "32b-chat-v1.5-q3_K_S", + "14\u202fGB" + ], + [ + "32b-chat-v1.5-q3_K_M", + "16\u202fGB" 
+ ], + [ + "32b-chat-v1.5-q3_K_L", + "17\u202fGB" + ], + [ + "32b-chat-v1.5-q4_0", + "18\u202fGB" + ], + [ + "32b-chat-v1.5-q4_1", + "20\u202fGB" + ], + [ + "32b-chat-v1.5-q4_K_S", + "19\u202fGB" + ], + [ + "32b-chat-v1.5-q4_K_M", + "20\u202fGB" + ], + [ + "32b-chat-v1.5-q5_0", + "22\u202fGB" + ], + [ + "32b-chat-v1.5-q5_1", + "24\u202fGB" + ], + [ + "32b-chat-v1.5-q5_K_S", + "22\u202fGB" + ], + [ + "32b-chat-v1.5-q5_K_M", + "23\u202fGB" + ], + [ + "32b-chat-v1.5-q6_K", + "27\u202fGB" + ], + [ + "32b-chat-v1.5-q8_0", + "35\u202fGB" + ], + [ + "32b-text-v1.5-q2_K", + "12\u202fGB" + ], + [ + "32b-text-v1.5-q3_K_S", + "14\u202fGB" + ], + [ + "32b-text-v1.5-q3_K_M", + "16\u202fGB" + ], + [ + "32b-text-v1.5-q3_K_L", + "17\u202fGB" + ], + [ + "32b-text-v1.5-q4_0", + "18\u202fGB" + ], + [ + "32b-text-v1.5-q4_1", + "20\u202fGB" + ], + [ + "32b-text-v1.5-q4_K_S", + "19\u202fGB" + ], + [ + "32b-text-v1.5-q5_0", + "22\u202fGB" + ], + [ + "32b-text-v1.5-q5_1", + "24\u202fGB" + ], + [ + "32b-text-v1.5-q8_0", + "35\u202fGB" + ], + [ + "72b-chat-fp16", + "145\u202fGB" + ], + [ + "72b-chat-q2_K", + "27\u202fGB" + ], + [ + "72b-chat-q3_K_S", + "32\u202fGB" + ], + [ + "72b-chat-q3_K_M", + "37\u202fGB" + ], + [ + "72b-chat-q3_K_L", + "39\u202fGB" + ], + [ + "72b-chat-q4_0", + "41\u202fGB" + ], + [ + "72b-chat-q4_1", + "45\u202fGB" + ], + [ + "72b-chat-q4_K_S", + "41\u202fGB" + ], + [ + "72b-chat-q4_K_M", + "45\u202fGB" + ], + [ + "72b-chat-q5_0", + "50\u202fGB" + ], + [ + "72b-chat-q5_1", + "54\u202fGB" + ], + [ + "72b-chat-q5_K_S", + "50\u202fGB" + ], + [ + "72b-chat-q5_K_M", + "53\u202fGB" + ], + [ + "72b-chat-q6_K", + "59\u202fGB" + ], + [ + "72b-chat-q8_0", + "77\u202fGB" + ], + [ + "72b-chat-v1.5-fp16", + "145\u202fGB" + ], + [ + "72b-chat-v1.5-q2_K", + "28\u202fGB" + ], + [ + "72b-chat-v1.5-q3_K_S", + "33\u202fGB" + ], + [ + "72b-chat-v1.5-q3_K_M", + "36\u202fGB" + ], + [ + "72b-chat-v1.5-q3_K_L", + "38\u202fGB" + ], + [ + "72b-chat-v1.5-q4_0", + "41\u202fGB" + ], + [ + "72b-chat-v1.5-q4_1", + "45\u202fGB" + ], + [ + "72b-chat-v1.5-q4_K_S", + "42\u202fGB" + ], + [ + "72b-chat-v1.5-q4_K_M", + "44\u202fGB" + ], + [ + "72b-chat-v1.5-q5_0", + "50\u202fGB" + ], + [ + "72b-chat-v1.5-q5_1", + "54\u202fGB" + ], + [ + "72b-chat-v1.5-q5_K_S", + "50\u202fGB" + ], + [ + "72b-chat-v1.5-q5_K_M", + "51\u202fGB" + ], + [ + "72b-chat-v1.5-q6_K", + "59\u202fGB" + ], + [ + "72b-chat-v1.5-q8_0", + "77\u202fGB" + ], + [ + "72b-text-fp16", + "145\u202fGB" + ], + [ + "72b-text-q2_K", + "27\u202fGB" + ], + [ + "72b-text-q3_K_S", + "32\u202fGB" + ], + [ + "72b-text-q3_K_M", + "37\u202fGB" + ], + [ + "72b-text-q3_K_L", + "39\u202fGB" + ], + [ + "72b-text-q4_0", + "41\u202fGB" + ], + [ + "72b-text-q4_1", + "45\u202fGB" + ], + [ + "72b-text-q4_K_S", + "41\u202fGB" + ], + [ + "72b-text-q4_K_M", + "45\u202fGB" + ], + [ + "72b-text-q5_0", + "50\u202fGB" + ], + [ + "72b-text-q5_1", + "54\u202fGB" + ], + [ + "72b-text-q5_K_S", + "50\u202fGB" + ], + [ + "72b-text-q5_K_M", + "53\u202fGB" + ], + [ + "72b-text-q6_K", + "59\u202fGB" + ], + [ + "72b-text-q8_0", + "77\u202fGB" + ], + [ + "72b-text-v1.5-fp16", + "145\u202fGB" + ], + [ + "72b-text-v1.5-q2_K", + "28\u202fGB" + ], + [ + "72b-text-v1.5-q3_K_S", + "33\u202fGB" + ], + [ + "72b-text-v1.5-q3_K_M", + "36\u202fGB" + ], + [ + "72b-text-v1.5-q3_K_L", + "38\u202fGB" + ], + [ + "72b-text-v1.5-q4_0", + "41\u202fGB" + ], + [ + "72b-text-v1.5-q4_1", + "45\u202fGB" + ], + [ + "72b-text-v1.5-q4_K_S", + "42\u202fGB" + ], + [ + "72b-text-v1.5-q4_K_M", + "44\u202fGB" + ], + [ + 
"72b-text-v1.5-q5_0", + "50\u202fGB" + ], + [ + "72b-text-v1.5-q5_1", + "54\u202fGB" + ], + [ + "72b-text-v1.5-q5_K_S", + "50\u202fGB" + ], + [ + "72b-text-v1.5-q5_K_M", + "51\u202fGB" + ], + [ + "72b-text-v1.5-q6_K", + "59\u202fGB" + ], + [ + "72b-text-v1.5-q8_0", + "77\u202fGB" + ], + [ + "110b-chat-v1.5-fp16", + "222\u202fGB" + ], + [ + "110b-chat-v1.5-q2_K", + "41\u202fGB" + ], + [ + "110b-chat-v1.5-q3_K_S", + "48\u202fGB" + ], + [ + "110b-chat-v1.5-q3_K_M", + "54\u202fGB" + ], + [ + "110b-chat-v1.5-q3_K_L", + "58\u202fGB" + ], + [ + "110b-chat-v1.5-q4_0", + "63\u202fGB" + ], + [ + "110b-chat-v1.5-q4_1", + "70\u202fGB" + ], + [ + "110b-chat-v1.5-q4_K_S", + "63\u202fGB" + ], + [ + "110b-chat-v1.5-q4_K_M", + "67\u202fGB" + ], + [ + "110b-chat-v1.5-q5_0", + "77\u202fGB" + ], + [ + "110b-chat-v1.5-q5_1", + "84\u202fGB" + ], + [ + "110b-chat-v1.5-q5_K_S", + "77\u202fGB" + ], + [ + "110b-chat-v1.5-q5_K_M", + "79\u202fGB" + ], + [ + "110b-chat-v1.5-q6_K", + "91\u202fGB" + ], + [ + "110b-chat-v1.5-q8_0", + "118\u202fGB" + ], + [ + "110b-text-v1.5-fp16", + "222\u202fGB" + ], + [ + "110b-text-v1.5-q2_K", + "41\u202fGB" + ], + [ + "110b-text-v1.5-q3_K_S", + "48\u202fGB" + ], + [ + "110b-text-v1.5-q3_K_M", + "54\u202fGB" + ], + [ + "110b-text-v1.5-q3_K_L", + "58\u202fGB" + ], + [ + "110b-text-v1.5-q4_0", + "63\u202fGB" + ], + [ + "110b-text-v1.5-q4_1", + "70\u202fGB" + ], + [ + "110b-text-v1.5-q4_K_S", + "63\u202fGB" + ], + [ + "110b-text-v1.5-q4_K_M", + "67\u202fGB" + ], + [ + "110b-text-v1.5-q5_0", + "77\u202fGB" + ], + [ + "110b-text-v1.5-q5_1", + "84\u202fGB" + ], + [ + "110b-text-v1.5-q5_K_S", + "77\u202fGB" + ], + [ + "110b-text-v1.5-q5_K_M", + "79\u202fGB" + ], + [ + "110b-text-v1.5-q6_K", + "91\u202fGB" + ], + [ + "110b-text-v1.5-q8_0", + "118\u202fGB" ] ], "image": false, @@ -5173,411 +5653,411 @@ "tags": [ [ "latest", - "3.8 GB" - ], - [ - "70b", - "39 GB" - ], - [ - "13b", - "7.4 GB" + "3.8\u202fGB" ], [ "7b", - "3.8 GB" + "3.8\u202fGB" + ], + [ + "13b", + "7.4\u202fGB" + ], + [ + "70b", + "39\u202fGB" ], [ "chat", - "3.8 GB" + "3.8\u202fGB" ], [ "text", - "3.8 GB" - ], - [ - "70b-chat", - "39 GB" - ], - [ - "70b-chat-fp16", - "138 GB" - ], - [ - "70b-chat-q2_K", - "29 GB" - ], - [ - "70b-chat-q3_K_L", - "36 GB" - ], - [ - "70b-chat-q3_K_M", - "33 GB" - ], - [ - "70b-chat-q3_K_S", - "30 GB" - ], - [ - "70b-chat-q4_0", - "39 GB" - ], - [ - "70b-chat-q4_1", - "43 GB" - ], - [ - "70b-chat-q4_K_M", - "41 GB" - ], - [ - "70b-chat-q4_K_S", - "39 GB" - ], - [ - "70b-chat-q5_0", - "47 GB" - ], - [ - "70b-chat-q5_1", - "52 GB" - ], - [ - "70b-chat-q5_K_M", - "49 GB" - ], - [ - "70b-chat-q5_K_S", - "47 GB" - ], - [ - "70b-chat-q6_K", - "57 GB" - ], - [ - "70b-chat-q8_0", - "73 GB" - ], - [ - "70b-text", - "39 GB" - ], - [ - "70b-text-fp16", - "138 GB" - ], - [ - "70b-text-q2_K", - "29 GB" - ], - [ - "70b-text-q3_K_L", - "36 GB" - ], - [ - "70b-text-q3_K_M", - "33 GB" - ], - [ - "70b-text-q3_K_S", - "30 GB" - ], - [ - "70b-text-q4_0", - "39 GB" - ], - [ - "70b-text-q4_1", - "43 GB" - ], - [ - "70b-text-q4_K_M", - "41 GB" - ], - [ - "70b-text-q4_K_S", - "39 GB" - ], - [ - "70b-text-q5_0", - "47 GB" - ], - [ - "70b-text-q5_1", - "52 GB" - ], - [ - "70b-text-q5_K_M", - "49 GB" - ], - [ - "70b-text-q5_K_S", - "47 GB" - ], - [ - "70b-text-q6_K", - "57 GB" - ], - [ - "70b-text-q8_0", - "73 GB" - ], - [ - "13b-chat", - "7.4 GB" - ], - [ - "13b-chat-fp16", - "26 GB" - ], - [ - "13b-chat-q2_K", - "5.4 GB" - ], - [ - "13b-chat-q3_K_L", - "6.9 GB" - ], - [ - "13b-chat-q3_K_M", - "6.3 GB" - ], - [ - 
"13b-chat-q3_K_S", - "5.7 GB" - ], - [ - "13b-chat-q4_0", - "7.4 GB" - ], - [ - "13b-chat-q4_1", - "8.2 GB" - ], - [ - "13b-chat-q4_K_M", - "7.9 GB" - ], - [ - "13b-chat-q4_K_S", - "7.4 GB" - ], - [ - "13b-chat-q5_0", - "9.0 GB" - ], - [ - "13b-chat-q5_1", - "9.8 GB" - ], - [ - "13b-chat-q5_K_M", - "9.2 GB" - ], - [ - "13b-chat-q5_K_S", - "9.0 GB" - ], - [ - "13b-chat-q6_K", - "11 GB" - ], - [ - "13b-chat-q8_0", - "14 GB" - ], - [ - "13b-text", - "7.4 GB" - ], - [ - "13b-text-fp16", - "26 GB" - ], - [ - "13b-text-q2_K", - "5.4 GB" - ], - [ - "13b-text-q3_K_L", - "6.9 GB" - ], - [ - "13b-text-q3_K_M", - "6.3 GB" - ], - [ - "13b-text-q3_K_S", - "5.7 GB" - ], - [ - "13b-text-q4_0", - "7.4 GB" - ], - [ - "13b-text-q4_1", - "8.2 GB" - ], - [ - "13b-text-q4_K_M", - "7.9 GB" - ], - [ - "13b-text-q4_K_S", - "7.4 GB" - ], - [ - "13b-text-q5_0", - "9.0 GB" - ], - [ - "13b-text-q5_1", - "9.8 GB" - ], - [ - "13b-text-q5_K_M", - "9.2 GB" - ], - [ - "13b-text-q5_K_S", - "9.0 GB" - ], - [ - "13b-text-q6_K", - "11 GB" - ], - [ - "13b-text-q8_0", - "14 GB" + "3.8\u202fGB" ], [ "7b-chat", - "3.8 GB" - ], - [ - "7b-chat-fp16", - "13 GB" - ], - [ - "7b-chat-q2_K", - "2.8 GB" - ], - [ - "7b-chat-q3_K_L", - "3.6 GB" - ], - [ - "7b-chat-q3_K_M", - "3.3 GB" - ], - [ - "7b-chat-q3_K_S", - "2.9 GB" - ], - [ - "7b-chat-q4_0", - "3.8 GB" - ], - [ - "7b-chat-q4_1", - "4.2 GB" - ], - [ - "7b-chat-q4_K_M", - "4.1 GB" - ], - [ - "7b-chat-q4_K_S", - "3.9 GB" - ], - [ - "7b-chat-q5_0", - "4.7 GB" - ], - [ - "7b-chat-q5_1", - "5.1 GB" - ], - [ - "7b-chat-q5_K_M", - "4.8 GB" - ], - [ - "7b-chat-q5_K_S", - "4.7 GB" - ], - [ - "7b-chat-q6_K", - "5.5 GB" - ], - [ - "7b-chat-q8_0", - "7.2 GB" + "3.8\u202fGB" ], [ "7b-text", - "3.8 GB" + "3.8\u202fGB" + ], + [ + "13b-chat", + "7.4\u202fGB" + ], + [ + "13b-text", + "7.4\u202fGB" + ], + [ + "70b-chat", + "39\u202fGB" + ], + [ + "70b-text", + "39\u202fGB" + ], + [ + "7b-chat-fp16", + "13\u202fGB" + ], + [ + "7b-chat-q2_K", + "2.8\u202fGB" + ], + [ + "7b-chat-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-chat-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-chat-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-chat-q4_0", + "3.8\u202fGB" + ], + [ + "7b-chat-q4_1", + "4.2\u202fGB" + ], + [ + "7b-chat-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-chat-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-chat-q5_0", + "4.7\u202fGB" + ], + [ + "7b-chat-q5_1", + "5.1\u202fGB" + ], + [ + "7b-chat-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-chat-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-chat-q6_K", + "5.5\u202fGB" + ], + [ + "7b-chat-q8_0", + "7.2\u202fGB" ], [ "7b-text-fp16", - "13 GB" + "13\u202fGB" ], [ "7b-text-q2_K", - "2.8 GB" - ], - [ - "7b-text-q3_K_L", - "3.6 GB" - ], - [ - "7b-text-q3_K_M", - "3.3 GB" + "2.8\u202fGB" ], [ "7b-text-q3_K_S", - "2.9 GB" + "2.9\u202fGB" + ], + [ + "7b-text-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-text-q3_K_L", + "3.6\u202fGB" ], [ "7b-text-q4_0", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-text-q4_1", - "4.2 GB" - ], - [ - "7b-text-q4_K_M", - "4.1 GB" + "4.2\u202fGB" ], [ "7b-text-q4_K_S", - "3.9 GB" + "3.9\u202fGB" + ], + [ + "7b-text-q4_K_M", + "4.1\u202fGB" ], [ "7b-text-q5_0", - "4.7 GB" + "4.7\u202fGB" ], [ "7b-text-q5_1", - "5.1 GB" - ], - [ - "7b-text-q5_K_M", - "4.8 GB" + "5.1\u202fGB" ], [ "7b-text-q5_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "7b-text-q5_K_M", + "4.8\u202fGB" ], [ "7b-text-q6_K", - "5.5 GB" + "5.5\u202fGB" ], [ "7b-text-q8_0", - "7.2 GB" + "7.2\u202fGB" + ], + [ + "13b-chat-fp16", + "26\u202fGB" + ], + [ + "13b-chat-q2_K", + "5.4\u202fGB" + ], + [ + "13b-chat-q3_K_S", + 
"5.7\u202fGB" + ], + [ + "13b-chat-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-chat-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-chat-q4_0", + "7.4\u202fGB" + ], + [ + "13b-chat-q4_1", + "8.2\u202fGB" + ], + [ + "13b-chat-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-chat-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-chat-q5_0", + "9.0\u202fGB" + ], + [ + "13b-chat-q5_1", + "9.8\u202fGB" + ], + [ + "13b-chat-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-chat-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-chat-q6_K", + "11\u202fGB" + ], + [ + "13b-chat-q8_0", + "14\u202fGB" + ], + [ + "13b-text-fp16", + "26\u202fGB" + ], + [ + "13b-text-q2_K", + "5.4\u202fGB" + ], + [ + "13b-text-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-text-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-text-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-text-q4_0", + "7.4\u202fGB" + ], + [ + "13b-text-q4_1", + "8.2\u202fGB" + ], + [ + "13b-text-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-text-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-text-q5_0", + "9.0\u202fGB" + ], + [ + "13b-text-q5_1", + "9.8\u202fGB" + ], + [ + "13b-text-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-text-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-text-q6_K", + "11\u202fGB" + ], + [ + "13b-text-q8_0", + "14\u202fGB" + ], + [ + "70b-chat-fp16", + "138\u202fGB" + ], + [ + "70b-chat-q2_K", + "29\u202fGB" + ], + [ + "70b-chat-q3_K_S", + "30\u202fGB" + ], + [ + "70b-chat-q3_K_M", + "33\u202fGB" + ], + [ + "70b-chat-q3_K_L", + "36\u202fGB" + ], + [ + "70b-chat-q4_0", + "39\u202fGB" + ], + [ + "70b-chat-q4_1", + "43\u202fGB" + ], + [ + "70b-chat-q4_K_S", + "39\u202fGB" + ], + [ + "70b-chat-q4_K_M", + "41\u202fGB" + ], + [ + "70b-chat-q5_0", + "47\u202fGB" + ], + [ + "70b-chat-q5_1", + "52\u202fGB" + ], + [ + "70b-chat-q5_K_S", + "47\u202fGB" + ], + [ + "70b-chat-q5_K_M", + "49\u202fGB" + ], + [ + "70b-chat-q6_K", + "57\u202fGB" + ], + [ + "70b-chat-q8_0", + "73\u202fGB" + ], + [ + "70b-text-fp16", + "138\u202fGB" + ], + [ + "70b-text-q2_K", + "29\u202fGB" + ], + [ + "70b-text-q3_K_S", + "30\u202fGB" + ], + [ + "70b-text-q3_K_M", + "33\u202fGB" + ], + [ + "70b-text-q3_K_L", + "36\u202fGB" + ], + [ + "70b-text-q4_0", + "39\u202fGB" + ], + [ + "70b-text-q4_1", + "43\u202fGB" + ], + [ + "70b-text-q4_K_S", + "39\u202fGB" + ], + [ + "70b-text-q4_K_M", + "41\u202fGB" + ], + [ + "70b-text-q5_0", + "47\u202fGB" + ], + [ + "70b-text-q5_1", + "52\u202fGB" + ], + [ + "70b-text-q5_K_S", + "47\u202fGB" + ], + [ + "70b-text-q5_K_M", + "49\u202fGB" + ], + [ + "70b-text-q6_K", + "57\u202fGB" + ], + [ + "70b-text-q8_0", + "73\u202fGB" ] ], "image": false, @@ -5589,1323 +6069,1179 @@ "tags": [ [ "latest", - "3.8 GB" - ], - [ - "70b", - "39 GB" - ], - [ - "34b", - "19 GB" - ], - [ - "13b", - "7.4 GB" + "3.8\u202fGB" ], [ "7b", - "3.8 GB" + "3.8\u202fGB" + ], + [ + "13b", + "7.4\u202fGB" + ], + [ + "34b", + "19\u202fGB" + ], + [ + "70b", + "39\u202fGB" ], [ "code", - "3.8 GB" + "3.8\u202fGB" ], [ "instruct", - "3.8 GB" + "3.8\u202fGB" ], [ "python", - "3.8 GB" - ], - [ - "70b-code", - "39 GB" - ], - [ - "70b-code-fp16", - "138 GB" - ], - [ - "70b-code-q2_K", - "25 GB" - ], - [ - "70b-code-q3_K_L", - "36 GB" - ], - [ - "70b-code-q3_K_M", - "33 GB" - ], - [ - "70b-code-q3_K_S", - "30 GB" - ], - [ - "70b-code-q4_0", - "39 GB" - ], - [ - "70b-code-q4_1", - "43 GB" - ], - [ - "70b-code-q4_K_M", - "41 GB" - ], - [ - "70b-code-q4_K_S", - "39 GB" - ], - [ - "70b-code-q5_0", - "47 GB" - ], - [ - "70b-code-q5_1", - "52 GB" - ], - [ - "70b-code-q5_K_M", - "49 GB" - ], - [ - "70b-code-q5_K_S", - "47 GB" - ], - [ - "70b-code-q6_K", - "57 GB" 
- ], - [ - "70b-code-q8_0", - "73 GB" - ], - [ - "70b-instruct", - "39 GB" - ], - [ - "70b-instruct-fp16", - "138 GB" - ], - [ - "70b-instruct-q2_K", - "25 GB" - ], - [ - "70b-instruct-q3_K_L", - "36 GB" - ], - [ - "70b-instruct-q3_K_M", - "33 GB" - ], - [ - "70b-instruct-q3_K_S", - "30 GB" - ], - [ - "70b-instruct-q4_0", - "39 GB" - ], - [ - "70b-instruct-q4_1", - "43 GB" - ], - [ - "70b-instruct-q4_K_M", - "41 GB" - ], - [ - "70b-instruct-q4_K_S", - "39 GB" - ], - [ - "70b-instruct-q5_0", - "47 GB" - ], - [ - "70b-instruct-q5_1", - "52 GB" - ], - [ - "70b-instruct-q5_K_M", - "49 GB" - ], - [ - "70b-instruct-q5_K_S", - "47 GB" - ], - [ - "70b-instruct-q6_K", - "57 GB" - ], - [ - "70b-instruct-q8_0", - "73 GB" - ], - [ - "70b-python", - "39 GB" - ], - [ - "70b-python-fp16", - "138 GB" - ], - [ - "70b-python-q2_K", - "25 GB" - ], - [ - "70b-python-q3_K_L", - "36 GB" - ], - [ - "70b-python-q3_K_M", - "33 GB" - ], - [ - "70b-python-q3_K_S", - "30 GB" - ], - [ - "70b-python-q4_0", - "39 GB" - ], - [ - "70b-python-q4_1", - "43 GB" - ], - [ - "70b-python-q4_K_M", - "41 GB" - ], - [ - "70b-python-q4_K_S", - "39 GB" - ], - [ - "70b-python-q5_0", - "47 GB" - ], - [ - "70b-python-q5_1", - "52 GB" - ], - [ - "70b-python-q5_K_M", - "49 GB" - ], - [ - "70b-python-q5_K_S", - "47 GB" - ], - [ - "70b-python-q6_K", - "57 GB" - ], - [ - "70b-python-q8_0", - "73 GB" - ], - [ - "34b-code", - "19 GB" - ], - [ - "34b-code-q2_K", - "14 GB" - ], - [ - "34b-code-q3_K_L", - "18 GB" - ], - [ - "34b-code-q3_K_M", - "16 GB" - ], - [ - "34b-code-q3_K_S", - "15 GB" - ], - [ - "34b-code-q4_0", - "19 GB" - ], - [ - "34b-code-q4_1", - "21 GB" - ], - [ - "34b-code-q4_K_M", - "20 GB" - ], - [ - "34b-code-q4_K_S", - "19 GB" - ], - [ - "34b-code-q5_0", - "23 GB" - ], - [ - "34b-code-q5_1", - "25 GB" - ], - [ - "34b-code-q5_K_M", - "24 GB" - ], - [ - "34b-code-q5_K_S", - "23 GB" - ], - [ - "34b-code-q6_K", - "28 GB" - ], - [ - "34b-code-q8_0", - "36 GB" - ], - [ - "34b-instruct", - "19 GB" - ], - [ - "34b-instruct-fp16", - "67 GB" - ], - [ - "34b-instruct-q2_K", - "14 GB" - ], - [ - "34b-instruct-q3_K_L", - "18 GB" - ], - [ - "34b-instruct-q3_K_M", - "16 GB" - ], - [ - "34b-instruct-q3_K_S", - "15 GB" - ], - [ - "34b-instruct-q4_0", - "19 GB" - ], - [ - "34b-instruct-q4_1", - "21 GB" - ], - [ - "34b-instruct-q4_K_M", - "20 GB" - ], - [ - "34b-instruct-q4_K_S", - "19 GB" - ], - [ - "34b-instruct-q5_0", - "23 GB" - ], - [ - "34b-instruct-q5_1", - "25 GB" - ], - [ - "34b-instruct-q5_K_M", - "24 GB" - ], - [ - "34b-instruct-q5_K_S", - "23 GB" - ], - [ - "34b-instruct-q6_K", - "28 GB" - ], - [ - "34b-instruct-q8_0", - "36 GB" - ], - [ - "34b-python", - "19 GB" - ], - [ - "34b-python-fp16", - "67 GB" - ], - [ - "34b-python-q2_K", - "14 GB" - ], - [ - "34b-python-q3_K_L", - "18 GB" - ], - [ - "34b-python-q3_K_M", - "16 GB" - ], - [ - "34b-python-q3_K_S", - "15 GB" - ], - [ - "34b-python-q4_0", - "19 GB" - ], - [ - "34b-python-q4_1", - "21 GB" - ], - [ - "34b-python-q4_K_M", - "20 GB" - ], - [ - "34b-python-q4_K_S", - "19 GB" - ], - [ - "34b-python-q5_0", - "23 GB" - ], - [ - "34b-python-q5_1", - "25 GB" - ], - [ - "34b-python-q5_K_M", - "24 GB" - ], - [ - "34b-python-q5_K_S", - "23 GB" - ], - [ - "34b-python-q6_K", - "28 GB" - ], - [ - "34b-python-q8_0", - "36 GB" - ], - [ - "13b-code", - "7.4 GB" - ], - [ - "13b-code-fp16", - "26 GB" - ], - [ - "13b-code-q2_K", - "5.4 GB" - ], - [ - "13b-code-q3_K_L", - "6.9 GB" - ], - [ - "13b-code-q3_K_M", - "6.3 GB" - ], - [ - "13b-code-q3_K_S", - "5.7 GB" - ], - [ - "13b-code-q4_0", - "7.4 GB" - 
], - [ - "13b-code-q4_1", - "8.2 GB" - ], - [ - "13b-code-q4_K_M", - "7.9 GB" - ], - [ - "13b-code-q4_K_S", - "7.4 GB" - ], - [ - "13b-code-q5_0", - "9.0 GB" - ], - [ - "13b-code-q5_1", - "9.8 GB" - ], - [ - "13b-code-q5_K_M", - "9.2 GB" - ], - [ - "13b-code-q5_K_S", - "9.0 GB" - ], - [ - "13b-code-q6_K", - "11 GB" - ], - [ - "13b-code-q8_0", - "14 GB" - ], - [ - "13b-instruct", - "7.4 GB" - ], - [ - "13b-instruct-fp16", - "26 GB" - ], - [ - "13b-instruct-q2_K", - "5.4 GB" - ], - [ - "13b-instruct-q3_K_L", - "6.9 GB" - ], - [ - "13b-instruct-q3_K_M", - "6.3 GB" - ], - [ - "13b-instruct-q3_K_S", - "5.7 GB" - ], - [ - "13b-instruct-q4_0", - "7.4 GB" - ], - [ - "13b-instruct-q4_1", - "8.2 GB" - ], - [ - "13b-instruct-q4_K_M", - "7.9 GB" - ], - [ - "13b-instruct-q4_K_S", - "7.4 GB" - ], - [ - "13b-instruct-q5_0", - "9.0 GB" - ], - [ - "13b-instruct-q5_1", - "9.8 GB" - ], - [ - "13b-instruct-q5_K_M", - "9.2 GB" - ], - [ - "13b-instruct-q5_K_S", - "9.0 GB" - ], - [ - "13b-instruct-q6_K", - "11 GB" - ], - [ - "13b-instruct-q8_0", - "14 GB" - ], - [ - "13b-python", - "7.4 GB" - ], - [ - "13b-python-fp16", - "26 GB" - ], - [ - "13b-python-q2_K", - "5.4 GB" - ], - [ - "13b-python-q3_K_L", - "6.9 GB" - ], - [ - "13b-python-q3_K_M", - "6.3 GB" - ], - [ - "13b-python-q3_K_S", - "5.7 GB" - ], - [ - "13b-python-q4_0", - "7.4 GB" - ], - [ - "13b-python-q4_1", - "8.2 GB" - ], - [ - "13b-python-q4_K_M", - "7.9 GB" - ], - [ - "13b-python-q4_K_S", - "7.4 GB" - ], - [ - "13b-python-q5_0", - "9.0 GB" - ], - [ - "13b-python-q5_1", - "9.8 GB" - ], - [ - "13b-python-q5_K_M", - "9.2 GB" - ], - [ - "13b-python-q5_K_S", - "9.0 GB" - ], - [ - "13b-python-q6_K", - "11 GB" - ], - [ - "13b-python-q8_0", - "14 GB" + "3.8\u202fGB" ], [ "7b-code", - "3.8 GB" - ], - [ - "7b-code-fp16", - "13 GB" - ], - [ - "7b-code-q2_K", - "2.8 GB" - ], - [ - "7b-code-q3_K_L", - "3.6 GB" - ], - [ - "7b-code-q3_K_M", - "3.3 GB" - ], - [ - "7b-code-q3_K_S", - "2.9 GB" - ], - [ - "7b-code-q4_0", - "3.8 GB" - ], - [ - "7b-code-q4_1", - "4.2 GB" - ], - [ - "7b-code-q4_K_M", - "4.1 GB" - ], - [ - "7b-code-q4_K_S", - "3.9 GB" - ], - [ - "7b-code-q5_0", - "4.7 GB" - ], - [ - "7b-code-q5_1", - "5.1 GB" - ], - [ - "7b-code-q5_K_M", - "4.8 GB" - ], - [ - "7b-code-q5_K_S", - "4.7 GB" - ], - [ - "7b-code-q6_K", - "5.5 GB" - ], - [ - "7b-code-q8_0", - "7.2 GB" + "3.8\u202fGB" ], [ "7b-instruct", - "3.8 GB" - ], - [ - "7b-instruct-fp16", - "13 GB" - ], - [ - "7b-instruct-q2_K", - "2.8 GB" - ], - [ - "7b-instruct-q3_K_L", - "3.6 GB" - ], - [ - "7b-instruct-q3_K_M", - "3.3 GB" - ], - [ - "7b-instruct-q3_K_S", - "2.9 GB" - ], - [ - "7b-instruct-q4_0", - "3.8 GB" - ], - [ - "7b-instruct-q4_1", - "4.2 GB" - ], - [ - "7b-instruct-q4_K_M", - "4.1 GB" - ], - [ - "7b-instruct-q4_K_S", - "3.9 GB" - ], - [ - "7b-instruct-q5_0", - "4.7 GB" - ], - [ - "7b-instruct-q5_1", - "5.1 GB" - ], - [ - "7b-instruct-q5_K_M", - "4.8 GB" - ], - [ - "7b-instruct-q5_K_S", - "4.7 GB" - ], - [ - "7b-instruct-q6_K", - "5.5 GB" - ], - [ - "7b-instruct-q8_0", - "7.2 GB" + "3.8\u202fGB" ], [ "7b-python", - "3.8 GB" + "3.8\u202fGB" + ], + [ + "13b-code", + "7.4\u202fGB" + ], + [ + "13b-instruct", + "7.4\u202fGB" + ], + [ + "13b-python", + "7.4\u202fGB" + ], + [ + "34b-code", + "19\u202fGB" + ], + [ + "34b-instruct", + "19\u202fGB" + ], + [ + "34b-python", + "19\u202fGB" + ], + [ + "70b-code", + "39\u202fGB" + ], + [ + "70b-instruct", + "39\u202fGB" + ], + [ + "70b-python", + "39\u202fGB" + ], + [ + "7b-code-fp16", + "13\u202fGB" + ], + [ + "7b-code-q2_K", + "2.8\u202fGB" + ], + [ + 
"7b-code-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-code-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-code-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-code-q4_0", + "3.8\u202fGB" + ], + [ + "7b-code-q4_1", + "4.2\u202fGB" + ], + [ + "7b-code-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-code-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-code-q5_0", + "4.7\u202fGB" + ], + [ + "7b-code-q5_1", + "5.1\u202fGB" + ], + [ + "7b-code-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-code-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-code-q6_K", + "5.5\u202fGB" + ], + [ + "7b-code-q8_0", + "7.2\u202fGB" + ], + [ + "7b-instruct-fp16", + "13\u202fGB" + ], + [ + "7b-instruct-q2_K", + "2.8\u202fGB" + ], + [ + "7b-instruct-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-instruct-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-instruct-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-instruct-q4_0", + "3.8\u202fGB" + ], + [ + "7b-instruct-q4_1", + "4.2\u202fGB" + ], + [ + "7b-instruct-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-instruct-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-instruct-q5_0", + "4.7\u202fGB" + ], + [ + "7b-instruct-q5_1", + "5.1\u202fGB" + ], + [ + "7b-instruct-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-instruct-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-instruct-q6_K", + "5.5\u202fGB" + ], + [ + "7b-instruct-q8_0", + "7.2\u202fGB" ], [ "7b-python-fp16", - "13 GB" + "13\u202fGB" ], [ "7b-python-q2_K", - "2.8 GB" - ], - [ - "7b-python-q3_K_L", - "3.6 GB" - ], - [ - "7b-python-q3_K_M", - "3.3 GB" + "2.8\u202fGB" ], [ "7b-python-q3_K_S", - "2.9 GB" + "2.9\u202fGB" + ], + [ + "7b-python-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-python-q3_K_L", + "3.6\u202fGB" ], [ "7b-python-q4_0", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-python-q4_1", - "4.2 GB" - ], - [ - "7b-python-q4_K_M", - "4.1 GB" + "4.2\u202fGB" ], [ "7b-python-q4_K_S", - "3.9 GB" + "3.9\u202fGB" + ], + [ + "7b-python-q4_K_M", + "4.1\u202fGB" ], [ "7b-python-q5_0", - "4.7 GB" + "4.7\u202fGB" ], [ "7b-python-q5_1", - "5.1 GB" - ], - [ - "7b-python-q5_K_M", - "4.8 GB" + "5.1\u202fGB" ], [ "7b-python-q5_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "7b-python-q5_K_M", + "4.8\u202fGB" ], [ "7b-python-q6_K", - "5.5 GB" + "5.5\u202fGB" ], [ "7b-python-q8_0", - "7.2 GB" + "7.2\u202fGB" + ], + [ + "13b-code-fp16", + "26\u202fGB" + ], + [ + "13b-code-q2_K", + "5.4\u202fGB" + ], + [ + "13b-code-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-code-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-code-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-code-q4_0", + "7.4\u202fGB" + ], + [ + "13b-code-q4_1", + "8.2\u202fGB" + ], + [ + "13b-code-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-code-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-code-q5_0", + "9.0\u202fGB" + ], + [ + "13b-code-q5_1", + "9.8\u202fGB" + ], + [ + "13b-code-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-code-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-code-q6_K", + "11\u202fGB" + ], + [ + "13b-code-q8_0", + "14\u202fGB" + ], + [ + "13b-instruct-fp16", + "26\u202fGB" + ], + [ + "13b-instruct-q2_K", + "5.4\u202fGB" + ], + [ + "13b-instruct-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-instruct-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-instruct-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-instruct-q4_0", + "7.4\u202fGB" + ], + [ + "13b-instruct-q4_1", + "8.2\u202fGB" + ], + [ + "13b-instruct-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-instruct-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-instruct-q5_0", + "9.0\u202fGB" + ], + [ + "13b-instruct-q5_1", + "9.8\u202fGB" + ], + [ + "13b-instruct-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-instruct-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-instruct-q6_K", + "11\u202fGB" + ], + [ 
+ "13b-instruct-q8_0", + "14\u202fGB" + ], + [ + "13b-python-fp16", + "26\u202fGB" + ], + [ + "13b-python-q2_K", + "5.4\u202fGB" + ], + [ + "13b-python-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-python-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-python-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-python-q4_0", + "7.4\u202fGB" + ], + [ + "13b-python-q4_1", + "8.2\u202fGB" + ], + [ + "13b-python-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-python-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-python-q5_0", + "9.0\u202fGB" + ], + [ + "13b-python-q5_1", + "9.8\u202fGB" + ], + [ + "13b-python-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-python-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-python-q6_K", + "11\u202fGB" + ], + [ + "13b-python-q8_0", + "14\u202fGB" + ], + [ + "34b-code-q2_K", + "14\u202fGB" + ], + [ + "34b-code-q3_K_S", + "15\u202fGB" + ], + [ + "34b-code-q3_K_M", + "16\u202fGB" + ], + [ + "34b-code-q3_K_L", + "18\u202fGB" + ], + [ + "34b-code-q4_0", + "19\u202fGB" + ], + [ + "34b-code-q4_1", + "21\u202fGB" + ], + [ + "34b-code-q4_K_S", + "19\u202fGB" + ], + [ + "34b-code-q4_K_M", + "20\u202fGB" + ], + [ + "34b-code-q5_0", + "23\u202fGB" + ], + [ + "34b-code-q5_1", + "25\u202fGB" + ], + [ + "34b-code-q5_K_S", + "23\u202fGB" + ], + [ + "34b-code-q5_K_M", + "24\u202fGB" + ], + [ + "34b-code-q6_K", + "28\u202fGB" + ], + [ + "34b-code-q8_0", + "36\u202fGB" + ], + [ + "34b-instruct-fp16", + "67\u202fGB" + ], + [ + "34b-instruct-q2_K", + "14\u202fGB" + ], + [ + "34b-instruct-q3_K_S", + "15\u202fGB" + ], + [ + "34b-instruct-q3_K_M", + "16\u202fGB" + ], + [ + "34b-instruct-q3_K_L", + "18\u202fGB" + ], + [ + "34b-instruct-q4_0", + "19\u202fGB" + ], + [ + "34b-instruct-q4_1", + "21\u202fGB" + ], + [ + "34b-instruct-q4_K_S", + "19\u202fGB" + ], + [ + "34b-instruct-q4_K_M", + "20\u202fGB" + ], + [ + "34b-instruct-q5_0", + "23\u202fGB" + ], + [ + "34b-instruct-q5_1", + "25\u202fGB" + ], + [ + "34b-instruct-q5_K_S", + "23\u202fGB" + ], + [ + "34b-instruct-q5_K_M", + "24\u202fGB" + ], + [ + "34b-instruct-q6_K", + "28\u202fGB" + ], + [ + "34b-instruct-q8_0", + "36\u202fGB" + ], + [ + "34b-python-fp16", + "67\u202fGB" + ], + [ + "34b-python-q2_K", + "14\u202fGB" + ], + [ + "34b-python-q3_K_S", + "15\u202fGB" + ], + [ + "34b-python-q3_K_M", + "16\u202fGB" + ], + [ + "34b-python-q3_K_L", + "18\u202fGB" + ], + [ + "34b-python-q4_0", + "19\u202fGB" + ], + [ + "34b-python-q4_1", + "21\u202fGB" + ], + [ + "34b-python-q4_K_S", + "19\u202fGB" + ], + [ + "34b-python-q4_K_M", + "20\u202fGB" + ], + [ + "34b-python-q5_0", + "23\u202fGB" + ], + [ + "34b-python-q5_1", + "25\u202fGB" + ], + [ + "34b-python-q5_K_S", + "23\u202fGB" + ], + [ + "34b-python-q5_K_M", + "24\u202fGB" + ], + [ + "34b-python-q6_K", + "28\u202fGB" + ], + [ + "34b-python-q8_0", + "36\u202fGB" + ], + [ + "70b-code-fp16", + "138\u202fGB" + ], + [ + "70b-code-q2_K", + "25\u202fGB" + ], + [ + "70b-code-q3_K_S", + "30\u202fGB" + ], + [ + "70b-code-q3_K_M", + "33\u202fGB" + ], + [ + "70b-code-q3_K_L", + "36\u202fGB" + ], + [ + "70b-code-q4_0", + "39\u202fGB" + ], + [ + "70b-code-q4_1", + "43\u202fGB" + ], + [ + "70b-code-q4_K_S", + "39\u202fGB" + ], + [ + "70b-code-q4_K_M", + "41\u202fGB" + ], + [ + "70b-code-q5_0", + "47\u202fGB" + ], + [ + "70b-code-q5_1", + "52\u202fGB" + ], + [ + "70b-code-q5_K_S", + "47\u202fGB" + ], + [ + "70b-code-q5_K_M", + "49\u202fGB" + ], + [ + "70b-code-q6_K", + "57\u202fGB" + ], + [ + "70b-code-q8_0", + "73\u202fGB" + ], + [ + "70b-instruct-fp16", + "138\u202fGB" + ], + [ + "70b-instruct-q2_K", + "25\u202fGB" + ], + [ + "70b-instruct-q3_K_S", + 
"30\u202fGB" + ], + [ + "70b-instruct-q3_K_M", + "33\u202fGB" + ], + [ + "70b-instruct-q3_K_L", + "36\u202fGB" + ], + [ + "70b-instruct-q4_0", + "39\u202fGB" + ], + [ + "70b-instruct-q4_1", + "43\u202fGB" + ], + [ + "70b-instruct-q4_K_S", + "39\u202fGB" + ], + [ + "70b-instruct-q4_K_M", + "41\u202fGB" + ], + [ + "70b-instruct-q5_0", + "47\u202fGB" + ], + [ + "70b-instruct-q5_1", + "52\u202fGB" + ], + [ + "70b-instruct-q5_K_S", + "47\u202fGB" + ], + [ + "70b-instruct-q5_K_M", + "49\u202fGB" + ], + [ + "70b-instruct-q6_K", + "57\u202fGB" + ], + [ + "70b-instruct-q8_0", + "73\u202fGB" + ], + [ + "70b-python-fp16", + "138\u202fGB" + ], + [ + "70b-python-q2_K", + "25\u202fGB" + ], + [ + "70b-python-q3_K_S", + "30\u202fGB" + ], + [ + "70b-python-q3_K_M", + "33\u202fGB" + ], + [ + "70b-python-q3_K_L", + "36\u202fGB" + ], + [ + "70b-python-q4_0", + "39\u202fGB" + ], + [ + "70b-python-q4_1", + "43\u202fGB" + ], + [ + "70b-python-q4_K_S", + "39\u202fGB" + ], + [ + "70b-python-q4_K_M", + "41\u202fGB" + ], + [ + "70b-python-q5_0", + "47\u202fGB" + ], + [ + "70b-python-q5_1", + "52\u202fGB" + ], + [ + "70b-python-q5_K_S", + "47\u202fGB" + ], + [ + "70b-python-q5_K_M", + "49\u202fGB" + ], + [ + "70b-python-q6_K", + "57\u202fGB" + ], + [ + "70b-python-q8_0", + "73\u202fGB" ] ], "image": false, "author": "Meta" }, - "dolphin-mixtral": { - "url": "https://ollama.com/library/dolphin-mixtral", - "description": "Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of experts models that excels at coding tasks. Created by Eric Hartford.", - "tags": [ - [ - "latest", - "26 GB" - ], - [ - "8x22b", - "80 GB" - ], - [ - "8x7b", - "26 GB" - ], - [ - "v2.5", - "26 GB" - ], - [ - "v2.6", - "26 GB" - ], - [ - "v2.6.1", - "26 GB" - ], - [ - "v2.7", - "26 GB" - ], - [ - "8x7b-v2.5", - "26 GB" - ], - [ - "8x7b-v2.5-fp16", - "93 GB" - ], - [ - "8x7b-v2.5-q2_K", - "16 GB" - ], - [ - "8x7b-v2.5-q3_K_L", - "20 GB" - ], - [ - "8x7b-v2.5-q3_K_M", - "20 GB" - ], - [ - "8x7b-v2.5-q3_K_S", - "20 GB" - ], - [ - "8x7b-v2.5-q4_0", - "26 GB" - ], - [ - "8x7b-v2.5-q4_1", - "29 GB" - ], - [ - "8x7b-v2.5-q4_K_M", - "26 GB" - ], - [ - "8x7b-v2.5-q4_K_S", - "26 GB" - ], - [ - "8x7b-v2.5-q5_0", - "32 GB" - ], - [ - "8x7b-v2.5-q5_1", - "35 GB" - ], - [ - "8x7b-v2.5-q5_K_M", - "32 GB" - ], - [ - "8x7b-v2.5-q5_K_S", - "32 GB" - ], - [ - "8x7b-v2.5-q6_K", - "38 GB" - ], - [ - "8x7b-v2.5-q8_0", - "50 GB" - ], - [ - "8x7b-v2.6", - "26 GB" - ], - [ - "8x7b-v2.6-fp16", - "93 GB" - ], - [ - "8x7b-v2.6-q2_K", - "16 GB" - ], - [ - "8x7b-v2.6-q3_K_L", - "20 GB" - ], - [ - "8x7b-v2.6-q3_K_M", - "20 GB" - ], - [ - "8x7b-v2.6-q3_K_S", - "20 GB" - ], - [ - "8x7b-v2.6-q4_0", - "26 GB" - ], - [ - "8x7b-v2.6-q4_1", - "29 GB" - ], - [ - "8x7b-v2.6-q4_K_M", - "26 GB" - ], - [ - "8x7b-v2.6-q4_K_S", - "26 GB" - ], - [ - "8x7b-v2.6-q5_0", - "32 GB" - ], - [ - "8x7b-v2.6-q5_1", - "35 GB" - ], - [ - "8x7b-v2.6-q5_K_M", - "32 GB" - ], - [ - "8x7b-v2.6-q5_K_S", - "32 GB" - ], - [ - "8x7b-v2.6-q6_K", - "38 GB" - ], - [ - "8x7b-v2.6-q8_0", - "50 GB" - ], - [ - "8x7b-v2.6.1", - "26 GB" - ], - [ - "8x7b-v2.6.1-fp16", - "93 GB" - ], - [ - "8x7b-v2.6.1-q2_K", - "16 GB" - ], - [ - "8x7b-v2.6.1-q3_K_L", - "20 GB" - ], - [ - "8x7b-v2.6.1-q3_K_M", - "20 GB" - ], - [ - "8x7b-v2.6.1-q3_K_S", - "20 GB" - ], - [ - "8x7b-v2.6.1-q4_0", - "26 GB" - ], - [ - "8x7b-v2.6.1-q4_1", - "29 GB" - ], - [ - "8x7b-v2.6.1-q4_K_M", - "26 GB" - ], - [ - "8x7b-v2.6.1-q4_K_S", - "26 GB" - ], - [ - "8x7b-v2.6.1-q5_0", - "32 GB" - ], - [ - "8x7b-v2.6.1-q5_1", - "35 GB" - ], - [ - 
"8x7b-v2.6.1-q5_K_M", - "32 GB" - ], - [ - "8x7b-v2.6.1-q5_K_S", - "32 GB" - ], - [ - "8x7b-v2.6.1-q6_K", - "38 GB" - ], - [ - "8x7b-v2.6.1-q8_0", - "50 GB" - ], - [ - "8x7b-v2.7", - "26 GB" - ], - [ - "8x7b-v2.7-fp16", - "93 GB" - ], - [ - "8x7b-v2.7-q2_K", - "16 GB" - ], - [ - "8x7b-v2.7-q3_K_L", - "20 GB" - ], - [ - "8x7b-v2.7-q3_K_M", - "20 GB" - ], - [ - "8x7b-v2.7-q3_K_S", - "20 GB" - ], - [ - "8x7b-v2.7-q4_0", - "26 GB" - ], - [ - "8x7b-v2.7-q4_1", - "29 GB" - ], - [ - "8x7b-v2.7-q4_K_M", - "26 GB" - ], - [ - "8x7b-v2.7-q4_K_S", - "26 GB" - ], - [ - "8x7b-v2.7-q5_0", - "32 GB" - ], - [ - "8x7b-v2.7-q5_1", - "35 GB" - ], - [ - "8x7b-v2.7-q5_K_M", - "32 GB" - ], - [ - "8x7b-v2.7-q5_K_S", - "32 GB" - ], - [ - "8x7b-v2.7-q6_K", - "38 GB" - ], - [ - "8x7b-v2.7-q8_0", - "50 GB" - ], - [ - "8x22b-v2.9", - "80 GB" - ], - [ - "8x22b-v2.9-fp16", - "281 GB" - ], - [ - "8x22b-v2.9-q2_K", - "52 GB" - ], - [ - "8x22b-v2.9-q3_K_L", - "73 GB" - ], - [ - "8x22b-v2.9-q3_K_M", - "68 GB" - ], - [ - "8x22b-v2.9-q3_K_S", - "61 GB" - ], - [ - "8x22b-v2.9-q4_0", - "80 GB" - ], - [ - "8x22b-v2.9-q4_1", - "88 GB" - ], - [ - "8x22b-v2.9-q4_K_M", - "86 GB" - ], - [ - "8x22b-v2.9-q4_K_S", - "80 GB" - ], - [ - "8x22b-v2.9-q5_0", - "97 GB" - ], - [ - "8x22b-v2.9-q5_1", - "106 GB" - ], - [ - "8x22b-v2.9-q5_K_M", - "100 GB" - ], - [ - "8x22b-v2.9-q5_K_S", - "97 GB" - ], - [ - "8x22b-v2.9-q6_K", - "116 GB" - ], - [ - "8x22b-v2.9-q8_0", - "149 GB" - ] - ], - "image": false, - "author": "Eric Hartford" - }, "nomic-embed-text": { "url": "https://ollama.com/library/nomic-embed-text", "description": "A high-performing open embedding model with a large token context window.", "tags": [ [ "latest", - "274 MB" + "274\u202fMB" ], [ "v1.5", - "274 MB" + "274\u202fMB" ], [ "137m-v1.5-fp16", - "274 MB" + "274\u202fMB" ] ], "image": false, "author": "Nomic AI" }, - "llama2-uncensored": { - "url": "https://ollama.com/library/llama2-uncensored", - "description": "Uncensored Llama 2 model by George Sung and Jarrad Hope.", + "dolphin-mixtral": { + "url": "https://ollama.com/library/dolphin-mixtral", + "description": "Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of experts models that excels at coding tasks. 
Created by Eric Hartford.", "tags": [ [ "latest", - "3.8 GB" + "26\u202fGB" ], [ - "70b", - "39 GB" + "8x7b", + "26\u202fGB" ], [ - "7b", - "3.8 GB" + "8x22b", + "80\u202fGB" ], [ - "70b-chat", - "39 GB" + "v2.5", + "26\u202fGB" ], [ - "70b-chat-q2_K", - "29 GB" + "v2.6", + "26\u202fGB" ], [ - "70b-chat-q3_K_L", - "36 GB" + "v2.6.1", + "26\u202fGB" ], [ - "70b-chat-q3_K_M", - "33 GB" + "v2.7", + "26\u202fGB" ], [ - "70b-chat-q3_K_S", - "30 GB" + "8x7b-v2.5", + "26\u202fGB" ], [ - "70b-chat-q4_0", - "39 GB" + "8x7b-v2.6", + "26\u202fGB" ], [ - "70b-chat-q4_1", - "43 GB" + "8x7b-v2.6.1", + "26\u202fGB" ], [ - "70b-chat-q4_K_M", - "41 GB" + "8x7b-v2.7", + "26\u202fGB" ], [ - "70b-chat-q4_K_S", - "39 GB" + "8x22b-v2.9", + "80\u202fGB" ], [ - "70b-chat-q5_0", - "47 GB" + "8x7b-v2.5-fp16", + "93\u202fGB" ], [ - "70b-chat-q5_1", - "52 GB" + "8x7b-v2.5-q2_K", + "16\u202fGB" ], [ - "70b-chat-q5_K_M", - "49 GB" + "8x7b-v2.5-q3_K_S", + "20\u202fGB" ], [ - "70b-chat-q5_K_S", - "47 GB" + "8x7b-v2.5-q3_K_M", + "20\u202fGB" ], [ - "70b-chat-q6_K", - "57 GB" + "8x7b-v2.5-q3_K_L", + "20\u202fGB" ], [ - "70b-chat-q8_0", - "73 GB" + "8x7b-v2.5-q4_0", + "26\u202fGB" ], [ - "7b-chat", - "3.8 GB" + "8x7b-v2.5-q4_1", + "29\u202fGB" ], [ - "7b-chat-fp16", - "13 GB" + "8x7b-v2.5-q4_K_S", + "26\u202fGB" ], [ - "7b-chat-q2_K", - "2.8 GB" + "8x7b-v2.5-q4_K_M", + "26\u202fGB" ], [ - "7b-chat-q3_K_L", - "3.6 GB" + "8x7b-v2.5-q5_0", + "32\u202fGB" ], [ - "7b-chat-q3_K_M", - "3.3 GB" + "8x7b-v2.5-q5_1", + "35\u202fGB" ], [ - "7b-chat-q3_K_S", - "2.9 GB" + "8x7b-v2.5-q5_K_S", + "32\u202fGB" ], [ - "7b-chat-q4_0", - "3.8 GB" + "8x7b-v2.5-q5_K_M", + "32\u202fGB" ], [ - "7b-chat-q4_1", - "4.2 GB" + "8x7b-v2.5-q6_K", + "38\u202fGB" ], [ - "7b-chat-q4_K_M", - "4.1 GB" + "8x7b-v2.5-q8_0", + "50\u202fGB" ], [ - "7b-chat-q4_K_S", - "3.9 GB" + "8x7b-v2.6-fp16", + "93\u202fGB" ], [ - "7b-chat-q5_0", - "4.7 GB" + "8x7b-v2.6-q2_K", + "16\u202fGB" ], [ - "7b-chat-q5_1", - "5.1 GB" + "8x7b-v2.6-q3_K_S", + "20\u202fGB" ], [ - "7b-chat-q5_K_M", - "4.8 GB" + "8x7b-v2.6-q3_K_M", + "20\u202fGB" ], [ - "7b-chat-q5_K_S", - "4.7 GB" + "8x7b-v2.6-q3_K_L", + "20\u202fGB" ], [ - "7b-chat-q6_K", - "5.5 GB" + "8x7b-v2.6-q4_0", + "26\u202fGB" ], [ - "7b-chat-q8_0", - "7.2 GB" + "8x7b-v2.6-q4_1", + "29\u202fGB" + ], + [ + "8x7b-v2.6-q4_K_S", + "26\u202fGB" + ], + [ + "8x7b-v2.6-q4_K_M", + "26\u202fGB" + ], + [ + "8x7b-v2.6-q5_0", + "32\u202fGB" + ], + [ + "8x7b-v2.6-q5_1", + "35\u202fGB" + ], + [ + "8x7b-v2.6-q5_K_S", + "32\u202fGB" + ], + [ + "8x7b-v2.6-q5_K_M", + "32\u202fGB" + ], + [ + "8x7b-v2.6-q6_K", + "38\u202fGB" + ], + [ + "8x7b-v2.6-q8_0", + "50\u202fGB" + ], + [ + "8x7b-v2.6.1-fp16", + "93\u202fGB" + ], + [ + "8x7b-v2.6.1-q2_K", + "16\u202fGB" + ], + [ + "8x7b-v2.6.1-q3_K_S", + "20\u202fGB" + ], + [ + "8x7b-v2.6.1-q3_K_M", + "20\u202fGB" + ], + [ + "8x7b-v2.6.1-q3_K_L", + "20\u202fGB" + ], + [ + "8x7b-v2.6.1-q4_0", + "26\u202fGB" + ], + [ + "8x7b-v2.6.1-q4_1", + "29\u202fGB" + ], + [ + "8x7b-v2.6.1-q4_K_S", + "26\u202fGB" + ], + [ + "8x7b-v2.6.1-q4_K_M", + "26\u202fGB" + ], + [ + "8x7b-v2.6.1-q5_0", + "32\u202fGB" + ], + [ + "8x7b-v2.6.1-q5_1", + "35\u202fGB" + ], + [ + "8x7b-v2.6.1-q5_K_S", + "32\u202fGB" + ], + [ + "8x7b-v2.6.1-q5_K_M", + "32\u202fGB" + ], + [ + "8x7b-v2.6.1-q6_K", + "38\u202fGB" + ], + [ + "8x7b-v2.6.1-q8_0", + "50\u202fGB" + ], + [ + "8x7b-v2.7-fp16", + "93\u202fGB" + ], + [ + "8x7b-v2.7-q2_K", + "16\u202fGB" + ], + [ + "8x7b-v2.7-q3_K_S", + "20\u202fGB" + ], + [ + "8x7b-v2.7-q3_K_M", + "20\u202fGB" + ], + [ + 
"8x7b-v2.7-q3_K_L", + "20\u202fGB" + ], + [ + "8x7b-v2.7-q4_0", + "26\u202fGB" + ], + [ + "8x7b-v2.7-q4_1", + "29\u202fGB" + ], + [ + "8x7b-v2.7-q4_K_S", + "26\u202fGB" + ], + [ + "8x7b-v2.7-q4_K_M", + "26\u202fGB" + ], + [ + "8x7b-v2.7-q5_0", + "32\u202fGB" + ], + [ + "8x7b-v2.7-q5_1", + "35\u202fGB" + ], + [ + "8x7b-v2.7-q5_K_S", + "32\u202fGB" + ], + [ + "8x7b-v2.7-q5_K_M", + "32\u202fGB" + ], + [ + "8x7b-v2.7-q6_K", + "38\u202fGB" + ], + [ + "8x7b-v2.7-q8_0", + "50\u202fGB" + ], + [ + "8x22b-v2.9-fp16", + "281\u202fGB" + ], + [ + "8x22b-v2.9-q2_K", + "52\u202fGB" + ], + [ + "8x22b-v2.9-q3_K_S", + "61\u202fGB" + ], + [ + "8x22b-v2.9-q3_K_M", + "68\u202fGB" + ], + [ + "8x22b-v2.9-q3_K_L", + "73\u202fGB" + ], + [ + "8x22b-v2.9-q4_0", + "80\u202fGB" + ], + [ + "8x22b-v2.9-q4_1", + "88\u202fGB" + ], + [ + "8x22b-v2.9-q4_K_S", + "80\u202fGB" + ], + [ + "8x22b-v2.9-q4_K_M", + "86\u202fGB" + ], + [ + "8x22b-v2.9-q5_0", + "97\u202fGB" + ], + [ + "8x22b-v2.9-q5_1", + "106\u202fGB" + ], + [ + "8x22b-v2.9-q5_K_S", + "97\u202fGB" + ], + [ + "8x22b-v2.9-q5_K_M", + "100\u202fGB" + ], + [ + "8x22b-v2.9-q6_K", + "116\u202fGB" + ], + [ + "8x22b-v2.9-q8_0", + "149\u202fGB" ] ], "image": false, - "author": "George Sung, Jarrad Hope" + "author": "Eric Hartford" }, "phi": { "url": "https://ollama.com/library/phi", @@ -6913,1463 +7249,2075 @@ "tags": [ [ "latest", - "1.6 GB" + "1.6\u202fGB" ], [ "2.7b", - "1.6 GB" + "1.6\u202fGB" ], [ "chat", - "1.6 GB" + "1.6\u202fGB" ], [ "2.7b-chat-v2-fp16", - "5.6 GB" + "5.6\u202fGB" ], [ "2.7b-chat-v2-q2_K", - "1.2 GB" - ], - [ - "2.7b-chat-v2-q3_K_L", - "1.6 GB" - ], - [ - "2.7b-chat-v2-q3_K_M", - "1.5 GB" + "1.2\u202fGB" ], [ "2.7b-chat-v2-q3_K_S", - "1.3 GB" + "1.3\u202fGB" + ], + [ + "2.7b-chat-v2-q3_K_M", + "1.5\u202fGB" + ], + [ + "2.7b-chat-v2-q3_K_L", + "1.6\u202fGB" ], [ "2.7b-chat-v2-q4_0", - "1.6 GB" + "1.6\u202fGB" ], [ "2.7b-chat-v2-q4_1", - "1.8 GB" - ], - [ - "2.7b-chat-v2-q4_K_M", - "1.8 GB" + "1.8\u202fGB" ], [ "2.7b-chat-v2-q4_K_S", - "1.6 GB" + "1.6\u202fGB" + ], + [ + "2.7b-chat-v2-q4_K_M", + "1.8\u202fGB" ], [ "2.7b-chat-v2-q5_0", - "1.9 GB" + "1.9\u202fGB" ], [ "2.7b-chat-v2-q5_1", - "2.1 GB" - ], - [ - "2.7b-chat-v2-q5_K_M", - "2.1 GB" + "2.1\u202fGB" ], [ "2.7b-chat-v2-q5_K_S", - "1.9 GB" + "1.9\u202fGB" + ], + [ + "2.7b-chat-v2-q5_K_M", + "2.1\u202fGB" ], [ "2.7b-chat-v2-q6_K", - "2.3 GB" + "2.3\u202fGB" ], [ "2.7b-chat-v2-q8_0", - "3.0 GB" + "3.0\u202fGB" ] ], "image": false, "author": "Microsoft" }, + "llama2-uncensored": { + "url": "https://ollama.com/library/llama2-uncensored", + "description": "Uncensored Llama 2 model by George Sung and Jarrad Hope.", + "tags": [ + [ + "latest", + "3.8\u202fGB" + ], + [ + "7b", + "3.8\u202fGB" + ], + [ + "70b", + "39\u202fGB" + ], + [ + "7b-chat", + "3.8\u202fGB" + ], + [ + "70b-chat", + "39\u202fGB" + ], + [ + "7b-chat-fp16", + "13\u202fGB" + ], + [ + "7b-chat-q2_K", + "2.8\u202fGB" + ], + [ + "7b-chat-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-chat-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-chat-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-chat-q4_0", + "3.8\u202fGB" + ], + [ + "7b-chat-q4_1", + "4.2\u202fGB" + ], + [ + "7b-chat-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-chat-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-chat-q5_0", + "4.7\u202fGB" + ], + [ + "7b-chat-q5_1", + "5.1\u202fGB" + ], + [ + "7b-chat-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-chat-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-chat-q6_K", + "5.5\u202fGB" + ], + [ + "7b-chat-q8_0", + "7.2\u202fGB" + ], + [ + "70b-chat-q2_K", + "29\u202fGB" + ], + [ + 
"70b-chat-q3_K_S", + "30\u202fGB" + ], + [ + "70b-chat-q3_K_M", + "33\u202fGB" + ], + [ + "70b-chat-q3_K_L", + "36\u202fGB" + ], + [ + "70b-chat-q4_0", + "39\u202fGB" + ], + [ + "70b-chat-q4_1", + "43\u202fGB" + ], + [ + "70b-chat-q4_K_S", + "39\u202fGB" + ], + [ + "70b-chat-q4_K_M", + "41\u202fGB" + ], + [ + "70b-chat-q5_0", + "47\u202fGB" + ], + [ + "70b-chat-q5_1", + "52\u202fGB" + ], + [ + "70b-chat-q5_K_S", + "47\u202fGB" + ], + [ + "70b-chat-q5_K_M", + "49\u202fGB" + ], + [ + "70b-chat-q6_K", + "57\u202fGB" + ], + [ + "70b-chat-q8_0", + "73\u202fGB" + ] + ], + "image": false, + "author": "George Sung, Jarrad Hope" + }, "deepseek-coder": { "url": "https://ollama.com/library/deepseek-coder", "description": "DeepSeek Coder is a capable coding model trained on two trillion code and natural language tokens.", "tags": [ [ "latest", - "776 MB" - ], - [ - "33b", - "19 GB" - ], - [ - "6.7b", - "3.8 GB" + "776\u202fMB" ], [ "1.3b", - "776 MB" + "776\u202fMB" + ], + [ + "6.7b", + "3.8\u202fGB" + ], + [ + "33b", + "19\u202fGB" ], [ "base", - "776 MB" + "776\u202fMB" ], [ "instruct", - "776 MB" - ], - [ - "33b-base", - "19 GB" - ], - [ - "33b-base-fp16", - "67 GB" - ], - [ - "33b-base-q2_K", - "14 GB" - ], - [ - "33b-base-q3_K_L", - "18 GB" - ], - [ - "33b-base-q3_K_M", - "16 GB" - ], - [ - "33b-base-q3_K_S", - "14 GB" - ], - [ - "33b-base-q4_0", - "19 GB" - ], - [ - "33b-base-q4_1", - "21 GB" - ], - [ - "33b-base-q4_K_M", - "20 GB" - ], - [ - "33b-base-q4_K_S", - "19 GB" - ], - [ - "33b-base-q5_0", - "23 GB" - ], - [ - "33b-base-q5_1", - "25 GB" - ], - [ - "33b-base-q5_K_M", - "24 GB" - ], - [ - "33b-base-q5_K_S", - "23 GB" - ], - [ - "33b-base-q6_K", - "27 GB" - ], - [ - "33b-base-q8_0", - "35 GB" - ], - [ - "33b-instruct", - "19 GB" - ], - [ - "33b-instruct-fp16", - "67 GB" - ], - [ - "33b-instruct-q2_K", - "14 GB" - ], - [ - "33b-instruct-q3_K_L", - "18 GB" - ], - [ - "33b-instruct-q3_K_M", - "16 GB" - ], - [ - "33b-instruct-q3_K_S", - "14 GB" - ], - [ - "33b-instruct-q4_0", - "19 GB" - ], - [ - "33b-instruct-q4_1", - "21 GB" - ], - [ - "33b-instruct-q4_K_M", - "20 GB" - ], - [ - "33b-instruct-q4_K_S", - "19 GB" - ], - [ - "33b-instruct-q5_0", - "23 GB" - ], - [ - "33b-instruct-q5_1", - "25 GB" - ], - [ - "33b-instruct-q5_K_M", - "24 GB" - ], - [ - "33b-instruct-q5_K_S", - "23 GB" - ], - [ - "33b-instruct-q6_K", - "27 GB" - ], - [ - "33b-instruct-q8_0", - "35 GB" - ], - [ - "6.7b-base", - "3.8 GB" - ], - [ - "6.7b-base-fp16", - "13 GB" - ], - [ - "6.7b-base-q2_K", - "2.8 GB" - ], - [ - "6.7b-base-q3_K_L", - "3.6 GB" - ], - [ - "6.7b-base-q3_K_M", - "3.3 GB" - ], - [ - "6.7b-base-q3_K_S", - "3.0 GB" - ], - [ - "6.7b-base-q4_0", - "3.8 GB" - ], - [ - "6.7b-base-q4_1", - "4.2 GB" - ], - [ - "6.7b-base-q4_K_M", - "4.1 GB" - ], - [ - "6.7b-base-q4_K_S", - "3.9 GB" - ], - [ - "6.7b-base-q5_0", - "4.7 GB" - ], - [ - "6.7b-base-q5_1", - "5.1 GB" - ], - [ - "6.7b-base-q5_K_M", - "4.8 GB" - ], - [ - "6.7b-base-q5_K_S", - "4.7 GB" - ], - [ - "6.7b-base-q6_K", - "5.5 GB" - ], - [ - "6.7b-base-q8_0", - "7.2 GB" - ], - [ - "6.7b-instruct", - "3.8 GB" - ], - [ - "6.7b-instruct-fp16", - "13 GB" - ], - [ - "6.7b-instruct-q2_K", - "2.8 GB" - ], - [ - "6.7b-instruct-q3_K_L", - "3.6 GB" - ], - [ - "6.7b-instruct-q3_K_M", - "3.3 GB" - ], - [ - "6.7b-instruct-q3_K_S", - "3.0 GB" - ], - [ - "6.7b-instruct-q4_0", - "3.8 GB" - ], - [ - "6.7b-instruct-q4_1", - "4.2 GB" - ], - [ - "6.7b-instruct-q4_K_M", - "4.1 GB" - ], - [ - "6.7b-instruct-q4_K_S", - "3.9 GB" - ], - [ - "6.7b-instruct-q5_0", - "4.7 GB" - ], - [ - 
"6.7b-instruct-q5_1", - "5.1 GB" - ], - [ - "6.7b-instruct-q5_K_M", - "4.8 GB" - ], - [ - "6.7b-instruct-q5_K_S", - "4.7 GB" - ], - [ - "6.7b-instruct-q6_K", - "5.5 GB" - ], - [ - "6.7b-instruct-q8_0", - "7.2 GB" + "776\u202fMB" ], [ "1.3b-base", - "776 MB" - ], - [ - "1.3b-base-fp16", - "2.7 GB" - ], - [ - "1.3b-base-q2_K", - "632 MB" - ], - [ - "1.3b-base-q3_K_L", - "745 MB" - ], - [ - "1.3b-base-q3_K_M", - "705 MB" - ], - [ - "1.3b-base-q3_K_S", - "659 MB" - ], - [ - "1.3b-base-q4_0", - "776 MB" - ], - [ - "1.3b-base-q4_1", - "856 MB" - ], - [ - "1.3b-base-q4_K_M", - "874 MB" - ], - [ - "1.3b-base-q4_K_S", - "815 MB" - ], - [ - "1.3b-base-q5_0", - "936 MB" - ], - [ - "1.3b-base-q5_1", - "1.0 GB" - ], - [ - "1.3b-base-q5_K_M", - "1.0 GB" - ], - [ - "1.3b-base-q5_K_S", - "953 MB" - ], - [ - "1.3b-base-q6_K", - "1.2 GB" - ], - [ - "1.3b-base-q8_0", - "1.4 GB" + "776\u202fMB" ], [ "1.3b-instruct", - "776 MB" + "776\u202fMB" + ], + [ + "6.7b-base", + "3.8\u202fGB" + ], + [ + "6.7b-instruct", + "3.8\u202fGB" + ], + [ + "33b-base", + "19\u202fGB" + ], + [ + "33b-instruct", + "19\u202fGB" + ], + [ + "1.3b-base-fp16", + "2.7\u202fGB" + ], + [ + "1.3b-base-q2_K", + "632\u202fMB" + ], + [ + "1.3b-base-q3_K_S", + "659\u202fMB" + ], + [ + "1.3b-base-q3_K_M", + "705\u202fMB" + ], + [ + "1.3b-base-q3_K_L", + "745\u202fMB" + ], + [ + "1.3b-base-q4_0", + "776\u202fMB" + ], + [ + "1.3b-base-q4_1", + "856\u202fMB" + ], + [ + "1.3b-base-q4_K_S", + "815\u202fMB" + ], + [ + "1.3b-base-q4_K_M", + "874\u202fMB" + ], + [ + "1.3b-base-q5_0", + "936\u202fMB" + ], + [ + "1.3b-base-q5_1", + "1.0\u202fGB" + ], + [ + "1.3b-base-q5_K_S", + "953\u202fMB" + ], + [ + "1.3b-base-q5_K_M", + "1.0\u202fGB" + ], + [ + "1.3b-base-q6_K", + "1.2\u202fGB" + ], + [ + "1.3b-base-q8_0", + "1.4\u202fGB" ], [ "1.3b-instruct-fp16", - "2.7 GB" + "2.7\u202fGB" ], [ "1.3b-instruct-q2_K", - "632 MB" - ], - [ - "1.3b-instruct-q3_K_L", - "745 MB" - ], - [ - "1.3b-instruct-q3_K_M", - "705 MB" + "632\u202fMB" ], [ "1.3b-instruct-q3_K_S", - "659 MB" + "659\u202fMB" + ], + [ + "1.3b-instruct-q3_K_M", + "705\u202fMB" + ], + [ + "1.3b-instruct-q3_K_L", + "745\u202fMB" ], [ "1.3b-instruct-q4_0", - "776 MB" + "776\u202fMB" ], [ "1.3b-instruct-q4_1", - "856 MB" - ], - [ - "1.3b-instruct-q4_K_M", - "874 MB" + "856\u202fMB" ], [ "1.3b-instruct-q4_K_S", - "815 MB" + "815\u202fMB" + ], + [ + "1.3b-instruct-q4_K_M", + "874\u202fMB" ], [ "1.3b-instruct-q5_0", - "936 MB" + "936\u202fMB" ], [ "1.3b-instruct-q5_1", - "1.0 GB" - ], - [ - "1.3b-instruct-q5_K_M", - "1.0 GB" + "1.0\u202fGB" ], [ "1.3b-instruct-q5_K_S", - "953 MB" + "953\u202fMB" + ], + [ + "1.3b-instruct-q5_K_M", + "1.0\u202fGB" ], [ "1.3b-instruct-q6_K", - "1.2 GB" + "1.2\u202fGB" ], [ "1.3b-instruct-q8_0", - "1.4 GB" + "1.4\u202fGB" + ], + [ + "6.7b-base-fp16", + "13\u202fGB" + ], + [ + "6.7b-base-q2_K", + "2.8\u202fGB" + ], + [ + "6.7b-base-q3_K_S", + "3.0\u202fGB" + ], + [ + "6.7b-base-q3_K_M", + "3.3\u202fGB" + ], + [ + "6.7b-base-q3_K_L", + "3.6\u202fGB" + ], + [ + "6.7b-base-q4_0", + "3.8\u202fGB" + ], + [ + "6.7b-base-q4_1", + "4.2\u202fGB" + ], + [ + "6.7b-base-q4_K_S", + "3.9\u202fGB" + ], + [ + "6.7b-base-q4_K_M", + "4.1\u202fGB" + ], + [ + "6.7b-base-q5_0", + "4.7\u202fGB" + ], + [ + "6.7b-base-q5_1", + "5.1\u202fGB" + ], + [ + "6.7b-base-q5_K_S", + "4.7\u202fGB" + ], + [ + "6.7b-base-q5_K_M", + "4.8\u202fGB" + ], + [ + "6.7b-base-q6_K", + "5.5\u202fGB" + ], + [ + "6.7b-base-q8_0", + "7.2\u202fGB" + ], + [ + "6.7b-instruct-fp16", + "13\u202fGB" + ], + [ + "6.7b-instruct-q2_K", + 
"2.8\u202fGB" + ], + [ + "6.7b-instruct-q3_K_S", + "3.0\u202fGB" + ], + [ + "6.7b-instruct-q3_K_M", + "3.3\u202fGB" + ], + [ + "6.7b-instruct-q3_K_L", + "3.6\u202fGB" + ], + [ + "6.7b-instruct-q4_0", + "3.8\u202fGB" + ], + [ + "6.7b-instruct-q4_1", + "4.2\u202fGB" + ], + [ + "6.7b-instruct-q4_K_S", + "3.9\u202fGB" + ], + [ + "6.7b-instruct-q4_K_M", + "4.1\u202fGB" + ], + [ + "6.7b-instruct-q5_0", + "4.7\u202fGB" + ], + [ + "6.7b-instruct-q5_1", + "5.1\u202fGB" + ], + [ + "6.7b-instruct-q5_K_S", + "4.7\u202fGB" + ], + [ + "6.7b-instruct-q5_K_M", + "4.8\u202fGB" + ], + [ + "6.7b-instruct-q6_K", + "5.5\u202fGB" + ], + [ + "6.7b-instruct-q8_0", + "7.2\u202fGB" + ], + [ + "33b-base-fp16", + "67\u202fGB" + ], + [ + "33b-base-q2_K", + "14\u202fGB" + ], + [ + "33b-base-q3_K_S", + "14\u202fGB" + ], + [ + "33b-base-q3_K_M", + "16\u202fGB" + ], + [ + "33b-base-q3_K_L", + "18\u202fGB" + ], + [ + "33b-base-q4_0", + "19\u202fGB" + ], + [ + "33b-base-q4_1", + "21\u202fGB" + ], + [ + "33b-base-q4_K_S", + "19\u202fGB" + ], + [ + "33b-base-q4_K_M", + "20\u202fGB" + ], + [ + "33b-base-q5_0", + "23\u202fGB" + ], + [ + "33b-base-q5_1", + "25\u202fGB" + ], + [ + "33b-base-q5_K_S", + "23\u202fGB" + ], + [ + "33b-base-q5_K_M", + "24\u202fGB" + ], + [ + "33b-base-q6_K", + "27\u202fGB" + ], + [ + "33b-base-q8_0", + "35\u202fGB" + ], + [ + "33b-instruct-fp16", + "67\u202fGB" + ], + [ + "33b-instruct-q2_K", + "14\u202fGB" + ], + [ + "33b-instruct-q3_K_S", + "14\u202fGB" + ], + [ + "33b-instruct-q3_K_M", + "16\u202fGB" + ], + [ + "33b-instruct-q3_K_L", + "18\u202fGB" + ], + [ + "33b-instruct-q4_0", + "19\u202fGB" + ], + [ + "33b-instruct-q4_1", + "21\u202fGB" + ], + [ + "33b-instruct-q4_K_S", + "19\u202fGB" + ], + [ + "33b-instruct-q4_K_M", + "20\u202fGB" + ], + [ + "33b-instruct-q5_0", + "23\u202fGB" + ], + [ + "33b-instruct-q5_1", + "25\u202fGB" + ], + [ + "33b-instruct-q5_K_S", + "23\u202fGB" + ], + [ + "33b-instruct-q5_K_M", + "24\u202fGB" + ], + [ + "33b-instruct-q6_K", + "27\u202fGB" + ], + [ + "33b-instruct-q8_0", + "35\u202fGB" ] ], "image": false, "author": "DeepSeek Team" }, + "mxbai-embed-large": { + "url": "https://ollama.com/library/mxbai-embed-large", + "description": "State-of-the-art large embedding model from mixedbread.ai", + "tags": [ + [ + "latest", + "670\u202fMB" + ], + [ + "335m", + "670\u202fMB" + ], + [ + "v1", + "670\u202fMB" + ], + [ + "335m-v1-fp16", + "670\u202fMB" + ] + ], + "image": false, + "author": "Mixedbread.ai" + }, + "zephyr": { + "url": "https://ollama.com/library/zephyr", + "description": "Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models that are trained to act as helpful assistants.", + "tags": [ + [ + "latest", + "4.1\u202fGB" + ], + [ + "7b", + "4.1\u202fGB" + ], + [ + "141b", + "80\u202fGB" + ], + [ + "7b-alpha", + "4.1\u202fGB" + ], + [ + "7b-beta", + "4.1\u202fGB" + ], + [ + "141b-v0.1", + "80\u202fGB" + ], + [ + "7b-alpha-fp16", + "14\u202fGB" + ], + [ + "7b-alpha-q2_K", + "3.1\u202fGB" + ], + [ + "7b-alpha-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-alpha-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-alpha-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-alpha-q4_0", + "4.1\u202fGB" + ], + [ + "7b-alpha-q4_1", + "4.6\u202fGB" + ], + [ + "7b-alpha-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-alpha-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-alpha-q5_0", + "5.0\u202fGB" + ], + [ + "7b-alpha-q5_1", + "5.4\u202fGB" + ], + [ + "7b-alpha-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-alpha-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-alpha-q6_K", + "5.9\u202fGB" + ], + [ + "7b-alpha-q8_0", 
+ "7.7\u202fGB" + ], + [ + "7b-beta-fp16", + "14\u202fGB" + ], + [ + "7b-beta-q2_K", + "3.1\u202fGB" + ], + [ + "7b-beta-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-beta-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-beta-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-beta-q4_0", + "4.1\u202fGB" + ], + [ + "7b-beta-q4_1", + "4.6\u202fGB" + ], + [ + "7b-beta-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-beta-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-beta-q5_0", + "5.0\u202fGB" + ], + [ + "7b-beta-q5_1", + "5.4\u202fGB" + ], + [ + "7b-beta-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-beta-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-beta-q6_K", + "5.9\u202fGB" + ], + [ + "7b-beta-q8_0", + "7.7\u202fGB" + ], + [ + "141b-v0.1-fp16", + "281\u202fGB" + ], + [ + "141b-v0.1-q2_K", + "52\u202fGB" + ], + [ + "141b-v0.1-q4_0", + "80\u202fGB" + ], + [ + "141b-v0.1-q8_0", + "149\u202fGB" + ] + ], + "image": false, + "author": "Hugging Face H4" + }, "dolphin-mistral": { "url": "https://ollama.com/library/dolphin-mistral", "description": "The uncensored Dolphin model based on Mistral that excels at coding tasks. Updated to version 2.8.", "tags": [ [ "latest", - "4.1 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" + "4.1\u202fGB" ], [ "v2", - "4.1 GB" + "4.1\u202fGB" ], [ "v2.1", - "4.1 GB" + "4.1\u202fGB" ], [ "v2.2", - "4.1 GB" + "4.1\u202fGB" ], [ "v2.2.1", - "4.1 GB" + "4.1\u202fGB" ], [ "v2.6", - "4.1 GB" + "4.1\u202fGB" ], [ "v2.8", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v2", - "4.1 GB" - ], - [ - "7b-v2-fp16", - "14 GB" - ], - [ - "7b-v2-q2_K", - "3.1 GB" - ], - [ - "7b-v2-q3_K_L", - "3.8 GB" - ], - [ - "7b-v2-q3_K_M", - "3.5 GB" - ], - [ - "7b-v2-q3_K_S", - "3.2 GB" - ], - [ - "7b-v2-q4_0", - "4.1 GB" - ], - [ - "7b-v2-q4_1", - "4.6 GB" - ], - [ - "7b-v2-q4_K_M", - "4.4 GB" - ], - [ - "7b-v2-q4_K_S", - "4.1 GB" - ], - [ - "7b-v2-q5_0", - "5.0 GB" - ], - [ - "7b-v2-q5_1", - "5.4 GB" - ], - [ - "7b-v2-q5_K_M", - "5.1 GB" - ], - [ - "7b-v2-q5_K_S", - "5.0 GB" - ], - [ - "7b-v2-q6_K", - "5.9 GB" - ], - [ - "7b-v2-q8_0", - "7.7 GB" + "4.1\u202fGB" ], [ "7b-v2.1", - "4.1 GB" - ], - [ - "7b-v2.1-fp16", - "14 GB" - ], - [ - "7b-v2.1-q2_K", - "3.1 GB" - ], - [ - "7b-v2.1-q3_K_L", - "3.8 GB" - ], - [ - "7b-v2.1-q3_K_M", - "3.5 GB" - ], - [ - "7b-v2.1-q3_K_S", - "3.2 GB" - ], - [ - "7b-v2.1-q4_0", - "4.1 GB" - ], - [ - "7b-v2.1-q4_1", - "4.6 GB" - ], - [ - "7b-v2.1-q4_K_M", - "4.4 GB" - ], - [ - "7b-v2.1-q4_K_S", - "4.1 GB" - ], - [ - "7b-v2.1-q5_0", - "5.0 GB" - ], - [ - "7b-v2.1-q5_1", - "5.4 GB" - ], - [ - "7b-v2.1-q5_K_M", - "5.1 GB" - ], - [ - "7b-v2.1-q5_K_S", - "5.0 GB" - ], - [ - "7b-v2.1-q6_K", - "5.9 GB" - ], - [ - "7b-v2.1-q8_0", - "7.7 GB" + "4.1\u202fGB" ], [ "7b-v2.2", - "4.1 GB" - ], - [ - "7b-v2.2-fp16", - "14 GB" - ], - [ - "7b-v2.2-q2_K", - "3.1 GB" - ], - [ - "7b-v2.2-q3_K_L", - "3.8 GB" - ], - [ - "7b-v2.2-q3_K_M", - "3.5 GB" - ], - [ - "7b-v2.2-q3_K_S", - "3.2 GB" - ], - [ - "7b-v2.2-q4_0", - "4.1 GB" - ], - [ - "7b-v2.2-q4_1", - "4.6 GB" - ], - [ - "7b-v2.2-q4_K_M", - "4.4 GB" - ], - [ - "7b-v2.2-q4_K_S", - "4.1 GB" - ], - [ - "7b-v2.2-q5_0", - "5.0 GB" - ], - [ - "7b-v2.2-q5_1", - "5.4 GB" - ], - [ - "7b-v2.2-q5_K_M", - "5.1 GB" - ], - [ - "7b-v2.2-q5_K_S", - "5.0 GB" - ], - [ - "7b-v2.2-q6_K", - "5.9 GB" - ], - [ - "7b-v2.2-q8_0", - "7.7 GB" + "4.1\u202fGB" ], [ "7b-v2.2.1", - "4.1 GB" - ], - [ - "7b-v2.2.1-fp16", - "14 GB" - ], - [ - "7b-v2.2.1-q2_K", - "3.1 GB" - ], - [ - "7b-v2.2.1-q3_K_L", - "3.8 GB" - ], - [ - "7b-v2.2.1-q3_K_M", - "3.5 GB" - ], - [ - "7b-v2.2.1-q3_K_S", - "3.2 GB" - ], - [ - "7b-v2.2.1-q4_0", - "4.1 GB" - 
], - [ - "7b-v2.2.1-q4_1", - "4.6 GB" - ], - [ - "7b-v2.2.1-q4_K_M", - "4.4 GB" - ], - [ - "7b-v2.2.1-q4_K_S", - "4.1 GB" - ], - [ - "7b-v2.2.1-q5_0", - "5.0 GB" - ], - [ - "7b-v2.2.1-q5_1", - "5.4 GB" - ], - [ - "7b-v2.2.1-q5_K_M", - "5.1 GB" - ], - [ - "7b-v2.2.1-q5_K_S", - "5.0 GB" - ], - [ - "7b-v2.2.1-q6_K", - "5.9 GB" - ], - [ - "7b-v2.2.1-q8_0", - "7.7 GB" - ], - [ - "7b-v2.6-dpo-laser", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v2.6", - "4.1 GB" + "4.1\u202fGB" ], [ - "7b-v2.6-fp16", - "14 GB" - ], - [ - "7b-v2.6-q2_K", - "3.1 GB" - ], - [ - "7b-v2.6-q3_K_L", - "3.8 GB" - ], - [ - "7b-v2.6-q3_K_M", - "3.5 GB" - ], - [ - "7b-v2.6-q3_K_S", - "3.2 GB" - ], - [ - "7b-v2.6-q4_0", - "4.1 GB" - ], - [ - "7b-v2.6-q4_1", - "4.6 GB" - ], - [ - "7b-v2.6-dpo-laser-q8_0", - "7.7 GB" - ], - [ - "7b-v2.6-dpo-laser-q4_1", - "4.6 GB" - ], - [ - "7b-v2.6-dpo-laser-q5_K_S", - "5.0 GB" - ], - [ - "7b-v2.6-q4_K_M", - "4.4 GB" - ], - [ - "7b-v2.6-q5_0", - "5.0 GB" - ], - [ - "7b-v2.6-q5_K_M", - "5.1 GB" - ], - [ - "7b-v2.6-dpo-laser-q5_0", - "5.0 GB" - ], - [ - "7b-v2.6-dpo-laser-q4_K_M", - "4.4 GB" - ], - [ - "7b-v2.6-dpo-laser-q3_K_S", - "3.2 GB" - ], - [ - "7b-v2.6-q5_K_S", - "5.0 GB" - ], - [ - "7b-v2.6-dpo-laser-fp16", - "14 GB" - ], - [ - "7b-v2.6-dpo-laser-q3_K_M", - "3.5 GB" - ], - [ - "7b-v2.6-dpo-laser-q6_K", - "5.9 GB" - ], - [ - "7b-v2.6-q8_0", - "7.7 GB" - ], - [ - "7b-v2.6-dpo-laser-q3_K_L", - "3.8 GB" - ], - [ - "7b-v2.6-dpo-laser-q4_K_S", - "4.1 GB" - ], - [ - "7b-v2.6-q4_K_S", - "4.1 GB" - ], - [ - "7b-v2.6-q6_K", - "5.9 GB" - ], - [ - "7b-v2.6-dpo-laser-q2_K", - "3.1 GB" - ], - [ - "7b-v2.6-dpo-laser-q5_1", - "5.4 GB" - ], - [ - "7b-v2.6-dpo-laser-q5_K_M", - "5.1 GB" - ], - [ - "7b-v2.6-q5_1", - "5.4 GB" - ], - [ - "7b-v2.6-dpo-laser-q4_0", - "4.1 GB" + "7b-v2.6-dpo-laser", + "4.1\u202fGB" ], [ "7b-v2.8", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v2-fp16", + "14\u202fGB" + ], + [ + "7b-v2-q2_K", + "3.1\u202fGB" + ], + [ + "7b-v2-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-v2-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v2-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v2-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v2-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v2-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v2-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-v2-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v2-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v2-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v2-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v2-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v2-q8_0", + "7.7\u202fGB" + ], + [ + "7b-v2.1-fp16", + "14\u202fGB" + ], + [ + "7b-v2.1-q2_K", + "3.1\u202fGB" + ], + [ + "7b-v2.1-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-v2.1-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v2.1-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v2.1-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v2.1-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v2.1-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v2.1-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-v2.1-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v2.1-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v2.1-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v2.1-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v2.1-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v2.1-q8_0", + "7.7\u202fGB" + ], + [ + "7b-v2.2-fp16", + "14\u202fGB" + ], + [ + "7b-v2.2-q2_K", + "3.1\u202fGB" + ], + [ + "7b-v2.2-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-v2.2-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v2.2-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v2.2-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v2.2-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v2.2-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v2.2-q4_K_M", + "4.4\u202fGB" + ], + [ + 
"7b-v2.2-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v2.2-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v2.2-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v2.2-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v2.2-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v2.2-q8_0", + "7.7\u202fGB" + ], + [ + "7b-v2.2.1-fp16", + "14\u202fGB" + ], + [ + "7b-v2.2.1-q2_K", + "3.1\u202fGB" + ], + [ + "7b-v2.2.1-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-v2.2.1-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v2.2.1-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v2.2.1-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v2.2.1-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v2.2.1-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v2.2.1-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-v2.2.1-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v2.2.1-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v2.2.1-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v2.2.1-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v2.2.1-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v2.2.1-q8_0", + "7.7\u202fGB" + ], + [ + "7b-v2.6-fp16", + "14\u202fGB" + ], + [ + "7b-v2.6-q2_K", + "3.1\u202fGB" + ], + [ + "7b-v2.6-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-v2.6-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v2.6-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v2.6-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v2.6-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v2.6-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v2.6-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-v2.6-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v2.6-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v2.6-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v2.6-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v2.6-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v2.6-q8_0", + "7.7\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-fp16", + "14\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q2_K", + "3.1\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v2.6-dpo-laser-q8_0", + "7.7\u202fGB" ], [ "7b-v2.8-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-v2.8-q2_K", - "2.7 GB" - ], - [ - "7b-v2.8-q3_K_L", - "3.8 GB" - ], - [ - "7b-v2.8-q3_K_M", - "3.5 GB" + "2.7\u202fGB" ], [ "7b-v2.8-q3_K_S", - "3.2 GB" + "3.2\u202fGB" + ], + [ + "7b-v2.8-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v2.8-q3_K_L", + "3.8\u202fGB" ], [ "7b-v2.8-q4_0", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v2.8-q4_1", - "4.6 GB" - ], - [ - "7b-v2.8-q4_K_M", - "4.4 GB" + "4.6\u202fGB" ], [ "7b-v2.8-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v2.8-q4_K_M", + "4.4\u202fGB" ], [ "7b-v2.8-q5_0", - "5.0 GB" + "5.0\u202fGB" ], [ "7b-v2.8-q5_1", - "5.4 GB" - ], - [ - "7b-v2.8-q5_K_M", - "5.1 GB" + "5.4\u202fGB" ], [ "7b-v2.8-q5_K_S", - "5.0 GB" + "5.0\u202fGB" + ], + [ + "7b-v2.8-q5_K_M", + "5.1\u202fGB" ], [ "7b-v2.8-q6_K", - "5.9 GB" + "5.9\u202fGB" ], [ "7b-v2.8-q8_0", - "7.7 GB" + "7.7\u202fGB" ] ], "image": false, "author": "Eric Hartford" }, + "starcoder2": { + "url": "https://ollama.com/library/starcoder2", + "description": "StarCoder2 is the next generation of transparently trained open code LLMs that comes in three sizes: 3B, 7B and 15B parameters.", + "tags": [ 
+ [ + "latest", + "1.7\u202fGB" + ], + [ + "3b", + "1.7\u202fGB" + ], + [ + "7b", + "4.0\u202fGB" + ], + [ + "15b", + "9.1\u202fGB" + ], + [ + "instruct", + "9.1\u202fGB" + ], + [ + "15b-instruct", + "9.1\u202fGB" + ], + [ + "3b-fp16", + "6.1\u202fGB" + ], + [ + "3b-q2_K", + "1.1\u202fGB" + ], + [ + "3b-q3_K_S", + "1.3\u202fGB" + ], + [ + "3b-q3_K_M", + "1.5\u202fGB" + ], + [ + "3b-q3_K_L", + "1.7\u202fGB" + ], + [ + "3b-q4_0", + "1.7\u202fGB" + ], + [ + "3b-q4_1", + "1.9\u202fGB" + ], + [ + "3b-q4_K_S", + "1.7\u202fGB" + ], + [ + "3b-q4_K_M", + "1.8\u202fGB" + ], + [ + "3b-q5_0", + "2.1\u202fGB" + ], + [ + "3b-q5_1", + "2.3\u202fGB" + ], + [ + "3b-q5_K_S", + "2.1\u202fGB" + ], + [ + "3b-q5_K_M", + "2.2\u202fGB" + ], + [ + "3b-q6_K", + "2.5\u202fGB" + ], + [ + "3b-q8_0", + "3.2\u202fGB" + ], + [ + "7b-fp16", + "14\u202fGB" + ], + [ + "7b-q2_K", + "2.7\u202fGB" + ], + [ + "7b-q3_K_S", + "3.1\u202fGB" + ], + [ + "7b-q3_K_M", + "3.6\u202fGB" + ], + [ + "7b-q3_K_L", + "4.0\u202fGB" + ], + [ + "7b-q4_0", + "4.0\u202fGB" + ], + [ + "7b-q4_1", + "4.5\u202fGB" + ], + [ + "7b-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-q5_0", + "4.9\u202fGB" + ], + [ + "7b-q5_1", + "5.4\u202fGB" + ], + [ + "7b-q5_K_S", + "4.9\u202fGB" + ], + [ + "7b-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-q6_K", + "5.9\u202fGB" + ], + [ + "7b-q8_0", + "7.6\u202fGB" + ], + [ + "15b-fp16", + "32\u202fGB" + ], + [ + "15b-q2_K", + "6.2\u202fGB" + ], + [ + "15b-q3_K_S", + "7.0\u202fGB" + ], + [ + "15b-q3_K_M", + "8.1\u202fGB" + ], + [ + "15b-q3_K_L", + "9.0\u202fGB" + ], + [ + "15b-q4_0", + "9.1\u202fGB" + ], + [ + "15b-q4_1", + "10\u202fGB" + ], + [ + "15b-q4_K_S", + "9.3\u202fGB" + ], + [ + "15b-q4_K_M", + "9.9\u202fGB" + ], + [ + "15b-q5_0", + "11\u202fGB" + ], + [ + "15b-q5_1", + "12\u202fGB" + ], + [ + "15b-q5_K_S", + "11\u202fGB" + ], + [ + "15b-q5_K_M", + "11\u202fGB" + ], + [ + "15b-q6_K", + "13\u202fGB" + ], + [ + "15b-q8_0", + "17\u202fGB" + ], + [ + "15b-instruct-q4_0", + "9.1\u202fGB" + ], + [ + "15b-instruct-v0.1-fp16", + "32\u202fGB" + ], + [ + "15b-instruct-v0.1-q2_K", + "6.2\u202fGB" + ], + [ + "15b-instruct-v0.1-q3_K_S", + "7.0\u202fGB" + ], + [ + "15b-instruct-v0.1-q3_K_M", + "8.0\u202fGB" + ], + [ + "15b-instruct-v0.1-q3_K_L", + "9.0\u202fGB" + ], + [ + "15b-instruct-v0.1-q4_0", + "9.1\u202fGB" + ], + [ + "15b-instruct-v0.1-q4_1", + "10\u202fGB" + ], + [ + "15b-instruct-v0.1-q4_K_S", + "9.2\u202fGB" + ], + [ + "15b-instruct-v0.1-q4_K_M", + "9.9\u202fGB" + ], + [ + "15b-instruct-v0.1-q5_0", + "11\u202fGB" + ], + [ + "15b-instruct-v0.1-q5_1", + "12\u202fGB" + ], + [ + "15b-instruct-v0.1-q5_K_S", + "11\u202fGB" + ], + [ + "15b-instruct-v0.1-q5_K_M", + "11\u202fGB" + ], + [ + "15b-instruct-v0.1-q6_K", + "13\u202fGB" + ], + [ + "15b-instruct-v0.1-q8_0", + "17\u202fGB" + ] + ], + "image": false, + "author": "BigCode" + }, "orca-mini": { "url": "https://ollama.com/library/orca-mini", "description": "A general-purpose model ranging from 3 billion parameters to 70 billion, suitable for entry-level hardware.", "tags": [ [ "latest", - "2.0 GB" - ], - [ - "70b", - "39 GB" - ], - [ - "13b", - "7.4 GB" - ], - [ - "7b", - "3.8 GB" + "2.0\u202fGB" ], [ "3b", - "2.0 GB" + "2.0\u202fGB" ], [ - "70b-v3", - "39 GB" + "7b", + "3.8\u202fGB" ], [ - "70b-v3-fp16", - "138 GB" + "13b", + "7.4\u202fGB" ], [ - "70b-v3-q2_K", - "29 GB" - ], - [ - "70b-v3-q3_K_L", - "36 GB" - ], - [ - "70b-v3-q3_K_M", - "33 GB" - ], - [ - "70b-v3-q3_K_S", - "30 GB" - ], - [ - "70b-v3-q4_0", - "39 GB" - ], - [ - "70b-v3-q4_1", - 
"43 GB" - ], - [ - "70b-v3-q4_K_M", - "41 GB" - ], - [ - "70b-v3-q4_K_S", - "39 GB" - ], - [ - "70b-v3-q5_0", - "47 GB" - ], - [ - "70b-v3-q5_1", - "52 GB" - ], - [ - "70b-v3-q5_K_M", - "49 GB" - ], - [ - "70b-v3-q5_K_S", - "47 GB" - ], - [ - "70b-v3-q6_K", - "57 GB" - ], - [ - "70b-v3-q8_0", - "73 GB" - ], - [ - "13b-v2-fp16", - "26 GB" - ], - [ - "13b-v2-q2_K", - "5.4 GB" - ], - [ - "13b-v2-q3_K_L", - "6.9 GB" - ], - [ - "13b-v2-q3_K_M", - "6.3 GB" - ], - [ - "13b-v2-q3_K_S", - "5.7 GB" - ], - [ - "13b-v2-q4_0", - "7.4 GB" - ], - [ - "13b-v2-q4_1", - "8.2 GB" - ], - [ - "13b-v2-q4_K_M", - "7.9 GB" - ], - [ - "13b-v2-q4_K_S", - "7.4 GB" - ], - [ - "13b-v2-q5_0", - "9.0 GB" - ], - [ - "13b-v2-q5_1", - "9.8 GB" - ], - [ - "13b-v2-q5_K_M", - "9.2 GB" - ], - [ - "13b-v2-q5_K_S", - "9.0 GB" - ], - [ - "13b-v2-q6_K", - "11 GB" - ], - [ - "13b-v2-q8_0", - "14 GB" - ], - [ - "13b-v3", - "7.4 GB" - ], - [ - "13b-v3-fp16", - "26 GB" - ], - [ - "13b-v3-q2_K", - "5.4 GB" - ], - [ - "13b-v3-q3_K_L", - "6.9 GB" - ], - [ - "13b-v3-q3_K_M", - "6.3 GB" - ], - [ - "13b-v3-q3_K_S", - "5.7 GB" - ], - [ - "13b-v3-q4_0", - "7.4 GB" - ], - [ - "13b-v3-q4_1", - "8.2 GB" - ], - [ - "13b-v3-q4_K_M", - "7.9 GB" - ], - [ - "13b-v3-q4_K_S", - "7.4 GB" - ], - [ - "13b-v3-q5_0", - "9.0 GB" - ], - [ - "13b-v3-q5_1", - "9.8 GB" - ], - [ - "13b-v3-q5_K_M", - "9.2 GB" - ], - [ - "13b-v3-q5_K_S", - "9.0 GB" - ], - [ - "13b-v3-q6_K", - "11 GB" - ], - [ - "13b-v3-q8_0", - "14 GB" - ], - [ - "13b-fp16", - "26 GB" - ], - [ - "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" - ], - [ - "13b-q3_K_S", - "5.7 GB" - ], - [ - "13b-q4_0", - "7.4 GB" - ], - [ - "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" - ], - [ - "13b-q4_K_S", - "7.4 GB" - ], - [ - "13b-q5_0", - "9.0 GB" - ], - [ - "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" - ], - [ - "13b-q5_K_S", - "9.0 GB" - ], - [ - "13b-q6_K", - "11 GB" - ], - [ - "13b-q8_0", - "14 GB" - ], - [ - "7b-v2-fp16", - "13 GB" - ], - [ - "7b-v2-q2_K", - "2.8 GB" - ], - [ - "7b-v2-q3_K_L", - "3.6 GB" - ], - [ - "7b-v2-q3_K_M", - "3.3 GB" - ], - [ - "7b-v2-q3_K_S", - "2.9 GB" - ], - [ - "7b-v2-q4_0", - "3.8 GB" - ], - [ - "7b-v2-q4_1", - "4.2 GB" - ], - [ - "7b-v2-q4_K_M", - "4.1 GB" - ], - [ - "7b-v2-q4_K_S", - "3.9 GB" - ], - [ - "7b-v2-q5_0", - "4.7 GB" - ], - [ - "7b-v2-q5_1", - "5.1 GB" - ], - [ - "7b-v2-q5_K_M", - "4.8 GB" - ], - [ - "7b-v2-q5_K_S", - "4.7 GB" - ], - [ - "7b-v2-q6_K", - "5.5 GB" - ], - [ - "7b-v2-q8_0", - "7.2 GB" + "70b", + "39\u202fGB" ], [ "7b-v3", - "3.8 GB" + "3.8\u202fGB" ], [ - "7b-v3-fp16", - "13 GB" + "13b-v3", + "7.4\u202fGB" ], [ - "7b-v3-q2_K", - "2.8 GB" - ], - [ - "7b-v3-q3_K_L", - "3.6 GB" - ], - [ - "7b-v3-q3_K_M", - "3.3 GB" - ], - [ - "7b-v3-q3_K_S", - "2.9 GB" - ], - [ - "7b-v3-q4_0", - "3.8 GB" - ], - [ - "7b-v3-q4_1", - "4.2 GB" - ], - [ - "7b-v3-q4_K_M", - "4.1 GB" - ], - [ - "7b-v3-q4_K_S", - "3.9 GB" - ], - [ - "7b-v3-q5_0", - "4.7 GB" - ], - [ - "7b-v3-q5_1", - "5.1 GB" - ], - [ - "7b-v3-q5_K_M", - "4.8 GB" - ], - [ - "7b-v3-q5_K_S", - "4.7 GB" - ], - [ - "7b-v3-q6_K", - "5.5 GB" - ], - [ - "7b-v3-q8_0", - "7.2 GB" - ], - [ - "7b-fp16", - "13 GB" - ], - [ - "7b-q2_K", - "2.8 GB" - ], - [ - "7b-q3_K_L", - "3.6 GB" - ], - [ - "7b-q3_K_M", - "3.3 GB" - ], - [ - "7b-q3_K_S", - "2.9 GB" - ], - [ - "7b-q4_0", - "3.8 GB" - ], - [ - "7b-q4_1", - "4.2 GB" - ], - [ - "7b-q4_K_M", - "4.1 GB" - ], - [ - "7b-q4_K_S", - "3.9 GB" - ], - [ - "7b-q5_0", - "4.7 GB" - ], - [ - "7b-q5_1", - "5.1 GB" - 
], - [ - "7b-q5_K_M", - "4.8 GB" - ], - [ - "7b-q5_K_S", - "4.7 GB" - ], - [ - "7b-q6_K", - "5.5 GB" - ], - [ - "7b-q8_0", - "7.2 GB" + "70b-v3", + "39\u202fGB" ], [ "3b-fp16", - "6.9 GB" + "6.9\u202fGB" ], [ "3b-q4_0", - "2.0 GB" + "2.0\u202fGB" ], [ "3b-q4_1", - "2.2 GB" + "2.2\u202fGB" ], [ "3b-q5_0", - "2.4 GB" + "2.4\u202fGB" ], [ "3b-q5_1", - "2.6 GB" + "2.6\u202fGB" ], [ "3b-q8_0", - "3.6 GB" + "3.6\u202fGB" + ], + [ + "7b-fp16", + "13\u202fGB" + ], + [ + "7b-q2_K", + "2.8\u202fGB" + ], + [ + "7b-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-q4_0", + "3.8\u202fGB" + ], + [ + "7b-q4_1", + "4.2\u202fGB" + ], + [ + "7b-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-q5_0", + "4.7\u202fGB" + ], + [ + "7b-q5_1", + "5.1\u202fGB" + ], + [ + "7b-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-q6_K", + "5.5\u202fGB" + ], + [ + "7b-q8_0", + "7.2\u202fGB" + ], + [ + "13b-fp16", + "26\u202fGB" + ], + [ + "13b-q2_K", + "5.4\u202fGB" + ], + [ + "13b-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-q4_0", + "7.4\u202fGB" + ], + [ + "13b-q4_1", + "8.2\u202fGB" + ], + [ + "13b-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-q5_0", + "9.0\u202fGB" + ], + [ + "13b-q5_1", + "9.8\u202fGB" + ], + [ + "13b-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-q6_K", + "11\u202fGB" + ], + [ + "13b-q8_0", + "14\u202fGB" + ], + [ + "7b-v2-fp16", + "13\u202fGB" + ], + [ + "7b-v2-q2_K", + "2.8\u202fGB" + ], + [ + "7b-v2-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-v2-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-v2-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-v2-q4_0", + "3.8\u202fGB" + ], + [ + "7b-v2-q4_1", + "4.2\u202fGB" + ], + [ + "7b-v2-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-v2-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-v2-q5_0", + "4.7\u202fGB" + ], + [ + "7b-v2-q5_1", + "5.1\u202fGB" + ], + [ + "7b-v2-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-v2-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-v2-q6_K", + "5.5\u202fGB" + ], + [ + "7b-v2-q8_0", + "7.2\u202fGB" + ], + [ + "7b-v3-fp16", + "13\u202fGB" + ], + [ + "7b-v3-q2_K", + "2.8\u202fGB" + ], + [ + "7b-v3-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-v3-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-v3-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-v3-q4_0", + "3.8\u202fGB" + ], + [ + "7b-v3-q4_1", + "4.2\u202fGB" + ], + [ + "7b-v3-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-v3-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-v3-q5_0", + "4.7\u202fGB" + ], + [ + "7b-v3-q5_1", + "5.1\u202fGB" + ], + [ + "7b-v3-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-v3-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-v3-q6_K", + "5.5\u202fGB" + ], + [ + "7b-v3-q8_0", + "7.2\u202fGB" + ], + [ + "13b-v2-fp16", + "26\u202fGB" + ], + [ + "13b-v2-q2_K", + "5.4\u202fGB" + ], + [ + "13b-v2-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-v2-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-v2-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-v2-q4_0", + "7.4\u202fGB" + ], + [ + "13b-v2-q4_1", + "8.2\u202fGB" + ], + [ + "13b-v2-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-v2-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-v2-q5_0", + "9.0\u202fGB" + ], + [ + "13b-v2-q5_1", + "9.8\u202fGB" + ], + [ + "13b-v2-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-v2-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-v2-q6_K", + "11\u202fGB" + ], + [ + "13b-v2-q8_0", + "14\u202fGB" + ], + [ + "13b-v3-fp16", + "26\u202fGB" + ], + [ + 
"13b-v3-q2_K", + "5.4\u202fGB" + ], + [ + "13b-v3-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-v3-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-v3-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-v3-q4_0", + "7.4\u202fGB" + ], + [ + "13b-v3-q4_1", + "8.2\u202fGB" + ], + [ + "13b-v3-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-v3-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-v3-q5_0", + "9.0\u202fGB" + ], + [ + "13b-v3-q5_1", + "9.8\u202fGB" + ], + [ + "13b-v3-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-v3-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-v3-q6_K", + "11\u202fGB" + ], + [ + "13b-v3-q8_0", + "14\u202fGB" + ], + [ + "70b-v3-fp16", + "138\u202fGB" + ], + [ + "70b-v3-q2_K", + "29\u202fGB" + ], + [ + "70b-v3-q3_K_S", + "30\u202fGB" + ], + [ + "70b-v3-q3_K_M", + "33\u202fGB" + ], + [ + "70b-v3-q3_K_L", + "36\u202fGB" + ], + [ + "70b-v3-q4_0", + "39\u202fGB" + ], + [ + "70b-v3-q4_1", + "43\u202fGB" + ], + [ + "70b-v3-q4_K_S", + "39\u202fGB" + ], + [ + "70b-v3-q4_K_M", + "41\u202fGB" + ], + [ + "70b-v3-q5_0", + "47\u202fGB" + ], + [ + "70b-v3-q5_1", + "52\u202fGB" + ], + [ + "70b-v3-q5_K_S", + "47\u202fGB" + ], + [ + "70b-v3-q5_K_M", + "49\u202fGB" + ], + [ + "70b-v3-q6_K", + "57\u202fGB" + ], + [ + "70b-v3-q8_0", + "73\u202fGB" ] ], "image": false, @@ -8381,1619 +9329,1003 @@ "tags": [ [ "latest", - "4.7 GB" - ], - [ - "70b", - "40 GB" - ], - [ - "8b", - "4.7 GB" + "4.7\u202fGB" ], [ "256k", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "8b", + "4.7\u202fGB" + ], + [ + "70b", + "40\u202fGB" ], [ "v2.9", - "4.7 GB" - ], - [ - "70b-v2.9", - "40 GB" - ], - [ - "70b-v2.9-fp16", - "141 GB" - ], - [ - "70b-v2.9-q2_K", - "26 GB" - ], - [ - "70b-v2.9-q3_K_L", - "37 GB" - ], - [ - "70b-v2.9-q3_K_M", - "34 GB" - ], - [ - "70b-v2.9-q3_K_S", - "31 GB" - ], - [ - "70b-v2.9-q4_0", - "40 GB" - ], - [ - "70b-v2.9-q4_1", - "44 GB" - ], - [ - "70b-v2.9-q4_K_M", - "43 GB" - ], - [ - "70b-v2.9-q4_K_S", - "40 GB" - ], - [ - "70b-v2.9-q5_0", - "49 GB" - ], - [ - "70b-v2.9-q5_1", - "53 GB" - ], - [ - "70b-v2.9-q5_K_M", - "50 GB" - ], - [ - "70b-v2.9-q5_K_S", - "49 GB" - ], - [ - "70b-v2.9-q6_K", - "58 GB" - ], - [ - "70b-v2.9-q8_0", - "75 GB" - ], - [ - "8b-256k-v2.9", - "4.7 GB" + "4.7\u202fGB" ], [ "8b-256k", - "4.7 GB" + "4.7\u202fGB" ], [ - "8b-256k-v2.9-fp16", - "16 GB" - ], - [ - "8b-256k-v2.9-q2_K", - "3.2 GB" - ], - [ - "8b-256k-v2.9-q3_K_L", - "4.3 GB" - ], - [ - "8b-256k-v2.9-q3_K_M", - "4.0 GB" - ], - [ - "8b-256k-v2.9-q3_K_S", - "3.7 GB" - ], - [ - "8b-256k-v2.9-q4_0", - "4.7 GB" - ], - [ - "8b-256k-v2.9-q4_1", - "5.1 GB" - ], - [ - "8b-256k-v2.9-q4_K_M", - "4.9 GB" - ], - [ - "8b-256k-v2.9-q4_K_S", - "4.7 GB" - ], - [ - "8b-256k-v2.9-q5_0", - "5.6 GB" - ], - [ - "8b-256k-v2.9-q5_1", - "6.1 GB" - ], - [ - "8b-256k-v2.9-q5_K_M", - "5.7 GB" - ], - [ - "8b-256k-v2.9-q5_K_S", - "5.6 GB" - ], - [ - "8b-256k-v2.9-q6_K", - "6.6 GB" - ], - [ - "8b-256k-v2.9-q8_0", - "8.5 GB" + "8b-256k-v2.9", + "4.7\u202fGB" ], [ "8b-v2.9", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "70b-v2.9", + "40\u202fGB" + ], + [ + "8b-256k-v2.9-fp16", + "16\u202fGB" + ], + [ + "8b-256k-v2.9-q2_K", + "3.2\u202fGB" + ], + [ + "8b-256k-v2.9-q3_K_S", + "3.7\u202fGB" + ], + [ + "8b-256k-v2.9-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-256k-v2.9-q3_K_L", + "4.3\u202fGB" + ], + [ + "8b-256k-v2.9-q4_0", + "4.7\u202fGB" + ], + [ + "8b-256k-v2.9-q4_1", + "5.1\u202fGB" + ], + [ + "8b-256k-v2.9-q4_K_S", + "4.7\u202fGB" + ], + [ + "8b-256k-v2.9-q4_K_M", + "4.9\u202fGB" + ], + [ + "8b-256k-v2.9-q5_0", + "5.6\u202fGB" + ], + [ + "8b-256k-v2.9-q5_1", + "6.1\u202fGB" + ], + [ + 
"8b-256k-v2.9-q5_K_S", + "5.6\u202fGB" + ], + [ + "8b-256k-v2.9-q5_K_M", + "5.7\u202fGB" + ], + [ + "8b-256k-v2.9-q6_K", + "6.6\u202fGB" + ], + [ + "8b-256k-v2.9-q8_0", + "8.5\u202fGB" ], [ "8b-v2.9-fp16", - "16 GB" + "16\u202fGB" ], [ "8b-v2.9-q2_K", - "3.2 GB" - ], - [ - "8b-v2.9-q3_K_L", - "4.3 GB" - ], - [ - "8b-v2.9-q3_K_M", - "4.0 GB" + "3.2\u202fGB" ], [ "8b-v2.9-q3_K_S", - "3.7 GB" + "3.7\u202fGB" + ], + [ + "8b-v2.9-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-v2.9-q3_K_L", + "4.3\u202fGB" ], [ "8b-v2.9-q4_0", - "4.7 GB" + "4.7\u202fGB" ], [ "8b-v2.9-q4_1", - "5.1 GB" - ], - [ - "8b-v2.9-q4_K_M", - "4.9 GB" + "5.1\u202fGB" ], [ "8b-v2.9-q4_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "8b-v2.9-q4_K_M", + "4.9\u202fGB" ], [ "8b-v2.9-q5_0", - "5.6 GB" + "5.6\u202fGB" ], [ "8b-v2.9-q5_1", - "6.1 GB" - ], - [ - "8b-v2.9-q5_K_M", - "5.7 GB" + "6.1\u202fGB" ], [ "8b-v2.9-q5_K_S", - "5.6 GB" + "5.6\u202fGB" + ], + [ + "8b-v2.9-q5_K_M", + "5.7\u202fGB" ], [ "8b-v2.9-q6_K", - "6.6 GB" + "6.6\u202fGB" ], [ "8b-v2.9-q8_0", - "8.5 GB" + "8.5\u202fGB" + ], + [ + "70b-v2.9-fp16", + "141\u202fGB" + ], + [ + "70b-v2.9-q2_K", + "26\u202fGB" + ], + [ + "70b-v2.9-q3_K_S", + "31\u202fGB" + ], + [ + "70b-v2.9-q3_K_M", + "34\u202fGB" + ], + [ + "70b-v2.9-q3_K_L", + "37\u202fGB" + ], + [ + "70b-v2.9-q4_0", + "40\u202fGB" + ], + [ + "70b-v2.9-q4_1", + "44\u202fGB" + ], + [ + "70b-v2.9-q4_K_S", + "40\u202fGB" + ], + [ + "70b-v2.9-q4_K_M", + "43\u202fGB" + ], + [ + "70b-v2.9-q5_0", + "49\u202fGB" + ], + [ + "70b-v2.9-q5_1", + "53\u202fGB" + ], + [ + "70b-v2.9-q5_K_S", + "49\u202fGB" + ], + [ + "70b-v2.9-q5_K_M", + "50\u202fGB" + ], + [ + "70b-v2.9-q6_K", + "58\u202fGB" + ], + [ + "70b-v2.9-q8_0", + "75\u202fGB" ] ], "image": false, "author": "Eric Hartford" }, - "mxbai-embed-large": { - "url": "https://ollama.com/library/mxbai-embed-large", - "description": "State-of-the-art large embedding model from mixedbread.ai", - "tags": [ - [ - "latest", - "670 MB" - ], - [ - "335m", - "670 MB" - ], - [ - "v1", - "670 MB" - ], - [ - "335m-v1-fp16", - "670 MB" - ] - ], - "image": false, - "author": "Mixedbread.ai" - }, - "starcoder2": { - "url": "https://ollama.com/library/starcoder2", - "description": "StarCoder2 is the next generation of transparently trained open code LLMs that comes in three sizes: 3B, 7B and 15B parameters.", - "tags": [ - [ - "latest", - "1.7 GB" - ], - [ - "15b", - "9.1 GB" - ], - [ - "7b", - "4.0 GB" - ], - [ - "3b", - "1.7 GB" - ], - [ - "instruct", - "9.1 GB" - ], - [ - "15b-instruct", - "9.1 GB" - ], - [ - "15b-instruct-v0.1-fp16", - "32 GB" - ], - [ - "15b-instruct-v0.1-q2_K", - "6.2 GB" - ], - [ - "15b-instruct-v0.1-q3_K_L", - "9.0 GB" - ], - [ - "15b-instruct-v0.1-q3_K_M", - "8.0 GB" - ], - [ - "15b-instruct-v0.1-q3_K_S", - "7.0 GB" - ], - [ - "15b-instruct-v0.1-q4_0", - "9.1 GB" - ], - [ - "15b-instruct-v0.1-q4_1", - "10 GB" - ], - [ - "15b-instruct-v0.1-q4_K_M", - "9.9 GB" - ], - [ - "15b-instruct-v0.1-q4_K_S", - "9.2 GB" - ], - [ - "15b-instruct-v0.1-q5_0", - "11 GB" - ], - [ - "15b-instruct-v0.1-q5_1", - "12 GB" - ], - [ - "15b-instruct-v0.1-q5_K_M", - "11 GB" - ], - [ - "15b-instruct-v0.1-q5_K_S", - "11 GB" - ], - [ - "15b-instruct-v0.1-q6_K", - "13 GB" - ], - [ - "15b-instruct-v0.1-q8_0", - "17 GB" - ], - [ - "15b-instruct-q4_0", - "9.1 GB" - ], - [ - "15b-fp16", - "32 GB" - ], - [ - "15b-q2_K", - "6.2 GB" - ], - [ - "15b-q3_K_L", - "9.0 GB" - ], - [ - "15b-q3_K_M", - "8.1 GB" - ], - [ - "15b-q3_K_S", - "7.0 GB" - ], - [ - "15b-q4_0", - "9.1 GB" - ], - [ - "15b-q4_1", - "10 GB" - ], - [ 
- "15b-q4_K_M", - "9.9 GB" - ], - [ - "15b-q4_K_S", - "9.3 GB" - ], - [ - "15b-q5_0", - "11 GB" - ], - [ - "15b-q5_1", - "12 GB" - ], - [ - "15b-q5_K_M", - "11 GB" - ], - [ - "15b-q5_K_S", - "11 GB" - ], - [ - "15b-q6_K", - "13 GB" - ], - [ - "15b-q8_0", - "17 GB" - ], - [ - "7b-fp16", - "14 GB" - ], - [ - "7b-q2_K", - "2.7 GB" - ], - [ - "7b-q3_K_L", - "4.0 GB" - ], - [ - "7b-q3_K_M", - "3.6 GB" - ], - [ - "7b-q3_K_S", - "3.1 GB" - ], - [ - "7b-q4_0", - "4.0 GB" - ], - [ - "7b-q4_1", - "4.5 GB" - ], - [ - "7b-q4_K_M", - "4.4 GB" - ], - [ - "7b-q4_K_S", - "4.1 GB" - ], - [ - "7b-q5_0", - "4.9 GB" - ], - [ - "7b-q5_1", - "5.4 GB" - ], - [ - "7b-q5_K_M", - "5.1 GB" - ], - [ - "7b-q5_K_S", - "4.9 GB" - ], - [ - "7b-q6_K", - "5.9 GB" - ], - [ - "7b-q8_0", - "7.6 GB" - ], - [ - "3b-fp16", - "6.1 GB" - ], - [ - "3b-q2_K", - "1.1 GB" - ], - [ - "3b-q3_K_L", - "1.7 GB" - ], - [ - "3b-q3_K_M", - "1.5 GB" - ], - [ - "3b-q3_K_S", - "1.3 GB" - ], - [ - "3b-q4_0", - "1.7 GB" - ], - [ - "3b-q4_1", - "1.9 GB" - ], - [ - "3b-q4_K_M", - "1.8 GB" - ], - [ - "3b-q4_K_S", - "1.7 GB" - ], - [ - "3b-q5_0", - "2.1 GB" - ], - [ - "3b-q5_1", - "2.3 GB" - ], - [ - "3b-q5_K_M", - "2.2 GB" - ], - [ - "3b-q5_K_S", - "2.1 GB" - ], - [ - "3b-q6_K", - "2.5 GB" - ], - [ - "3b-q8_0", - "3.2 GB" - ] - ], - "image": false, - "author": "BigCode" - }, - "mistral-openorca": { - "url": "https://ollama.com/library/mistral-openorca", - "description": "Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the Mistral 7B model using the OpenOrca dataset.", - "tags": [ - [ - "latest", - "4.1 GB" - ], - [ - "7b", - "4.1 GB" - ], - [ - "7b-fp16", - "14 GB" - ], - [ - "7b-q2_K", - "3.1 GB" - ], - [ - "7b-q3_K_L", - "3.8 GB" - ], - [ - "7b-q3_K_M", - "3.5 GB" - ], - [ - "7b-q3_K_S", - "3.2 GB" - ], - [ - "7b-q4_0", - "4.1 GB" - ], - [ - "7b-q4_1", - "4.6 GB" - ], - [ - "7b-q4_K_M", - "4.4 GB" - ], - [ - "7b-q4_K_S", - "4.1 GB" - ], - [ - "7b-q5_0", - "5.0 GB" - ], - [ - "7b-q5_1", - "5.4 GB" - ], - [ - "7b-q5_K_M", - "5.1 GB" - ], - [ - "7b-q5_K_S", - "5.0 GB" - ], - [ - "7b-q6_K", - "5.9 GB" - ], - [ - "7b-q8_0", - "7.7 GB" - ] - ], - "image": false, - "author": "Open Orca" - }, "yi": { "url": "https://ollama.com/library/yi", "description": "Yi 1.5 is a high-performing, bilingual language model.", "tags": [ [ "latest", - "3.5 GB" - ], - [ - "34b", - "19 GB" - ], - [ - "9b", - "5.0 GB" + "3.5\u202fGB" ], [ "6b", - "3.5 GB" + "3.5\u202fGB" + ], + [ + "9b", + "5.0\u202fGB" + ], + [ + "34b", + "19\u202fGB" ], [ "v1.5", - "3.5 GB" - ], - [ - "34b-chat", - "19 GB" - ], - [ - "34b-chat-v1.5-fp16", - "69 GB" - ], - [ - "34b-chat-v1.5-q2_K", - "13 GB" - ], - [ - "34b-chat-v1.5-q3_K_L", - "18 GB" - ], - [ - "34b-chat-v1.5-q3_K_M", - "17 GB" - ], - [ - "34b-chat-v1.5-q3_K_S", - "15 GB" - ], - [ - "34b-chat-v1.5-q4_0", - "19 GB" - ], - [ - "34b-chat-v1.5-q4_1", - "22 GB" - ], - [ - "34b-chat-v1.5-q4_K_M", - "21 GB" - ], - [ - "34b-chat-v1.5-q4_K_S", - "20 GB" - ], - [ - "34b-chat-v1.5-q5_0", - "24 GB" - ], - [ - "34b-chat-v1.5-q5_1", - "26 GB" - ], - [ - "34b-chat-v1.5-q5_K_M", - "24 GB" - ], - [ - "34b-chat-v1.5-q5_K_S", - "24 GB" - ], - [ - "34b-chat-v1.5-q6_K", - "28 GB" - ], - [ - "34b-chat-q6_K", - "28 GB" - ], - [ - "34b-chat-q5_K_M", - "24 GB" - ], - [ - "34b-chat-q4_K_S", - "20 GB" - ], - [ - "34b-chat-q4_K_M", - "21 GB" - ], - [ - "34b-chat-q3_K_M", - "17 GB" - ], - [ - "34b-chat-q3_K_L", - "18 GB" - ], - [ - "34b-chat-v1.5-q8_0", - "37 GB" - ], - [ - "34b-chat-fp16", - "69 GB" - ], - [ - "34b-chat-q4_0", - "19 GB" - ], - [ 
- "34b-chat-q5_K_S", - "24 GB" - ], - [ - "34b-chat-q4_1", - "22 GB" - ], - [ - "34b-chat-q3_K_S", - "15 GB" - ], - [ - "34b-chat-q2_K", - "15 GB" - ], - [ - "34b-chat-q5_0", - "24 GB" - ], - [ - "34b-chat-q5_1", - "26 GB" - ], - [ - "34b-chat-q8_0", - "37 GB" - ], - [ - "34b-v1.5", - "19 GB" - ], - [ - "34b-v1.5-fp16", - "69 GB" - ], - [ - "34b-v1.5-q2_K", - "13 GB" - ], - [ - "34b-v1.5-q3_K_L", - "18 GB" - ], - [ - "34b-v1.5-q3_K_M", - "17 GB" - ], - [ - "34b-v1.5-q3_K_S", - "15 GB" - ], - [ - "34b-v1.5-q4_0", - "19 GB" - ], - [ - "34b-v1.5-q4_1", - "22 GB" - ], - [ - "34b-v1.5-q4_K_M", - "21 GB" - ], - [ - "34b-v1.5-q4_K_S", - "20 GB" - ], - [ - "34b-v1.5-q5_0", - "24 GB" - ], - [ - "34b-v1.5-q5_1", - "26 GB" - ], - [ - "34b-v1.5-q5_K_M", - "24 GB" - ], - [ - "34b-v1.5-q5_K_S", - "24 GB" - ], - [ - "34b-v1.5-q6_K", - "28 GB" - ], - [ - "34b-v1.5-q8_0", - "37 GB" - ], - [ - "9b-chat", - "5.0 GB" - ], - [ - "34b-q2_K", - "15 GB" - ], - [ - "34b-q3_K_L", - "18 GB" - ], - [ - "34b-q3_K_M", - "17 GB" - ], - [ - "34b-q3_K_S", - "15 GB" - ], - [ - "34b-q4_0", - "19 GB" - ], - [ - "34b-q4_1", - "22 GB" - ], - [ - "34b-q4_K_M", - "21 GB" - ], - [ - "34b-q4_K_S", - "20 GB" - ], - [ - "34b-q5_0", - "24 GB" - ], - [ - "34b-q5_1", - "26 GB" - ], - [ - "34b-q5_K_S", - "24 GB" - ], - [ - "34b-q6_K", - "28 GB" - ], - [ - "9b-chat-v1.5-fp16", - "18 GB" - ], - [ - "9b-chat-v1.5-q2_K", - "3.4 GB" - ], - [ - "9b-chat-v1.5-q3_K_L", - "4.7 GB" - ], - [ - "9b-chat-v1.5-q3_K_M", - "4.3 GB" - ], - [ - "9b-chat-v1.5-q3_K_S", - "3.9 GB" - ], - [ - "9b-chat-v1.5-q4_0", - "5.0 GB" - ], - [ - "9b-chat-v1.5-q4_1", - "5.6 GB" - ], - [ - "9b-chat-v1.5-q4_K_M", - "5.3 GB" - ], - [ - "9b-chat-v1.5-q4_K_S", - "5.1 GB" - ], - [ - "9b-chat-v1.5-q5_0", - "6.1 GB" - ], - [ - "9b-chat-v1.5-q5_1", - "6.6 GB" - ], - [ - "9b-chat-v1.5-q5_K_M", - "6.3 GB" - ], - [ - "9b-chat-v1.5-q5_K_S", - "6.1 GB" - ], - [ - "9b-chat-v1.5-q6_K", - "7.2 GB" - ], - [ - "9b-chat-v1.5-q8_0", - "9.4 GB" - ], - [ - "9b-v1.5", - "5.0 GB" - ], - [ - "9b-v1.5-fp16", - "18 GB" - ], - [ - "9b-v1.5-q2_K", - "3.4 GB" - ], - [ - "9b-v1.5-q3_K_L", - "4.7 GB" - ], - [ - "9b-v1.5-q3_K_M", - "4.3 GB" - ], - [ - "9b-v1.5-q3_K_S", - "3.9 GB" - ], - [ - "9b-v1.5-q4_0", - "5.0 GB" - ], - [ - "9b-v1.5-q4_1", - "5.6 GB" - ], - [ - "9b-v1.5-q4_K_M", - "5.3 GB" - ], - [ - "9b-v1.5-q4_K_S", - "5.1 GB" - ], - [ - "9b-v1.5-q5_0", - "6.1 GB" - ], - [ - "9b-v1.5-q5_1", - "6.6 GB" - ], - [ - "9b-v1.5-q5_K_M", - "6.3 GB" - ], - [ - "9b-v1.5-q5_K_S", - "6.1 GB" - ], - [ - "9b-v1.5-q6_K", - "7.2 GB" - ], - [ - "9b-v1.5-q8_0", - "9.4 GB" + "3.5\u202fGB" ], [ "6b-200k", - "3.5 GB" - ], - [ - "6b-200k-fp16", - "12 GB" - ], - [ - "6b-200k-q2_K", - "2.6 GB" - ], - [ - "6b-200k-q3_K_L", - "3.2 GB" - ], - [ - "6b-200k-q3_K_M", - "3.0 GB" - ], - [ - "6b-200k-q3_K_S", - "2.7 GB" - ], - [ - "6b-200k-q4_0", - "3.5 GB" - ], - [ - "6b-200k-q4_1", - "3.8 GB" - ], - [ - "6b-200k-q4_K_M", - "3.7 GB" - ], - [ - "6b-200k-q4_K_S", - "3.5 GB" - ], - [ - "6b-200k-q5_0", - "4.2 GB" - ], - [ - "6b-200k-q5_1", - "4.6 GB" - ], - [ - "6b-200k-q5_K_M", - "4.3 GB" - ], - [ - "6b-200k-q5_K_S", - "4.2 GB" - ], - [ - "6b-200k-q6_K", - "5.0 GB" - ], - [ - "6b-200k-q8_0", - "6.4 GB" + "3.5\u202fGB" ], [ "6b-chat", - "3.5 GB" - ], - [ - "6b-chat-fp16", - "12 GB" - ], - [ - "6b-chat-q2_K", - "2.6 GB" - ], - [ - "6b-chat-q3_K_L", - "3.2 GB" - ], - [ - "6b-chat-q3_K_M", - "3.0 GB" - ], - [ - "6b-chat-q3_K_S", - "2.7 GB" - ], - [ - "6b-chat-q4_0", - "3.5 GB" - ], - [ - "6b-chat-q4_1", - "3.8 GB" - ], - [ - 
"6b-chat-q4_K_M", - "3.7 GB" - ], - [ - "6b-chat-q4_K_S", - "3.5 GB" - ], - [ - "6b-chat-q5_0", - "4.2 GB" - ], - [ - "6b-chat-q5_1", - "4.6 GB" - ], - [ - "6b-chat-q5_K_M", - "4.3 GB" - ], - [ - "6b-chat-q5_K_S", - "4.2 GB" - ], - [ - "6b-chat-q6_K", - "5.0 GB" - ], - [ - "6b-chat-v1.5-q5_0", - "4.2 GB" - ], - [ - "6b-chat-v1.5-q2_K", - "2.3 GB" - ], - [ - "6b-chat-v1.5-q4_K_M", - "3.7 GB" - ], - [ - "6b-chat-v1.5-q4_1", - "3.8 GB" - ], - [ - "6b-chat-v1.5-q3_K_L", - "3.2 GB" - ], - [ - "6b-chat-v1.5-q4_K_S", - "3.5 GB" - ], - [ - "6b-chat-q8_0", - "6.4 GB" - ], - [ - "6b-chat-v1.5-q3_K_S", - "2.7 GB" - ], - [ - "6b-chat-v1.5-q3_K_M", - "3.0 GB" - ], - [ - "6b-chat-v1.5-fp16", - "12 GB" - ], - [ - "6b-chat-v1.5-q4_0", - "3.5 GB" - ], - [ - "6b-chat-v1.5-q5_1", - "4.6 GB" - ], - [ - "6b-chat-v1.5-q5_K_M", - "4.3 GB" - ], - [ - "6b-chat-v1.5-q5_K_S", - "4.2 GB" - ], - [ - "6b-chat-v1.5-q6_K", - "5.0 GB" - ], - [ - "6b-chat-v1.5-q8_0", - "6.4 GB" + "3.5\u202fGB" ], [ "6b-v1.5", - "3.5 GB" + "3.5\u202fGB" ], [ - "6b-v1.5-fp16", - "12 GB" + "9b-chat", + "5.0\u202fGB" ], [ - "6b-v1.5-q2_K", - "2.3 GB" + "9b-v1.5", + "5.0\u202fGB" ], [ - "6b-v1.5-q3_K_L", - "3.2 GB" + "34b-chat", + "19\u202fGB" ], [ - "6b-v1.5-q3_K_M", - "3.0 GB" - ], - [ - "6b-v1.5-q3_K_S", - "2.7 GB" - ], - [ - "6b-v1.5-q4_0", - "3.5 GB" - ], - [ - "6b-v1.5-q4_1", - "3.8 GB" - ], - [ - "6b-v1.5-q4_K_M", - "3.7 GB" - ], - [ - "6b-v1.5-q4_K_S", - "3.5 GB" - ], - [ - "6b-v1.5-q5_0", - "4.2 GB" - ], - [ - "6b-v1.5-q5_1", - "4.6 GB" - ], - [ - "6b-v1.5-q5_K_M", - "4.3 GB" - ], - [ - "6b-v1.5-q5_K_S", - "4.2 GB" - ], - [ - "6b-v1.5-q6_K", - "5.0 GB" - ], - [ - "6b-v1.5-q8_0", - "6.4 GB" + "34b-v1.5", + "19\u202fGB" ], [ "6b-fp16", - "12 GB" + "12\u202fGB" ], [ "6b-q2_K", - "2.6 GB" - ], - [ - "6b-q3_K_L", - "3.2 GB" - ], - [ - "6b-q3_K_M", - "3.0 GB" + "2.6\u202fGB" ], [ "6b-q3_K_S", - "2.7 GB" + "2.7\u202fGB" + ], + [ + "6b-q3_K_M", + "3.0\u202fGB" + ], + [ + "6b-q3_K_L", + "3.2\u202fGB" ], [ "6b-q4_0", - "3.5 GB" + "3.5\u202fGB" ], [ "6b-q4_1", - "3.8 GB" - ], - [ - "6b-q4_K_M", - "3.7 GB" + "3.8\u202fGB" ], [ "6b-q4_K_S", - "3.5 GB" + "3.5\u202fGB" + ], + [ + "6b-q4_K_M", + "3.7\u202fGB" ], [ "6b-q5_0", - "4.2 GB" + "4.2\u202fGB" ], [ "6b-q5_1", - "4.6 GB" - ], - [ - "6b-q5_K_M", - "4.3 GB" + "4.6\u202fGB" ], [ "6b-q5_K_S", - "4.2 GB" + "4.2\u202fGB" + ], + [ + "6b-q5_K_M", + "4.3\u202fGB" ], [ "6b-q6_K", - "5.0 GB" + "5.0\u202fGB" ], [ "6b-q8_0", - "6.4 GB" + "6.4\u202fGB" + ], + [ + "34b-q2_K", + "15\u202fGB" + ], + [ + "34b-q3_K_S", + "15\u202fGB" + ], + [ + "34b-q3_K_M", + "17\u202fGB" + ], + [ + "34b-q3_K_L", + "18\u202fGB" + ], + [ + "34b-q4_0", + "19\u202fGB" + ], + [ + "34b-q4_1", + "22\u202fGB" + ], + [ + "34b-q4_K_S", + "20\u202fGB" + ], + [ + "34b-q4_K_M", + "21\u202fGB" + ], + [ + "34b-q5_0", + "24\u202fGB" + ], + [ + "34b-q5_1", + "26\u202fGB" + ], + [ + "34b-q5_K_S", + "24\u202fGB" + ], + [ + "34b-q6_K", + "28\u202fGB" + ], + [ + "6b-200k-fp16", + "12\u202fGB" + ], + [ + "6b-200k-q2_K", + "2.6\u202fGB" + ], + [ + "6b-200k-q3_K_S", + "2.7\u202fGB" + ], + [ + "6b-200k-q3_K_M", + "3.0\u202fGB" + ], + [ + "6b-200k-q3_K_L", + "3.2\u202fGB" + ], + [ + "6b-200k-q4_0", + "3.5\u202fGB" + ], + [ + "6b-200k-q4_1", + "3.8\u202fGB" + ], + [ + "6b-200k-q4_K_S", + "3.5\u202fGB" + ], + [ + "6b-200k-q4_K_M", + "3.7\u202fGB" + ], + [ + "6b-200k-q5_0", + "4.2\u202fGB" + ], + [ + "6b-200k-q5_1", + "4.6\u202fGB" + ], + [ + "6b-200k-q5_K_S", + "4.2\u202fGB" + ], + [ + "6b-200k-q5_K_M", + "4.3\u202fGB" + ], + [ + "6b-200k-q6_K", + 
"5.0\u202fGB" + ], + [ + "6b-200k-q8_0", + "6.4\u202fGB" + ], + [ + "6b-chat-fp16", + "12\u202fGB" + ], + [ + "6b-chat-q2_K", + "2.6\u202fGB" + ], + [ + "6b-chat-q3_K_S", + "2.7\u202fGB" + ], + [ + "6b-chat-q3_K_M", + "3.0\u202fGB" + ], + [ + "6b-chat-q3_K_L", + "3.2\u202fGB" + ], + [ + "6b-chat-q4_0", + "3.5\u202fGB" + ], + [ + "6b-chat-q4_1", + "3.8\u202fGB" + ], + [ + "6b-chat-q4_K_S", + "3.5\u202fGB" + ], + [ + "6b-chat-q4_K_M", + "3.7\u202fGB" + ], + [ + "6b-chat-q5_0", + "4.2\u202fGB" + ], + [ + "6b-chat-q5_1", + "4.6\u202fGB" + ], + [ + "6b-chat-q5_K_S", + "4.2\u202fGB" + ], + [ + "6b-chat-q5_K_M", + "4.3\u202fGB" + ], + [ + "6b-chat-q6_K", + "5.0\u202fGB" + ], + [ + "6b-chat-q8_0", + "6.4\u202fGB" + ], + [ + "6b-chat-v1.5-fp16", + "12\u202fGB" + ], + [ + "6b-chat-v1.5-q2_K", + "2.3\u202fGB" + ], + [ + "6b-chat-v1.5-q3_K_S", + "2.7\u202fGB" + ], + [ + "6b-chat-v1.5-q3_K_M", + "3.0\u202fGB" + ], + [ + "6b-chat-v1.5-q3_K_L", + "3.2\u202fGB" + ], + [ + "6b-chat-v1.5-q4_0", + "3.5\u202fGB" + ], + [ + "6b-chat-v1.5-q4_1", + "3.8\u202fGB" + ], + [ + "6b-chat-v1.5-q4_K_S", + "3.5\u202fGB" + ], + [ + "6b-chat-v1.5-q4_K_M", + "3.7\u202fGB" + ], + [ + "6b-chat-v1.5-q5_0", + "4.2\u202fGB" + ], + [ + "6b-chat-v1.5-q5_1", + "4.6\u202fGB" + ], + [ + "6b-chat-v1.5-q5_K_S", + "4.2\u202fGB" + ], + [ + "6b-chat-v1.5-q5_K_M", + "4.3\u202fGB" + ], + [ + "6b-chat-v1.5-q6_K", + "5.0\u202fGB" + ], + [ + "6b-chat-v1.5-q8_0", + "6.4\u202fGB" + ], + [ + "6b-v1.5-fp16", + "12\u202fGB" + ], + [ + "6b-v1.5-q2_K", + "2.3\u202fGB" + ], + [ + "6b-v1.5-q3_K_S", + "2.7\u202fGB" + ], + [ + "6b-v1.5-q3_K_M", + "3.0\u202fGB" + ], + [ + "6b-v1.5-q3_K_L", + "3.2\u202fGB" + ], + [ + "6b-v1.5-q4_0", + "3.5\u202fGB" + ], + [ + "6b-v1.5-q4_1", + "3.8\u202fGB" + ], + [ + "6b-v1.5-q4_K_S", + "3.5\u202fGB" + ], + [ + "6b-v1.5-q4_K_M", + "3.7\u202fGB" + ], + [ + "6b-v1.5-q5_0", + "4.2\u202fGB" + ], + [ + "6b-v1.5-q5_1", + "4.6\u202fGB" + ], + [ + "6b-v1.5-q5_K_S", + "4.2\u202fGB" + ], + [ + "6b-v1.5-q5_K_M", + "4.3\u202fGB" + ], + [ + "6b-v1.5-q6_K", + "5.0\u202fGB" + ], + [ + "6b-v1.5-q8_0", + "6.4\u202fGB" + ], + [ + "9b-chat-v1.5-fp16", + "18\u202fGB" + ], + [ + "9b-chat-v1.5-q2_K", + "3.4\u202fGB" + ], + [ + "9b-chat-v1.5-q3_K_S", + "3.9\u202fGB" + ], + [ + "9b-chat-v1.5-q3_K_M", + "4.3\u202fGB" + ], + [ + "9b-chat-v1.5-q3_K_L", + "4.7\u202fGB" + ], + [ + "9b-chat-v1.5-q4_0", + "5.0\u202fGB" + ], + [ + "9b-chat-v1.5-q4_1", + "5.6\u202fGB" + ], + [ + "9b-chat-v1.5-q4_K_S", + "5.1\u202fGB" + ], + [ + "9b-chat-v1.5-q4_K_M", + "5.3\u202fGB" + ], + [ + "9b-chat-v1.5-q5_0", + "6.1\u202fGB" + ], + [ + "9b-chat-v1.5-q5_1", + "6.6\u202fGB" + ], + [ + "9b-chat-v1.5-q5_K_S", + "6.1\u202fGB" + ], + [ + "9b-chat-v1.5-q5_K_M", + "6.3\u202fGB" + ], + [ + "9b-chat-v1.5-q6_K", + "7.2\u202fGB" + ], + [ + "9b-chat-v1.5-q8_0", + "9.4\u202fGB" + ], + [ + "9b-v1.5-fp16", + "18\u202fGB" + ], + [ + "9b-v1.5-q2_K", + "3.4\u202fGB" + ], + [ + "9b-v1.5-q3_K_S", + "3.9\u202fGB" + ], + [ + "9b-v1.5-q3_K_M", + "4.3\u202fGB" + ], + [ + "9b-v1.5-q3_K_L", + "4.7\u202fGB" + ], + [ + "9b-v1.5-q4_0", + "5.0\u202fGB" + ], + [ + "9b-v1.5-q4_1", + "5.6\u202fGB" + ], + [ + "9b-v1.5-q4_K_S", + "5.1\u202fGB" + ], + [ + "9b-v1.5-q4_K_M", + "5.3\u202fGB" + ], + [ + "9b-v1.5-q5_0", + "6.1\u202fGB" + ], + [ + "9b-v1.5-q5_1", + "6.6\u202fGB" + ], + [ + "9b-v1.5-q5_K_S", + "6.1\u202fGB" + ], + [ + "9b-v1.5-q5_K_M", + "6.3\u202fGB" + ], + [ + "9b-v1.5-q6_K", + "7.2\u202fGB" + ], + [ + "9b-v1.5-q8_0", + "9.4\u202fGB" + ], + [ + "34b-chat-fp16", + "69\u202fGB" + ], + [ + 
"34b-chat-q2_K", + "15\u202fGB" + ], + [ + "34b-chat-q3_K_S", + "15\u202fGB" + ], + [ + "34b-chat-q3_K_M", + "17\u202fGB" + ], + [ + "34b-chat-q3_K_L", + "18\u202fGB" + ], + [ + "34b-chat-q4_0", + "19\u202fGB" + ], + [ + "34b-chat-q4_1", + "22\u202fGB" + ], + [ + "34b-chat-q4_K_S", + "20\u202fGB" + ], + [ + "34b-chat-q4_K_M", + "21\u202fGB" + ], + [ + "34b-chat-q5_0", + "24\u202fGB" + ], + [ + "34b-chat-q5_1", + "26\u202fGB" + ], + [ + "34b-chat-q5_K_S", + "24\u202fGB" + ], + [ + "34b-chat-q5_K_M", + "24\u202fGB" + ], + [ + "34b-chat-q6_K", + "28\u202fGB" + ], + [ + "34b-chat-q8_0", + "37\u202fGB" + ], + [ + "34b-chat-v1.5-fp16", + "69\u202fGB" + ], + [ + "34b-chat-v1.5-q2_K", + "13\u202fGB" + ], + [ + "34b-chat-v1.5-q3_K_S", + "15\u202fGB" + ], + [ + "34b-chat-v1.5-q3_K_M", + "17\u202fGB" + ], + [ + "34b-chat-v1.5-q3_K_L", + "18\u202fGB" + ], + [ + "34b-chat-v1.5-q4_0", + "19\u202fGB" + ], + [ + "34b-chat-v1.5-q4_1", + "22\u202fGB" + ], + [ + "34b-chat-v1.5-q4_K_S", + "20\u202fGB" + ], + [ + "34b-chat-v1.5-q4_K_M", + "21\u202fGB" + ], + [ + "34b-chat-v1.5-q5_0", + "24\u202fGB" + ], + [ + "34b-chat-v1.5-q5_1", + "26\u202fGB" + ], + [ + "34b-chat-v1.5-q5_K_S", + "24\u202fGB" + ], + [ + "34b-chat-v1.5-q5_K_M", + "24\u202fGB" + ], + [ + "34b-chat-v1.5-q6_K", + "28\u202fGB" + ], + [ + "34b-chat-v1.5-q8_0", + "37\u202fGB" + ], + [ + "34b-v1.5-fp16", + "69\u202fGB" + ], + [ + "34b-v1.5-q2_K", + "13\u202fGB" + ], + [ + "34b-v1.5-q3_K_S", + "15\u202fGB" + ], + [ + "34b-v1.5-q3_K_M", + "17\u202fGB" + ], + [ + "34b-v1.5-q3_K_L", + "18\u202fGB" + ], + [ + "34b-v1.5-q4_0", + "19\u202fGB" + ], + [ + "34b-v1.5-q4_1", + "22\u202fGB" + ], + [ + "34b-v1.5-q4_K_S", + "20\u202fGB" + ], + [ + "34b-v1.5-q4_K_M", + "21\u202fGB" + ], + [ + "34b-v1.5-q5_0", + "24\u202fGB" + ], + [ + "34b-v1.5-q5_1", + "26\u202fGB" + ], + [ + "34b-v1.5-q5_K_S", + "24\u202fGB" + ], + [ + "34b-v1.5-q5_K_M", + "24\u202fGB" + ], + [ + "34b-v1.5-q6_K", + "28\u202fGB" + ], + [ + "34b-v1.5-q8_0", + "37\u202fGB" ] ], "image": false, "author": "01.AI" }, - "zephyr": { - "url": "https://ollama.com/library/zephyr", - "description": "Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models that are trained to act as helpful assistants.", + "mistral-openorca": { + "url": "https://ollama.com/library/mistral-openorca", + "description": "Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the Mistral 7B model using the OpenOrca dataset.", "tags": [ [ "latest", - "4.1 GB" - ], - [ - "141b", - "80 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" + "4.1\u202fGB" ], [ - "141b-v0.1", - "80 GB" + "7b-fp16", + "14\u202fGB" ], [ - "141b-v0.1-fp16", - "281 GB" + "7b-q2_K", + "3.1\u202fGB" ], [ - "141b-v0.1-q2_K", - "52 GB" + "7b-q3_K_S", + "3.2\u202fGB" ], [ - "141b-v0.1-q4_0", - "80 GB" + "7b-q3_K_M", + "3.5\u202fGB" ], [ - "141b-v0.1-q8_0", - "149 GB" + "7b-q3_K_L", + "3.8\u202fGB" ], [ - "7b-alpha", - "4.1 GB" + "7b-q4_0", + "4.1\u202fGB" ], [ - "7b-alpha-fp16", - "14 GB" + "7b-q4_1", + "4.6\u202fGB" ], [ - "7b-alpha-q2_K", - "3.1 GB" + "7b-q4_K_S", + "4.1\u202fGB" ], [ - "7b-alpha-q3_K_L", - "3.8 GB" + "7b-q4_K_M", + "4.4\u202fGB" ], [ - "7b-alpha-q3_K_M", - "3.5 GB" + "7b-q5_0", + "5.0\u202fGB" ], [ - "7b-alpha-q3_K_S", - "3.2 GB" + "7b-q5_1", + "5.4\u202fGB" ], [ - "7b-alpha-q4_0", - "4.1 GB" + "7b-q5_K_S", + "5.0\u202fGB" ], [ - "7b-alpha-q4_1", - "4.6 GB" + "7b-q5_K_M", + "5.1\u202fGB" ], [ - "7b-alpha-q4_K_M", - "4.4 GB" + "7b-q6_K", + "5.9\u202fGB" ], [ - "7b-alpha-q4_K_S", - "4.1 GB" - ], - [ - "7b-alpha-q5_0", - 
"5.0 GB" - ], - [ - "7b-alpha-q5_1", - "5.4 GB" - ], - [ - "7b-alpha-q5_K_M", - "5.1 GB" - ], - [ - "7b-alpha-q5_K_S", - "5.0 GB" - ], - [ - "7b-alpha-q6_K", - "5.9 GB" - ], - [ - "7b-alpha-q8_0", - "7.7 GB" - ], - [ - "7b-beta", - "4.1 GB" - ], - [ - "7b-beta-fp16", - "14 GB" - ], - [ - "7b-beta-q2_K", - "3.1 GB" - ], - [ - "7b-beta-q3_K_L", - "3.8 GB" - ], - [ - "7b-beta-q3_K_M", - "3.5 GB" - ], - [ - "7b-beta-q3_K_S", - "3.2 GB" - ], - [ - "7b-beta-q4_0", - "4.1 GB" - ], - [ - "7b-beta-q4_1", - "4.6 GB" - ], - [ - "7b-beta-q4_K_M", - "4.4 GB" - ], - [ - "7b-beta-q4_K_S", - "4.1 GB" - ], - [ - "7b-beta-q5_0", - "5.0 GB" - ], - [ - "7b-beta-q5_1", - "5.4 GB" - ], - [ - "7b-beta-q5_K_M", - "5.1 GB" - ], - [ - "7b-beta-q5_K_S", - "5.0 GB" - ], - [ - "7b-beta-q6_K", - "5.9 GB" - ], - [ - "7b-beta-q8_0", - "7.7 GB" + "7b-q8_0", + "7.7\u202fGB" ] ], "image": false, - "author": "Hugging Face H4" - }, - "llama2-chinese": { - "url": "https://ollama.com/library/llama2-chinese", - "description": "Llama 2 based model fine tuned to improve Chinese dialogue ability.", - "tags": [ - [ - "latest", - "3.8 GB" - ], - [ - "13b", - "7.4 GB" - ], - [ - "7b", - "3.8 GB" - ], - [ - "13b-chat", - "7.4 GB" - ], - [ - "13b-chat-fp16", - "26 GB" - ], - [ - "13b-chat-q2_K", - "5.4 GB" - ], - [ - "13b-chat-q3_K_L", - "6.9 GB" - ], - [ - "13b-chat-q3_K_M", - "6.3 GB" - ], - [ - "13b-chat-q3_K_S", - "5.7 GB" - ], - [ - "13b-chat-q4_0", - "7.4 GB" - ], - [ - "13b-chat-q4_1", - "8.2 GB" - ], - [ - "13b-chat-q4_K_M", - "7.9 GB" - ], - [ - "13b-chat-q4_K_S", - "7.4 GB" - ], - [ - "13b-chat-q5_0", - "9.0 GB" - ], - [ - "13b-chat-q5_1", - "9.8 GB" - ], - [ - "13b-chat-q5_K_M", - "9.2 GB" - ], - [ - "13b-chat-q5_K_S", - "9.0 GB" - ], - [ - "13b-chat-q6_K", - "11 GB" - ], - [ - "13b-chat-q8_0", - "14 GB" - ], - [ - "7b-chat", - "3.8 GB" - ], - [ - "7b-chat-fp16", - "13 GB" - ], - [ - "7b-chat-q2_K", - "2.8 GB" - ], - [ - "7b-chat-q3_K_L", - "3.6 GB" - ], - [ - "7b-chat-q3_K_M", - "3.3 GB" - ], - [ - "7b-chat-q3_K_S", - "2.9 GB" - ], - [ - "7b-chat-q4_0", - "3.8 GB" - ], - [ - "7b-chat-q4_1", - "4.2 GB" - ], - [ - "7b-chat-q4_K_M", - "4.1 GB" - ], - [ - "7b-chat-q4_K_S", - "3.9 GB" - ], - [ - "7b-chat-q5_0", - "4.7 GB" - ], - [ - "7b-chat-q5_1", - "5.1 GB" - ], - [ - "7b-chat-q5_K_M", - "4.8 GB" - ], - [ - "7b-chat-q5_K_S", - "4.7 GB" - ], - [ - "7b-chat-q6_K", - "5.5 GB" - ], - [ - "7b-chat-q8_0", - "7.2 GB" - ] - ], - "image": false, - "author": "Meta" + "author": "Open Orca" }, "llava-llama3": { "url": "https://ollama.com/library/llava-llama3", @@ -10001,1555 +10333,1607 @@ "tags": [ [ "latest", - "5.5 GB" + "5.5\u202fGB" ], [ "8b", - "5.5 GB" + "5.5\u202fGB" ], [ "8b-v1.1-fp16", - "17 GB" + "17\u202fGB" ], [ "8b-v1.1-q4_0", - "5.5 GB" + "5.5\u202fGB" ] ], "image": true, "author": "Xtuner" }, - "vicuna": { - "url": "https://ollama.com/library/vicuna", - "description": "General use chat model based on Llama and Llama 2 with 2K to 16K context sizes.", - "tags": [ - [ - "latest", - "3.8 GB" - ], - [ - "33b", - "18 GB" - ], - [ - "13b", - "7.4 GB" - ], - [ - "7b", - "3.8 GB" - ], - [ - "13b-16k", - "7.4 GB" - ], - [ - "33b-fp16", - "65 GB" - ], - [ - "33b-q2_K", - "14 GB" - ], - [ - "33b-q3_K_L", - "17 GB" - ], - [ - "33b-q3_K_M", - "16 GB" - ], - [ - "33b-q3_K_S", - "14 GB" - ], - [ - "33b-q4_0", - "18 GB" - ], - [ - "33b-q4_1", - "20 GB" - ], - [ - "33b-q4_K_M", - "20 GB" - ], - [ - "33b-q4_K_S", - "18 GB" - ], - [ - "33b-q5_0", - "22 GB" - ], - [ - "33b-q5_1", - "24 GB" - ], - [ - "33b-q5_K_M", - "23 GB" - ], - [ - 
"33b-q5_K_S", - "22 GB" - ], - [ - "33b-q6_K", - "27 GB" - ], - [ - "33b-q8_0", - "35 GB" - ], - [ - "13b-v1.5-16k-fp16", - "26 GB" - ], - [ - "13b-v1.5-16k-q2_K", - "5.4 GB" - ], - [ - "13b-v1.5-16k-q3_K_L", - "6.9 GB" - ], - [ - "13b-v1.5-16k-q3_K_M", - "6.3 GB" - ], - [ - "13b-v1.5-16k-q3_K_S", - "5.7 GB" - ], - [ - "13b-v1.5-16k-q4_0", - "7.4 GB" - ], - [ - "13b-v1.5-16k-q4_1", - "8.2 GB" - ], - [ - "13b-v1.5-16k-q4_K_M", - "7.9 GB" - ], - [ - "13b-v1.5-16k-q4_K_S", - "7.4 GB" - ], - [ - "13b-v1.5-16k-q5_0", - "9.0 GB" - ], - [ - "13b-v1.5-16k-q5_1", - "9.8 GB" - ], - [ - "13b-v1.5-16k-q5_K_M", - "9.2 GB" - ], - [ - "13b-v1.5-q3_K_S", - "5.7 GB" - ], - [ - "13b-v1.5-q2_K", - "5.4 GB" - ], - [ - "13b-v1.5-q3_K_M", - "6.3 GB" - ], - [ - "13b-v1.5-16k-q5_K_S", - "9.0 GB" - ], - [ - "13b-v1.5-16k-q6_K", - "11 GB" - ], - [ - "13b-v1.5-q3_K_L", - "6.9 GB" - ], - [ - "13b-v1.5-16k-q8_0", - "14 GB" - ], - [ - "13b-v1.5-fp16", - "26 GB" - ], - [ - "13b-v1.5-q4_0", - "7.4 GB" - ], - [ - "13b-v1.5-q4_1", - "8.2 GB" - ], - [ - "13b-v1.5-q4_K_M", - "7.9 GB" - ], - [ - "13b-v1.5-q4_K_S", - "7.4 GB" - ], - [ - "13b-v1.5-q5_0", - "9.0 GB" - ], - [ - "13b-v1.5-q5_1", - "9.8 GB" - ], - [ - "13b-v1.5-q5_K_M", - "9.2 GB" - ], - [ - "13b-v1.5-q5_K_S", - "9.0 GB" - ], - [ - "13b-v1.5-q6_K", - "11 GB" - ], - [ - "13b-v1.5-q8_0", - "14 GB" - ], - [ - "7b-16k", - "3.8 GB" - ], - [ - "13b-fp16", - "26 GB" - ], - [ - "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" - ], - [ - "13b-q3_K_S", - "5.7 GB" - ], - [ - "13b-q4_0", - "7.4 GB" - ], - [ - "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" - ], - [ - "13b-q4_K_S", - "7.4 GB" - ], - [ - "13b-q5_0", - "9.0 GB" - ], - [ - "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" - ], - [ - "13b-q5_K_S", - "9.0 GB" - ], - [ - "13b-q6_K", - "11 GB" - ], - [ - "13b-q8_0", - "14 GB" - ], - [ - "7b-v1.5-fp16", - "13 GB" - ], - [ - "7b-v1.5-q2_K", - "2.8 GB" - ], - [ - "7b-v1.5-q3_K_L", - "3.6 GB" - ], - [ - "7b-v1.5-q3_K_M", - "3.3 GB" - ], - [ - "7b-v1.5-q3_K_S", - "2.9 GB" - ], - [ - "7b-v1.5-q4_0", - "3.8 GB" - ], - [ - "7b-v1.5-q4_1", - "4.2 GB" - ], - [ - "7b-v1.5-16k-q8_0", - "7.2 GB" - ], - [ - "7b-v1.5-16k-q5_K_M", - "4.8 GB" - ], - [ - "7b-v1.5-q5_K_M", - "4.8 GB" - ], - [ - "7b-v1.5-q4_K_S", - "3.9 GB" - ], - [ - "7b-v1.5-16k-q4_1", - "4.2 GB" - ], - [ - "7b-v1.5-q6_K", - "5.5 GB" - ], - [ - "7b-v1.5-q5_K_S", - "4.7 GB" - ], - [ - "7b-v1.5-q4_K_M", - "4.1 GB" - ], - [ - "7b-v1.5-q8_0", - "7.2 GB" - ], - [ - "7b-v1.5-16k-q3_K_S", - "2.9 GB" - ], - [ - "7b-v1.5-16k-fp16", - "13 GB" - ], - [ - "7b-v1.5-16k-q4_K_M", - "4.1 GB" - ], - [ - "7b-v1.5-16k-q2_K", - "2.8 GB" - ], - [ - "7b-v1.5-16k-q4_0", - "3.8 GB" - ], - [ - "7b-v1.5-16k-q4_K_S", - "3.9 GB" - ], - [ - "7b-v1.5-16k-q3_K_L", - "3.6 GB" - ], - [ - "7b-v1.5-16k-q5_K_S", - "4.7 GB" - ], - [ - "7b-v1.5-16k-q5_0", - "4.7 GB" - ], - [ - "7b-v1.5-16k-q3_K_M", - "3.3 GB" - ], - [ - "7b-v1.5-q5_1", - "5.1 GB" - ], - [ - "7b-v1.5-q5_0", - "4.7 GB" - ], - [ - "7b-v1.5-16k-q6_K", - "5.5 GB" - ], - [ - "7b-v1.5-16k-q5_1", - "5.1 GB" - ], - [ - "7b-fp16", - "13 GB" - ], - [ - "7b-q2_K", - "2.8 GB" - ], - [ - "7b-q3_K_L", - "3.6 GB" - ], - [ - "7b-q3_K_M", - "3.3 GB" - ], - [ - "7b-q3_K_S", - "2.9 GB" - ], - [ - "7b-q4_0", - "3.8 GB" - ], - [ - "7b-q4_1", - "4.2 GB" - ], - [ - "7b-q4_K_M", - "4.1 GB" - ], - [ - "7b-q4_K_S", - "3.9 GB" - ], - [ - "7b-q5_0", - "4.7 GB" - ], - [ - "7b-q5_1", - "5.1 GB" - ], - [ - "7b-q5_K_M", - "4.8 GB" - ], - [ - "7b-q5_K_S", - 
"4.7 GB" - ], - [ - "7b-q6_K", - "5.5 GB" - ], - [ - "7b-q8_0", - "7.2 GB" - ] - ], - "image": false, - "author": "lmsys.org" - }, - "nous-hermes2": { - "url": "https://ollama.com/library/nous-hermes2", - "description": "The powerful family of models by Nous Research that excels at scientific discussion and coding tasks.", - "tags": [ - [ - "latest", - "6.1 GB" - ], - [ - "34b", - "19 GB" - ], - [ - "10.7b", - "6.1 GB" - ], - [ - "34b-yi-fp16", - "69 GB" - ], - [ - "34b-yi-q2_K", - "15 GB" - ], - [ - "34b-yi-q3_K_L", - "18 GB" - ], - [ - "34b-yi-q3_K_M", - "17 GB" - ], - [ - "34b-yi-q3_K_S", - "15 GB" - ], - [ - "34b-yi-q4_0", - "19 GB" - ], - [ - "34b-yi-q4_1", - "22 GB" - ], - [ - "34b-yi-q4_K_M", - "21 GB" - ], - [ - "34b-yi-q4_K_S", - "20 GB" - ], - [ - "34b-yi-q5_0", - "24 GB" - ], - [ - "34b-yi-q5_1", - "26 GB" - ], - [ - "34b-yi-q5_K_M", - "24 GB" - ], - [ - "34b-yi-q5_K_S", - "24 GB" - ], - [ - "34b-yi-q6_K", - "28 GB" - ], - [ - "34b-yi-q8_0", - "37 GB" - ], - [ - "10.7b-solar-fp16", - "21 GB" - ], - [ - "10.7b-solar-q2_K", - "4.5 GB" - ], - [ - "10.7b-solar-q3_K_L", - "5.7 GB" - ], - [ - "10.7b-solar-q3_K_M", - "5.2 GB" - ], - [ - "10.7b-solar-q3_K_S", - "4.7 GB" - ], - [ - "10.7b-solar-q4_0", - "6.1 GB" - ], - [ - "10.7b-solar-q4_1", - "6.7 GB" - ], - [ - "10.7b-solar-q4_K_M", - "6.5 GB" - ], - [ - "10.7b-solar-q4_K_S", - "6.1 GB" - ], - [ - "10.7b-solar-q5_0", - "7.4 GB" - ], - [ - "10.7b-solar-q5_1", - "8.1 GB" - ], - [ - "10.7b-solar-q5_K_M", - "7.6 GB" - ], - [ - "10.7b-solar-q5_K_S", - "7.4 GB" - ], - [ - "10.7b-solar-q6_K", - "8.8 GB" - ], - [ - "10.7b-solar-q8_0", - "11 GB" - ] - ], - "image": false, - "author": "Nous Research" - }, - "tinyllama": { - "url": "https://ollama.com/library/tinyllama", - "description": "The TinyLlama project is an open endeavor to train a compact 1.1B Llama model on 3 trillion tokens.", - "tags": [ - [ - "latest", - "638 MB" - ], - [ - "1.1b", - "638 MB" - ], - [ - "chat", - "638 MB" - ], - [ - "v0.6", - "638 MB" - ], - [ - "v1", - "638 MB" - ], - [ - "1.1b-chat", - "638 MB" - ], - [ - "1.1b-chat-v0.6-fp16", - "2.2 GB" - ], - [ - "1.1b-chat-v0.6-q2_K", - "483 MB" - ], - [ - "1.1b-chat-v0.6-q3_K_L", - "593 MB" - ], - [ - "1.1b-chat-v0.6-q3_K_M", - "551 MB" - ], - [ - "1.1b-chat-v0.6-q3_K_S", - "500 MB" - ], - [ - "1.1b-chat-v0.6-q4_0", - "638 MB" - ], - [ - "1.1b-chat-v0.6-q4_1", - "702 MB" - ], - [ - "1.1b-chat-v0.6-q4_K_M", - "669 MB" - ], - [ - "1.1b-chat-v0.6-q4_K_S", - "644 MB" - ], - [ - "1.1b-chat-v0.6-q5_0", - "767 MB" - ], - [ - "1.1b-chat-v0.6-q5_1", - "832 MB" - ], - [ - "1.1b-chat-v0.6-q5_K_M", - "783 MB" - ], - [ - "1.1b-chat-v0.6-q5_K_S", - "767 MB" - ], - [ - "1.1b-chat-v0.6-q6_K", - "904 MB" - ], - [ - "1.1b-chat-v0.6-q8_0", - "1.2 GB" - ], - [ - "1.1b-chat-v1-fp16", - "2.2 GB" - ], - [ - "1.1b-chat-v1-q2_K", - "483 MB" - ], - [ - "1.1b-chat-v1-q3_K_L", - "593 MB" - ], - [ - "1.1b-chat-v1-q3_K_M", - "551 MB" - ], - [ - "1.1b-chat-v1-q3_K_S", - "500 MB" - ], - [ - "1.1b-chat-v1-q4_0", - "638 MB" - ], - [ - "1.1b-chat-v1-q4_1", - "702 MB" - ], - [ - "1.1b-chat-v1-q4_K_M", - "669 MB" - ], - [ - "1.1b-chat-v1-q4_K_S", - "644 MB" - ], - [ - "1.1b-chat-v1-q5_0", - "767 MB" - ], - [ - "1.1b-chat-v1-q5_1", - "832 MB" - ], - [ - "1.1b-chat-v1-q5_K_M", - "783 MB" - ], - [ - "1.1b-chat-v1-q5_K_S", - "767 MB" - ], - [ - "1.1b-chat-v1-q6_K", - "904 MB" - ], - [ - "1.1b-chat-v1-q8_0", - "1.2 GB" - ] - ], - "image": false, - "author": "TinyLlama Team" - }, - "wizard-vicuna-uncensored": { - "url": 
"https://ollama.com/library/wizard-vicuna-uncensored", - "description": "Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on Llama 2 uncensored by Eric Hartford.", - "tags": [ - [ - "latest", - "3.8 GB" - ], - [ - "30b", - "18 GB" - ], - [ - "13b", - "7.4 GB" - ], - [ - "7b", - "3.8 GB" - ], - [ - "30b-fp16", - "65 GB" - ], - [ - "30b-q2_K", - "14 GB" - ], - [ - "30b-q3_K_L", - "17 GB" - ], - [ - "30b-q3_K_M", - "16 GB" - ], - [ - "30b-q3_K_S", - "14 GB" - ], - [ - "30b-q4_0", - "18 GB" - ], - [ - "30b-q4_1", - "20 GB" - ], - [ - "30b-q4_K_M", - "20 GB" - ], - [ - "30b-q4_K_S", - "18 GB" - ], - [ - "30b-q5_0", - "22 GB" - ], - [ - "30b-q5_1", - "24 GB" - ], - [ - "30b-q5_K_M", - "23 GB" - ], - [ - "30b-q5_K_S", - "22 GB" - ], - [ - "30b-q6_K", - "27 GB" - ], - [ - "30b-q8_0", - "35 GB" - ], - [ - "13b-fp16", - "26 GB" - ], - [ - "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" - ], - [ - "13b-q3_K_S", - "5.7 GB" - ], - [ - "13b-q4_0", - "7.4 GB" - ], - [ - "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" - ], - [ - "13b-q4_K_S", - "7.4 GB" - ], - [ - "13b-q5_0", - "9.0 GB" - ], - [ - "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" - ], - [ - "13b-q5_K_S", - "9.0 GB" - ], - [ - "13b-q6_K", - "11 GB" - ], - [ - "13b-q8_0", - "14 GB" - ], - [ - "7b-fp16", - "13 GB" - ], - [ - "7b-q2_K", - "2.8 GB" - ], - [ - "7b-q3_K_L", - "3.6 GB" - ], - [ - "7b-q3_K_M", - "3.3 GB" - ], - [ - "7b-q3_K_S", - "2.9 GB" - ], - [ - "7b-q4_0", - "3.8 GB" - ], - [ - "7b-q4_1", - "4.2 GB" - ], - [ - "7b-q4_K_M", - "4.1 GB" - ], - [ - "7b-q4_K_S", - "3.9 GB" - ], - [ - "7b-q5_0", - "4.7 GB" - ], - [ - "7b-q5_1", - "5.1 GB" - ], - [ - "7b-q5_K_M", - "4.8 GB" - ], - [ - "7b-q5_K_S", - "4.7 GB" - ], - [ - "7b-q6_K", - "5.5 GB" - ], - [ - "7b-q8_0", - "7.2 GB" - ] - ], - "image": false, - "author": "Eric Hartford" - }, - "codestral": { - "url": "https://ollama.com/library/codestral", - "description": "Codestral is Mistral AI\u2019s first-ever code model designed for code generation tasks.", - "tags": [ - [ - "latest", - "13 GB" - ], - [ - "22b", - "13 GB" - ], - [ - "v0.1", - "13 GB" - ], - [ - "22b-v0.1-f16", - "44 GB" - ], - [ - "22b-v0.1-q2_K", - "8.3 GB" - ], - [ - "22b-v0.1-q3_K_L", - "12 GB" - ], - [ - "22b-v0.1-q3_K_M", - "11 GB" - ], - [ - "22b-v0.1-q3_K_S", - "9.6 GB" - ], - [ - "22b-v0.1-q4_0", - "13 GB" - ], - [ - "22b-v0.1-q4_1", - "14 GB" - ], - [ - "22b-v0.1-q4_K_M", - "13 GB" - ], - [ - "22b-v0.1-q4_K_S", - "13 GB" - ], - [ - "22b-v0.1-q5_0", - "15 GB" - ], - [ - "22b-v0.1-q5_1", - "17 GB" - ], - [ - "22b-v0.1-q5_K_M", - "16 GB" - ], - [ - "22b-v0.1-q5_K_S", - "15 GB" - ], - [ - "22b-v0.1-q6_K", - "18 GB" - ], - [ - "22b-v0.1-q8_0", - "24 GB" - ] - ], - "image": false, - "author": "Mistral AI" - }, "starcoder": { "url": "https://ollama.com/library/starcoder", "description": "StarCoder is a code generation model trained on 80+ programming languages.", "tags": [ [ "latest", - "1.8 GB" - ], - [ - "15b", - "9.0 GB" - ], - [ - "7b", - "4.3 GB" - ], - [ - "3b", - "1.8 GB" + "1.8\u202fGB" ], [ "1b", - "726 MB" + "726\u202fMB" ], [ - "15b-base", - "9.0 GB" + "3b", + "1.8\u202fGB" ], [ - "15b-base-fp16", - "32 GB" + "7b", + "4.3\u202fGB" ], [ - "15b-base-q2_K", - "6.7 GB" - ], - [ - "15b-base-q3_K_L", - "9.1 GB" - ], - [ - "15b-base-q3_K_M", - "8.2 GB" - ], - [ - "15b-base-q3_K_S", - "6.9 GB" - ], - [ - "15b-base-q4_0", - "9.0 GB" - ], - [ - "15b-base-q4_1", - "10.0 GB" - ], - [ - "15b-base-q4_K_M", - "10.0 GB" - ], - [ - 
"15b-base-q4_K_S", - "9.1 GB" - ], - [ - "15b-base-q5_0", - "11 GB" - ], - [ - "15b-base-q5_1", - "12 GB" - ], - [ - "15b-base-q5_K_M", - "12 GB" - ], - [ - "15b-base-q5_K_S", - "11 GB" - ], - [ - "15b-base-q6_K", - "13 GB" - ], - [ - "15b-base-q8_0", - "17 GB" - ], - [ - "15b-plus", - "9.0 GB" - ], - [ - "15b-plus-fp16", - "32 GB" - ], - [ - "15b-plus-q2_K", - "6.7 GB" - ], - [ - "15b-plus-q3_K_L", - "9.1 GB" - ], - [ - "15b-plus-q3_K_M", - "8.2 GB" - ], - [ - "15b-plus-q3_K_S", - "6.9 GB" - ], - [ - "15b-plus-q4_0", - "9.0 GB" - ], - [ - "15b-plus-q4_1", - "10.0 GB" - ], - [ - "15b-plus-q4_K_M", - "10.0 GB" - ], - [ - "15b-plus-q4_K_S", - "9.1 GB" - ], - [ - "15b-plus-q5_0", - "11 GB" - ], - [ - "15b-plus-q5_1", - "12 GB" - ], - [ - "15b-plus-q5_K_M", - "12 GB" - ], - [ - "15b-plus-q5_K_S", - "11 GB" - ], - [ - "15b-plus-q6_K", - "13 GB" - ], - [ - "15b-plus-q8_0", - "17 GB" - ], - [ - "7b-base", - "4.3 GB" - ], - [ - "3b-base", - "1.8 GB" + "15b", + "9.0\u202fGB" ], [ "1b-base", - "726 MB" + "726\u202fMB" + ], + [ + "3b-base", + "1.8\u202fGB" + ], + [ + "7b-base", + "4.3\u202fGB" + ], + [ + "15b-base", + "9.0\u202fGB" + ], + [ + "15b-plus", + "9.0\u202fGB" ], [ "15b-fp16", - "32 GB" + "32\u202fGB" ], [ "15b-q2_K", - "6.7 GB" - ], - [ - "15b-q3_K_L", - "9.1 GB" - ], - [ - "15b-q3_K_M", - "8.2 GB" + "6.7\u202fGB" ], [ "15b-q3_K_S", - "6.9 GB" + "6.9\u202fGB" + ], + [ + "15b-q3_K_M", + "8.2\u202fGB" + ], + [ + "15b-q3_K_L", + "9.1\u202fGB" ], [ "15b-q4_0", - "9.0 GB" + "9.0\u202fGB" ], [ "15b-q4_1", - "10.0 GB" - ], - [ - "15b-q4_K_M", - "10.0 GB" + "10.0\u202fGB" ], [ "15b-q4_K_S", - "9.1 GB" + "9.1\u202fGB" + ], + [ + "15b-q4_K_M", + "10.0\u202fGB" ], [ "15b-q5_0", - "11 GB" + "11\u202fGB" ], [ "15b-q5_1", - "12 GB" - ], - [ - "15b-q5_K_M", - "12 GB" + "12\u202fGB" ], [ "15b-q5_K_S", - "11 GB" + "11\u202fGB" + ], + [ + "15b-q5_K_M", + "12\u202fGB" ], [ "15b-q6_K", - "13 GB" + "13\u202fGB" ], [ "15b-q8_0", - "17 GB" - ], - [ - "7b-base-fp16", - "15 GB" - ], - [ - "7b-base-q2_K", - "3.2 GB" - ], - [ - "7b-base-q3_K_L", - "4.3 GB" - ], - [ - "7b-base-q3_K_M", - "3.9 GB" - ], - [ - "7b-base-q3_K_S", - "3.3 GB" - ], - [ - "7b-base-q4_0", - "4.3 GB" - ], - [ - "7b-base-q4_1", - "4.8 GB" - ], - [ - "7b-base-q4_K_M", - "4.8 GB" - ], - [ - "7b-base-q4_K_S", - "4.3 GB" - ], - [ - "7b-base-q5_0", - "5.2 GB" - ], - [ - "7b-base-q5_1", - "5.7 GB" - ], - [ - "7b-base-q5_K_M", - "5.5 GB" - ], - [ - "7b-base-q5_K_S", - "5.2 GB" - ], - [ - "7b-base-q6_K", - "6.2 GB" - ], - [ - "7b-base-q8_0", - "8.0 GB" - ], - [ - "3b-base-fp16", - "6.4 GB" - ], - [ - "3b-base-q2_K", - "1.4 GB" - ], - [ - "3b-base-q3_K_L", - "1.8 GB" - ], - [ - "3b-base-q3_K_M", - "1.7 GB" - ], - [ - "3b-base-q3_K_S", - "1.4 GB" - ], - [ - "3b-base-q4_0", - "1.8 GB" - ], - [ - "3b-base-q4_1", - "2.0 GB" - ], - [ - "3b-base-q4_K_M", - "2.0 GB" - ], - [ - "3b-base-q4_K_S", - "1.8 GB" - ], - [ - "3b-base-q5_0", - "2.2 GB" - ], - [ - "3b-base-q5_1", - "2.4 GB" - ], - [ - "3b-base-q5_K_M", - "2.3 GB" - ], - [ - "3b-base-q5_K_S", - "2.2 GB" - ], - [ - "3b-base-q6_K", - "2.6 GB" - ], - [ - "3b-base-q8_0", - "3.4 GB" + "17\u202fGB" ], [ "1b-base-fp16", - "2.5 GB" + "2.5\u202fGB" ], [ "1b-base-q2_K", - "552 MB" - ], - [ - "1b-base-q3_K_L", - "720 MB" - ], - [ - "1b-base-q3_K_M", - "661 MB" + "552\u202fMB" ], [ "1b-base-q3_K_S", - "575 MB" + "575\u202fMB" + ], + [ + "1b-base-q3_K_M", + "661\u202fMB" + ], + [ + "1b-base-q3_K_L", + "720\u202fMB" ], [ "1b-base-q4_0", - "726 MB" + "726\u202fMB" ], [ "1b-base-q4_1", - "797 MB" - ], - [ - 
"1b-base-q4_K_M", - "792 MB" + "797\u202fMB" ], [ "1b-base-q4_K_S", - "734 MB" + "734\u202fMB" + ], + [ + "1b-base-q4_K_M", + "792\u202fMB" ], [ "1b-base-q5_0", - "868 MB" + "868\u202fMB" ], [ "1b-base-q5_1", - "939 MB" - ], - [ - "1b-base-q5_K_M", - "910 MB" + "939\u202fMB" ], [ "1b-base-q5_K_S", - "868 MB" + "868\u202fMB" + ], + [ + "1b-base-q5_K_M", + "910\u202fMB" ], [ "1b-base-q6_K", - "1.0 GB" + "1.0\u202fGB" ], [ "1b-base-q8_0", - "1.3 GB" + "1.3\u202fGB" + ], + [ + "3b-base-fp16", + "6.4\u202fGB" + ], + [ + "3b-base-q2_K", + "1.4\u202fGB" + ], + [ + "3b-base-q3_K_S", + "1.4\u202fGB" + ], + [ + "3b-base-q3_K_M", + "1.7\u202fGB" + ], + [ + "3b-base-q3_K_L", + "1.8\u202fGB" + ], + [ + "3b-base-q4_0", + "1.8\u202fGB" + ], + [ + "3b-base-q4_1", + "2.0\u202fGB" + ], + [ + "3b-base-q4_K_S", + "1.8\u202fGB" + ], + [ + "3b-base-q4_K_M", + "2.0\u202fGB" + ], + [ + "3b-base-q5_0", + "2.2\u202fGB" + ], + [ + "3b-base-q5_1", + "2.4\u202fGB" + ], + [ + "3b-base-q5_K_S", + "2.2\u202fGB" + ], + [ + "3b-base-q5_K_M", + "2.3\u202fGB" + ], + [ + "3b-base-q6_K", + "2.6\u202fGB" + ], + [ + "3b-base-q8_0", + "3.4\u202fGB" + ], + [ + "7b-base-fp16", + "15\u202fGB" + ], + [ + "7b-base-q2_K", + "3.2\u202fGB" + ], + [ + "7b-base-q3_K_S", + "3.3\u202fGB" + ], + [ + "7b-base-q3_K_M", + "3.9\u202fGB" + ], + [ + "7b-base-q3_K_L", + "4.3\u202fGB" + ], + [ + "7b-base-q4_0", + "4.3\u202fGB" + ], + [ + "7b-base-q4_1", + "4.8\u202fGB" + ], + [ + "7b-base-q4_K_S", + "4.3\u202fGB" + ], + [ + "7b-base-q4_K_M", + "4.8\u202fGB" + ], + [ + "7b-base-q5_0", + "5.2\u202fGB" + ], + [ + "7b-base-q5_1", + "5.7\u202fGB" + ], + [ + "7b-base-q5_K_S", + "5.2\u202fGB" + ], + [ + "7b-base-q5_K_M", + "5.5\u202fGB" + ], + [ + "7b-base-q6_K", + "6.2\u202fGB" + ], + [ + "7b-base-q8_0", + "8.0\u202fGB" + ], + [ + "15b-base-fp16", + "32\u202fGB" + ], + [ + "15b-base-q2_K", + "6.7\u202fGB" + ], + [ + "15b-base-q3_K_S", + "6.9\u202fGB" + ], + [ + "15b-base-q3_K_M", + "8.2\u202fGB" + ], + [ + "15b-base-q3_K_L", + "9.1\u202fGB" + ], + [ + "15b-base-q4_0", + "9.0\u202fGB" + ], + [ + "15b-base-q4_1", + "10.0\u202fGB" + ], + [ + "15b-base-q4_K_S", + "9.1\u202fGB" + ], + [ + "15b-base-q4_K_M", + "10.0\u202fGB" + ], + [ + "15b-base-q5_0", + "11\u202fGB" + ], + [ + "15b-base-q5_1", + "12\u202fGB" + ], + [ + "15b-base-q5_K_S", + "11\u202fGB" + ], + [ + "15b-base-q5_K_M", + "12\u202fGB" + ], + [ + "15b-base-q6_K", + "13\u202fGB" + ], + [ + "15b-base-q8_0", + "17\u202fGB" + ], + [ + "15b-plus-fp16", + "32\u202fGB" + ], + [ + "15b-plus-q2_K", + "6.7\u202fGB" + ], + [ + "15b-plus-q3_K_S", + "6.9\u202fGB" + ], + [ + "15b-plus-q3_K_M", + "8.2\u202fGB" + ], + [ + "15b-plus-q3_K_L", + "9.1\u202fGB" + ], + [ + "15b-plus-q4_0", + "9.0\u202fGB" + ], + [ + "15b-plus-q4_1", + "10.0\u202fGB" + ], + [ + "15b-plus-q4_K_S", + "9.1\u202fGB" + ], + [ + "15b-plus-q4_K_M", + "10.0\u202fGB" + ], + [ + "15b-plus-q5_0", + "11\u202fGB" + ], + [ + "15b-plus-q5_1", + "12\u202fGB" + ], + [ + "15b-plus-q5_K_S", + "11\u202fGB" + ], + [ + "15b-plus-q5_K_M", + "12\u202fGB" + ], + [ + "15b-plus-q6_K", + "13\u202fGB" + ], + [ + "15b-plus-q8_0", + "17\u202fGB" ] ], "image": false, "author": "BigCode" }, - "wizardlm2": { - "url": "https://ollama.com/library/wizardlm2", - "description": "State of the art large language model from Microsoft AI with improved performance on complex chat, multilingual, reasoning and agent use cases.", + "llama2-chinese": { + "url": "https://ollama.com/library/llama2-chinese", + "description": "Llama 2 based model fine tuned to improve Chinese dialogue 
ability.", "tags": [ [ "latest", - "4.1 GB" - ], - [ - "8x22b", - "80 GB" + "3.8\u202fGB" ], [ "7b", - "4.1 GB" + "3.8\u202fGB" ], [ - "8x22b-fp16", - "281 GB" + "13b", + "7.4\u202fGB" ], [ - "8x22b-q2_K", - "52 GB" + "7b-chat", + "3.8\u202fGB" ], [ - "8x22b-q4_0", - "80 GB" + "13b-chat", + "7.4\u202fGB" ], [ - "8x22b-q8_0", - "149 GB" + "7b-chat-fp16", + "13\u202fGB" ], [ - "7b-fp16", - "14 GB" + "7b-chat-q2_K", + "2.8\u202fGB" ], [ - "7b-q2_K", - "2.7 GB" + "7b-chat-q3_K_S", + "2.9\u202fGB" ], [ - "7b-q3_K_L", - "3.8 GB" + "7b-chat-q3_K_M", + "3.3\u202fGB" ], [ - "7b-q3_K_M", - "3.5 GB" + "7b-chat-q3_K_L", + "3.6\u202fGB" ], [ - "7b-q3_K_S", - "3.2 GB" + "7b-chat-q4_0", + "3.8\u202fGB" ], [ - "7b-q4_0", - "4.1 GB" + "7b-chat-q4_1", + "4.2\u202fGB" ], [ - "7b-q4_1", - "4.6 GB" + "7b-chat-q4_K_S", + "3.9\u202fGB" ], [ - "7b-q4_K_M", - "4.4 GB" + "7b-chat-q4_K_M", + "4.1\u202fGB" ], [ - "7b-q4_K_S", - "4.1 GB" + "7b-chat-q5_0", + "4.7\u202fGB" ], [ - "7b-q5_0", - "5.0 GB" + "7b-chat-q5_1", + "5.1\u202fGB" ], [ - "7b-q5_1", - "5.4 GB" + "7b-chat-q5_K_S", + "4.7\u202fGB" ], [ - "7b-q5_K_M", - "5.1 GB" + "7b-chat-q5_K_M", + "4.8\u202fGB" ], [ - "7b-q5_K_S", - "5.0 GB" + "7b-chat-q6_K", + "5.5\u202fGB" ], [ - "7b-q6_K", - "5.9 GB" + "7b-chat-q8_0", + "7.2\u202fGB" ], [ - "7b-q8_0", - "7.7 GB" + "13b-chat-fp16", + "26\u202fGB" + ], + [ + "13b-chat-q2_K", + "5.4\u202fGB" + ], + [ + "13b-chat-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-chat-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-chat-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-chat-q4_0", + "7.4\u202fGB" + ], + [ + "13b-chat-q4_1", + "8.2\u202fGB" + ], + [ + "13b-chat-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-chat-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-chat-q5_0", + "9.0\u202fGB" + ], + [ + "13b-chat-q5_1", + "9.8\u202fGB" + ], + [ + "13b-chat-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-chat-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-chat-q6_K", + "11\u202fGB" + ], + [ + "13b-chat-q8_0", + "14\u202fGB" ] ], "image": false, - "author": "Microsoft" + "author": "Meta" + }, + "vicuna": { + "url": "https://ollama.com/library/vicuna", + "description": "General use chat model based on Llama and Llama 2 with 2K to 16K context sizes.", + "tags": [ + [ + "latest", + "3.8\u202fGB" + ], + [ + "7b", + "3.8\u202fGB" + ], + [ + "13b", + "7.4\u202fGB" + ], + [ + "33b", + "18\u202fGB" + ], + [ + "7b-16k", + "3.8\u202fGB" + ], + [ + "13b-16k", + "7.4\u202fGB" + ], + [ + "7b-fp16", + "13\u202fGB" + ], + [ + "7b-q2_K", + "2.8\u202fGB" + ], + [ + "7b-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-q4_0", + "3.8\u202fGB" + ], + [ + "7b-q4_1", + "4.2\u202fGB" + ], + [ + "7b-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-q5_0", + "4.7\u202fGB" + ], + [ + "7b-q5_1", + "5.1\u202fGB" + ], + [ + "7b-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-q6_K", + "5.5\u202fGB" + ], + [ + "7b-q8_0", + "7.2\u202fGB" + ], + [ + "13b-fp16", + "26\u202fGB" + ], + [ + "13b-q2_K", + "5.4\u202fGB" + ], + [ + "13b-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-q4_0", + "7.4\u202fGB" + ], + [ + "13b-q4_1", + "8.2\u202fGB" + ], + [ + "13b-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-q5_0", + "9.0\u202fGB" + ], + [ + "13b-q5_1", + "9.8\u202fGB" + ], + [ + "13b-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-q6_K", + 
"11\u202fGB" + ], + [ + "13b-q8_0", + "14\u202fGB" + ], + [ + "33b-fp16", + "65\u202fGB" + ], + [ + "33b-q2_K", + "14\u202fGB" + ], + [ + "33b-q3_K_S", + "14\u202fGB" + ], + [ + "33b-q3_K_M", + "16\u202fGB" + ], + [ + "33b-q3_K_L", + "17\u202fGB" + ], + [ + "33b-q4_0", + "18\u202fGB" + ], + [ + "33b-q4_1", + "20\u202fGB" + ], + [ + "33b-q4_K_S", + "18\u202fGB" + ], + [ + "33b-q4_K_M", + "20\u202fGB" + ], + [ + "33b-q5_0", + "22\u202fGB" + ], + [ + "33b-q5_1", + "24\u202fGB" + ], + [ + "33b-q5_K_S", + "22\u202fGB" + ], + [ + "33b-q5_K_M", + "23\u202fGB" + ], + [ + "33b-q6_K", + "27\u202fGB" + ], + [ + "33b-q8_0", + "35\u202fGB" + ], + [ + "7b-v1.5-fp16", + "13\u202fGB" + ], + [ + "7b-v1.5-q2_K", + "2.8\u202fGB" + ], + [ + "7b-v1.5-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-v1.5-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-v1.5-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-v1.5-q4_0", + "3.8\u202fGB" + ], + [ + "7b-v1.5-q4_1", + "4.2\u202fGB" + ], + [ + "7b-v1.5-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-v1.5-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-v1.5-q5_0", + "4.7\u202fGB" + ], + [ + "7b-v1.5-q5_1", + "5.1\u202fGB" + ], + [ + "7b-v1.5-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-v1.5-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-v1.5-q6_K", + "5.5\u202fGB" + ], + [ + "7b-v1.5-q8_0", + "7.2\u202fGB" + ], + [ + "7b-v1.5-16k-fp16", + "13\u202fGB" + ], + [ + "7b-v1.5-16k-q2_K", + "2.8\u202fGB" + ], + [ + "7b-v1.5-16k-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-v1.5-16k-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-v1.5-16k-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-v1.5-16k-q4_0", + "3.8\u202fGB" + ], + [ + "7b-v1.5-16k-q4_1", + "4.2\u202fGB" + ], + [ + "7b-v1.5-16k-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-v1.5-16k-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-v1.5-16k-q5_0", + "4.7\u202fGB" + ], + [ + "7b-v1.5-16k-q5_1", + "5.1\u202fGB" + ], + [ + "7b-v1.5-16k-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-v1.5-16k-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-v1.5-16k-q6_K", + "5.5\u202fGB" + ], + [ + "7b-v1.5-16k-q8_0", + "7.2\u202fGB" + ], + [ + "13b-v1.5-fp16", + "26\u202fGB" + ], + [ + "13b-v1.5-q2_K", + "5.4\u202fGB" + ], + [ + "13b-v1.5-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-v1.5-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-v1.5-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-v1.5-q4_0", + "7.4\u202fGB" + ], + [ + "13b-v1.5-q4_1", + "8.2\u202fGB" + ], + [ + "13b-v1.5-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-v1.5-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-v1.5-q5_0", + "9.0\u202fGB" + ], + [ + "13b-v1.5-q5_1", + "9.8\u202fGB" + ], + [ + "13b-v1.5-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-v1.5-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-v1.5-q6_K", + "11\u202fGB" + ], + [ + "13b-v1.5-q8_0", + "14\u202fGB" + ], + [ + "13b-v1.5-16k-fp16", + "26\u202fGB" + ], + [ + "13b-v1.5-16k-q2_K", + "5.4\u202fGB" + ], + [ + "13b-v1.5-16k-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-v1.5-16k-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-v1.5-16k-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-v1.5-16k-q4_0", + "7.4\u202fGB" + ], + [ + "13b-v1.5-16k-q4_1", + "8.2\u202fGB" + ], + [ + "13b-v1.5-16k-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-v1.5-16k-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-v1.5-16k-q5_0", + "9.0\u202fGB" + ], + [ + "13b-v1.5-16k-q5_1", + "9.8\u202fGB" + ], + [ + "13b-v1.5-16k-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-v1.5-16k-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-v1.5-16k-q6_K", + "11\u202fGB" + ], + [ + "13b-v1.5-16k-q8_0", + "14\u202fGB" + ] + ], + "image": false, + "author": "lmsys.org" + }, + "tinyllama": { + "url": "https://ollama.com/library/tinyllama", + "description": 
"The TinyLlama project is an open endeavor to train a compact 1.1B Llama model on 3 trillion tokens.", + "tags": [ + [ + "latest", + "638\u202fMB" + ], + [ + "1.1b", + "638\u202fMB" + ], + [ + "chat", + "638\u202fMB" + ], + [ + "v0.6", + "638\u202fMB" + ], + [ + "v1", + "638\u202fMB" + ], + [ + "1.1b-chat", + "638\u202fMB" + ], + [ + "1.1b-chat-v0.6-fp16", + "2.2\u202fGB" + ], + [ + "1.1b-chat-v0.6-q2_K", + "483\u202fMB" + ], + [ + "1.1b-chat-v0.6-q3_K_S", + "500\u202fMB" + ], + [ + "1.1b-chat-v0.6-q3_K_M", + "551\u202fMB" + ], + [ + "1.1b-chat-v0.6-q3_K_L", + "593\u202fMB" + ], + [ + "1.1b-chat-v0.6-q4_0", + "638\u202fMB" + ], + [ + "1.1b-chat-v0.6-q4_1", + "702\u202fMB" + ], + [ + "1.1b-chat-v0.6-q4_K_S", + "644\u202fMB" + ], + [ + "1.1b-chat-v0.6-q4_K_M", + "669\u202fMB" + ], + [ + "1.1b-chat-v0.6-q5_0", + "767\u202fMB" + ], + [ + "1.1b-chat-v0.6-q5_1", + "832\u202fMB" + ], + [ + "1.1b-chat-v0.6-q5_K_S", + "767\u202fMB" + ], + [ + "1.1b-chat-v0.6-q5_K_M", + "783\u202fMB" + ], + [ + "1.1b-chat-v0.6-q6_K", + "904\u202fMB" + ], + [ + "1.1b-chat-v0.6-q8_0", + "1.2\u202fGB" + ], + [ + "1.1b-chat-v1-fp16", + "2.2\u202fGB" + ], + [ + "1.1b-chat-v1-q2_K", + "483\u202fMB" + ], + [ + "1.1b-chat-v1-q3_K_S", + "500\u202fMB" + ], + [ + "1.1b-chat-v1-q3_K_M", + "551\u202fMB" + ], + [ + "1.1b-chat-v1-q3_K_L", + "593\u202fMB" + ], + [ + "1.1b-chat-v1-q4_0", + "638\u202fMB" + ], + [ + "1.1b-chat-v1-q4_1", + "702\u202fMB" + ], + [ + "1.1b-chat-v1-q4_K_S", + "644\u202fMB" + ], + [ + "1.1b-chat-v1-q4_K_M", + "669\u202fMB" + ], + [ + "1.1b-chat-v1-q5_0", + "767\u202fMB" + ], + [ + "1.1b-chat-v1-q5_1", + "832\u202fMB" + ], + [ + "1.1b-chat-v1-q5_K_S", + "767\u202fMB" + ], + [ + "1.1b-chat-v1-q5_K_M", + "783\u202fMB" + ], + [ + "1.1b-chat-v1-q6_K", + "904\u202fMB" + ], + [ + "1.1b-chat-v1-q8_0", + "1.2\u202fGB" + ] + ], + "image": false, + "author": "TinyLlama Team" + }, + "codestral": { + "url": "https://ollama.com/library/codestral", + "description": "Codestral is Mistral AI\u2019s first-ever code model designed for code generation tasks.", + "tags": [ + [ + "latest", + "13\u202fGB" + ], + [ + "22b", + "13\u202fGB" + ], + [ + "v0.1", + "13\u202fGB" + ], + [ + "22b-v0.1-f16", + "44\u202fGB" + ], + [ + "22b-v0.1-q2_K", + "8.3\u202fGB" + ], + [ + "22b-v0.1-q3_K_S", + "9.6\u202fGB" + ], + [ + "22b-v0.1-q3_K_M", + "11\u202fGB" + ], + [ + "22b-v0.1-q3_K_L", + "12\u202fGB" + ], + [ + "22b-v0.1-q4_0", + "13\u202fGB" + ], + [ + "22b-v0.1-q4_1", + "14\u202fGB" + ], + [ + "22b-v0.1-q4_K_S", + "13\u202fGB" + ], + [ + "22b-v0.1-q4_K_M", + "13\u202fGB" + ], + [ + "22b-v0.1-q5_0", + "15\u202fGB" + ], + [ + "22b-v0.1-q5_1", + "17\u202fGB" + ], + [ + "22b-v0.1-q5_K_S", + "15\u202fGB" + ], + [ + "22b-v0.1-q5_K_M", + "16\u202fGB" + ], + [ + "22b-v0.1-q6_K", + "18\u202fGB" + ], + [ + "22b-v0.1-q8_0", + "24\u202fGB" + ] + ], + "image": false, + "author": "Mistral AI" + }, + "wizard-vicuna-uncensored": { + "url": "https://ollama.com/library/wizard-vicuna-uncensored", + "description": "Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on Llama 2 uncensored by Eric Hartford.", + "tags": [ + [ + "latest", + "3.8\u202fGB" + ], + [ + "7b", + "3.8\u202fGB" + ], + [ + "13b", + "7.4\u202fGB" + ], + [ + "30b", + "18\u202fGB" + ], + [ + "7b-fp16", + "13\u202fGB" + ], + [ + "7b-q2_K", + "2.8\u202fGB" + ], + [ + "7b-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-q4_0", + "3.8\u202fGB" + ], + [ + "7b-q4_1", + "4.2\u202fGB" + ], + [ + "7b-q4_K_S", + 
"3.9\u202fGB" + ], + [ + "7b-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-q5_0", + "4.7\u202fGB" + ], + [ + "7b-q5_1", + "5.1\u202fGB" + ], + [ + "7b-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-q6_K", + "5.5\u202fGB" + ], + [ + "7b-q8_0", + "7.2\u202fGB" + ], + [ + "13b-fp16", + "26\u202fGB" + ], + [ + "13b-q2_K", + "5.4\u202fGB" + ], + [ + "13b-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-q4_0", + "7.4\u202fGB" + ], + [ + "13b-q4_1", + "8.2\u202fGB" + ], + [ + "13b-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-q5_0", + "9.0\u202fGB" + ], + [ + "13b-q5_1", + "9.8\u202fGB" + ], + [ + "13b-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-q6_K", + "11\u202fGB" + ], + [ + "13b-q8_0", + "14\u202fGB" + ], + [ + "30b-fp16", + "65\u202fGB" + ], + [ + "30b-q2_K", + "14\u202fGB" + ], + [ + "30b-q3_K_S", + "14\u202fGB" + ], + [ + "30b-q3_K_M", + "16\u202fGB" + ], + [ + "30b-q3_K_L", + "17\u202fGB" + ], + [ + "30b-q4_0", + "18\u202fGB" + ], + [ + "30b-q4_1", + "20\u202fGB" + ], + [ + "30b-q4_K_S", + "18\u202fGB" + ], + [ + "30b-q4_K_M", + "20\u202fGB" + ], + [ + "30b-q5_0", + "22\u202fGB" + ], + [ + "30b-q5_1", + "24\u202fGB" + ], + [ + "30b-q5_K_S", + "22\u202fGB" + ], + [ + "30b-q5_K_M", + "23\u202fGB" + ], + [ + "30b-q6_K", + "27\u202fGB" + ], + [ + "30b-q8_0", + "35\u202fGB" + ] + ], + "image": false, + "author": "Eric Hartford" + }, + "nous-hermes2": { + "url": "https://ollama.com/library/nous-hermes2", + "description": "The powerful family of models by Nous Research that excels at scientific discussion and coding tasks.", + "tags": [ + [ + "latest", + "6.1\u202fGB" + ], + [ + "10.7b", + "6.1\u202fGB" + ], + [ + "34b", + "19\u202fGB" + ], + [ + "10.7b-solar-fp16", + "21\u202fGB" + ], + [ + "10.7b-solar-q2_K", + "4.5\u202fGB" + ], + [ + "10.7b-solar-q3_K_S", + "4.7\u202fGB" + ], + [ + "10.7b-solar-q3_K_M", + "5.2\u202fGB" + ], + [ + "10.7b-solar-q3_K_L", + "5.7\u202fGB" + ], + [ + "10.7b-solar-q4_0", + "6.1\u202fGB" + ], + [ + "10.7b-solar-q4_1", + "6.7\u202fGB" + ], + [ + "10.7b-solar-q4_K_S", + "6.1\u202fGB" + ], + [ + "10.7b-solar-q4_K_M", + "6.5\u202fGB" + ], + [ + "10.7b-solar-q5_0", + "7.4\u202fGB" + ], + [ + "10.7b-solar-q5_1", + "8.1\u202fGB" + ], + [ + "10.7b-solar-q5_K_S", + "7.4\u202fGB" + ], + [ + "10.7b-solar-q5_K_M", + "7.6\u202fGB" + ], + [ + "10.7b-solar-q6_K", + "8.8\u202fGB" + ], + [ + "10.7b-solar-q8_0", + "11\u202fGB" + ], + [ + "34b-yi-fp16", + "69\u202fGB" + ], + [ + "34b-yi-q2_K", + "15\u202fGB" + ], + [ + "34b-yi-q3_K_S", + "15\u202fGB" + ], + [ + "34b-yi-q3_K_M", + "17\u202fGB" + ], + [ + "34b-yi-q3_K_L", + "18\u202fGB" + ], + [ + "34b-yi-q4_0", + "19\u202fGB" + ], + [ + "34b-yi-q4_1", + "22\u202fGB" + ], + [ + "34b-yi-q4_K_S", + "20\u202fGB" + ], + [ + "34b-yi-q4_K_M", + "21\u202fGB" + ], + [ + "34b-yi-q5_0", + "24\u202fGB" + ], + [ + "34b-yi-q5_1", + "26\u202fGB" + ], + [ + "34b-yi-q5_K_S", + "24\u202fGB" + ], + [ + "34b-yi-q5_K_M", + "24\u202fGB" + ], + [ + "34b-yi-q6_K", + "28\u202fGB" + ], + [ + "34b-yi-q8_0", + "37\u202fGB" + ] + ], + "image": false, + "author": "Nous Research" }, "openchat": { "url": "https://ollama.com/library/openchat", @@ -11557,203 +11941,203 @@ "tags": [ [ "latest", - "4.1 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" - ], - [ - "7b-v3.5-1210", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v3.5", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v3.5-0106", - "4.1 GB" + "4.1\u202fGB" + ], + [ + 
"7b-v3.5-1210", + "4.1\u202fGB" ], [ "7b-v3.5-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-v3.5-q2_K", - "3.1 GB" - ], - [ - "7b-v3.5-0106-q4_K_M", - "4.4 GB" - ], - [ - "7b-v3.5-q5_0", - "5.0 GB" - ], - [ - "7b-v3.5-q4_1", - "4.6 GB" - ], - [ - "7b-v3.5-q3_K_L", - "3.8 GB" - ], - [ - "7b-v3.5-q5_1", - "5.4 GB" - ], - [ - "7b-v3.5-q5_K_M", - "5.1 GB" - ], - [ - "7b-v3.5-0106-q2_K", - "3.1 GB" - ], - [ - "7b-v3.5-q4_0", - "4.1 GB" - ], - [ - "7b-v3.5-0106-q3_K_M", - "3.5 GB" - ], - [ - "7b-v3.5-q6_K", - "5.9 GB" - ], - [ - "7b-v3.5-q3_K_M", - "3.5 GB" - ], - [ - "7b-v3.5-0106-fp16", - "14 GB" - ], - [ - "7b-v3.5-q4_K_M", - "4.4 GB" - ], - [ - "7b-v3.5-q8_0", - "7.7 GB" - ], - [ - "7b-v3.5-0106-q4_1", - "4.6 GB" - ], - [ - "7b-v3.5-0106-q3_K_L", - "3.8 GB" - ], - [ - "7b-v3.5-q5_K_S", - "5.0 GB" - ], - [ - "7b-v3.5-q4_K_S", - "4.1 GB" + "3.1\u202fGB" ], [ "7b-v3.5-q3_K_S", - "3.2 GB" + "3.2\u202fGB" ], [ - "7b-v3.5-0106-q4_0", - "4.1 GB" + "7b-v3.5-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v3.5-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v3.5-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v3.5-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v3.5-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v3.5-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-v3.5-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v3.5-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v3.5-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v3.5-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v3.5-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v3.5-q8_0", + "7.7\u202fGB" + ], + [ + "7b-v3.5-0106-fp16", + "14\u202fGB" + ], + [ + "7b-v3.5-0106-q2_K", + "3.1\u202fGB" ], [ "7b-v3.5-0106-q3_K_S", - "3.2 GB" + "3.2\u202fGB" + ], + [ + "7b-v3.5-0106-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v3.5-0106-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v3.5-0106-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v3.5-0106-q4_1", + "4.6\u202fGB" ], [ "7b-v3.5-0106-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v3.5-0106-q4_K_M", + "4.4\u202fGB" ], [ "7b-v3.5-0106-q5_0", - "5.0 GB" + "5.0\u202fGB" ], [ "7b-v3.5-0106-q5_1", - "5.4 GB" - ], - [ - "7b-v3.5-0106-q5_K_M", - "5.1 GB" + "5.4\u202fGB" ], [ "7b-v3.5-0106-q5_K_S", - "5.0 GB" + "5.0\u202fGB" + ], + [ + "7b-v3.5-0106-q5_K_M", + "5.1\u202fGB" ], [ "7b-v3.5-0106-q6_K", - "5.9 GB" + "5.9\u202fGB" ], [ "7b-v3.5-0106-q8_0", - "7.7 GB" + "7.7\u202fGB" ], [ "7b-v3.5-1210-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-v3.5-1210-q2_K", - "3.1 GB" - ], - [ - "7b-v3.5-1210-q3_K_L", - "3.8 GB" - ], - [ - "7b-v3.5-1210-q3_K_M", - "3.5 GB" + "3.1\u202fGB" ], [ "7b-v3.5-1210-q3_K_S", - "3.2 GB" + "3.2\u202fGB" + ], + [ + "7b-v3.5-1210-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v3.5-1210-q3_K_L", + "3.8\u202fGB" ], [ "7b-v3.5-1210-q4_0", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v3.5-1210-q4_1", - "4.6 GB" - ], - [ - "7b-v3.5-1210-q4_K_M", - "4.4 GB" + "4.6\u202fGB" ], [ "7b-v3.5-1210-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v3.5-1210-q4_K_M", + "4.4\u202fGB" ], [ "7b-v3.5-1210-q5_0", - "5.0 GB" + "5.0\u202fGB" ], [ "7b-v3.5-1210-q5_1", - "5.4 GB" - ], - [ - "7b-v3.5-1210-q5_K_M", - "5.1 GB" + "5.4\u202fGB" ], [ "7b-v3.5-1210-q5_K_S", - "5.0 GB" + "5.0\u202fGB" + ], + [ + "7b-v3.5-1210-q5_K_M", + "5.1\u202fGB" ], [ "7b-v3.5-1210-q6_K", - "5.9 GB" + "5.9\u202fGB" ], [ "7b-v3.5-1210-q8_0", - "7.7 GB" + "7.7\u202fGB" ] ], "image": false, @@ -11765,375 +12149,951 @@ "tags": [ [ "latest", - "4.8 GB" - ], - [ - "35b", - "20 GB" + "4.8\u202fGB" ], [ "8b", - "4.8 GB" + "4.8\u202fGB" ], [ - "35b-23", - "20 GB" - ], - [ - "35b-23-f16", - "70 GB" - ], - [ - "35b-23-q2_K", - "14 GB" - ], - [ - "35b-23-q3_K_L", - "19 GB" - ], - [ 
- "35b-23-q3_K_M", - "18 GB" - ], - [ - "35b-23-q3_K_S", - "16 GB" - ], - [ - "35b-23-q4_0", - "20 GB" - ], - [ - "35b-23-q4_1", - "22 GB" - ], - [ - "35b-23-q4_K_M", - "22 GB" - ], - [ - "35b-23-q4_K_S", - "20 GB" - ], - [ - "35b-23-q5_0", - "24 GB" - ], - [ - "35b-23-q5_1", - "26 GB" - ], - [ - "35b-23-q5_K_M", - "25 GB" - ], - [ - "35b-23-q5_K_S", - "24 GB" - ], - [ - "35b-23-q6_K", - "29 GB" - ], - [ - "35b-23-q8_0", - "37 GB" - ], - [ - "8b-23-f16", - "16 GB" + "35b", + "20\u202fGB" ], [ "8b-23", - "4.8 GB" + "4.8\u202fGB" + ], + [ + "8b-23-f16", + "16\u202fGB" + ], + [ + "35b-23", + "20\u202fGB" + ], + [ + "35b-23-f16", + "70\u202fGB" ], [ "8b-23-q2_K", - "3.4 GB" - ], - [ - "8b-23-q3_K_L", - "4.5 GB" - ], - [ - "8b-23-q3_K_M", - "4.2 GB" + "3.4\u202fGB" ], [ "8b-23-q3_K_S", - "3.9 GB" + "3.9\u202fGB" + ], + [ + "8b-23-q3_K_M", + "4.2\u202fGB" + ], + [ + "8b-23-q3_K_L", + "4.5\u202fGB" ], [ "8b-23-q4_0", - "4.8 GB" + "4.8\u202fGB" ], [ "8b-23-q4_1", - "5.2 GB" - ], - [ - "8b-23-q4_K_M", - "5.1 GB" + "5.2\u202fGB" ], [ "8b-23-q4_K_S", - "4.8 GB" + "4.8\u202fGB" + ], + [ + "8b-23-q4_K_M", + "5.1\u202fGB" ], [ "8b-23-q5_0", - "5.7 GB" + "5.7\u202fGB" ], [ "8b-23-q5_1", - "6.1 GB" - ], - [ - "8b-23-q5_K_M", - "5.8 GB" + "6.1\u202fGB" ], [ "8b-23-q5_K_S", - "5.7 GB" + "5.7\u202fGB" + ], + [ + "8b-23-q5_K_M", + "5.8\u202fGB" ], [ "8b-23-q6_K", - "6.6 GB" + "6.6\u202fGB" ], [ "8b-23-q8_0", - "8.5 GB" + "8.5\u202fGB" + ], + [ + "35b-23-q2_K", + "14\u202fGB" + ], + [ + "35b-23-q3_K_S", + "16\u202fGB" + ], + [ + "35b-23-q3_K_M", + "18\u202fGB" + ], + [ + "35b-23-q3_K_L", + "19\u202fGB" + ], + [ + "35b-23-q4_0", + "20\u202fGB" + ], + [ + "35b-23-q4_1", + "22\u202fGB" + ], + [ + "35b-23-q4_K_S", + "20\u202fGB" + ], + [ + "35b-23-q4_K_M", + "22\u202fGB" + ], + [ + "35b-23-q5_0", + "24\u202fGB" + ], + [ + "35b-23-q5_1", + "26\u202fGB" + ], + [ + "35b-23-q5_K_S", + "24\u202fGB" + ], + [ + "35b-23-q5_K_M", + "25\u202fGB" + ], + [ + "35b-23-q6_K", + "29\u202fGB" + ], + [ + "35b-23-q8_0", + "37\u202fGB" ] ], "image": false, "author": "Cohere" }, + "wizardlm2": { + "url": "https://ollama.com/library/wizardlm2", + "description": "State of the art large language model from Microsoft AI with improved performance on complex chat, multilingual, reasoning and agent use cases.", + "tags": [ + [ + "latest", + "4.1\u202fGB" + ], + [ + "7b", + "4.1\u202fGB" + ], + [ + "8x22b", + "80\u202fGB" + ], + [ + "7b-fp16", + "14\u202fGB" + ], + [ + "7b-q2_K", + "2.7\u202fGB" + ], + [ + "7b-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-q4_0", + "4.1\u202fGB" + ], + [ + "7b-q4_1", + "4.6\u202fGB" + ], + [ + "7b-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-q5_0", + "5.0\u202fGB" + ], + [ + "7b-q5_1", + "5.4\u202fGB" + ], + [ + "7b-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-q6_K", + "5.9\u202fGB" + ], + [ + "7b-q8_0", + "7.7\u202fGB" + ], + [ + "8x22b-fp16", + "281\u202fGB" + ], + [ + "8x22b-q2_K", + "52\u202fGB" + ], + [ + "8x22b-q4_0", + "80\u202fGB" + ], + [ + "8x22b-q8_0", + "149\u202fGB" + ] + ], + "image": false, + "author": "Microsoft" + }, "tinydolphin": { "url": "https://ollama.com/library/tinydolphin", "description": "An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset by Eric Hartford and based on TinyLlama.", "tags": [ [ "latest", - "637 MB" + "637\u202fMB" ], [ "1.1b", - "637 MB" + "637\u202fMB" ], [ "v2.8", - "637 MB" + "637\u202fMB" ], [ 
"1.1b-v2.8-fp16", - "2.2 GB" + "2.2\u202fGB" ], [ "1.1b-v2.8-q2_K", - "432 MB" - ], - [ - "1.1b-v2.8-q3_K_L", - "592 MB" - ], - [ - "1.1b-v2.8-q3_K_M", - "548 MB" + "432\u202fMB" ], [ "1.1b-v2.8-q3_K_S", - "499 MB" + "499\u202fMB" + ], + [ + "1.1b-v2.8-q3_K_M", + "548\u202fMB" + ], + [ + "1.1b-v2.8-q3_K_L", + "592\u202fMB" ], [ "1.1b-v2.8-q4_0", - "637 MB" + "637\u202fMB" ], [ "1.1b-v2.8-q4_1", - "701 MB" - ], - [ - "1.1b-v2.8-q4_K_M", - "668 MB" + "701\u202fMB" ], [ "1.1b-v2.8-q4_K_S", - "640 MB" + "640\u202fMB" + ], + [ + "1.1b-v2.8-q4_K_M", + "668\u202fMB" ], [ "1.1b-v2.8-q5_0", - "766 MB" + "766\u202fMB" ], [ "1.1b-v2.8-q5_1", - "831 MB" - ], - [ - "1.1b-v2.8-q5_K_M", - "782 MB" + "831\u202fMB" ], [ "1.1b-v2.8-q5_K_S", - "766 MB" + "766\u202fMB" + ], + [ + "1.1b-v2.8-q5_K_M", + "782\u202fMB" ], [ "1.1b-v2.8-q6_K", - "903 MB" + "903\u202fMB" ], [ "1.1b-v2.8-q8_0", - "1.2 GB" + "1.2\u202fGB" ] ], "image": false, "author": "Eric Hartford" }, - "openhermes": { - "url": "https://ollama.com/library/openhermes", - "description": "OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully open datasets.", + "granite-code": { + "url": "https://ollama.com/library/granite-code", + "description": "A family of open foundation models by IBM for Code Intelligence", "tags": [ [ "latest", - "4.1 GB" + "2.0\u202fGB" ], [ - "v2.5", - "4.1 GB" + "3b", + "2.0\u202fGB" ], [ - "v2", - "4.1 GB" + "8b", + "4.6\u202fGB" ], [ - "7b-mistral-v2-fp16", - "14 GB" + "20b", + "12\u202fGB" ], [ - "7b-mistral-v2-q2_K", - "3.1 GB" + "34b", + "19\u202fGB" ], [ - "7b-mistral-v2-q3_K_L", - "3.8 GB" + "3b-base", + "2.0\u202fGB" ], [ - "7b-mistral-v2-q3_K_M", - "3.5 GB" + "3b-base-f16", + "7.0\u202fGB" ], [ - "7b-mistral-v2-q3_K_S", - "3.2 GB" + "3b-instruct", + "2.0\u202fGB" ], [ - "7b-mistral-v2-q4_0", - "4.1 GB" + "3b-instruct-f16", + "7.0\u202fGB" ], [ - "7b-mistral-v2-q4_1", - "4.6 GB" + "8b-base", + "4.6\u202fGB" ], [ - "7b-mistral-v2-q4_K_M", - "4.4 GB" + "8b-base-f16", + "16\u202fGB" ], [ - "7b-mistral-v2-q4_K_S", - "4.1 GB" + "8b-instruct", + "4.6\u202fGB" ], [ - "7b-mistral-v2-q5_0", - "5.0 GB" + "8b-instruct-f16", + "16\u202fGB" ], [ - "7b-mistral-v2-q5_1", - "5.4 GB" + "20b-base", + "12\u202fGB" ], [ - "7b-mistral-v2-q5_K_M", - "5.1 GB" + "20b-base-f16", + "40\u202fGB" ], [ - "7b-mistral-v2-q5_K_S", - "5.0 GB" + "20b-instruct", + "12\u202fGB" ], [ - "7b-mistral-v2-q6_K", - "5.9 GB" + "20b-instruct-f16", + "40\u202fGB" ], [ - "7b-mistral-v2-q8_0", - "7.7 GB" + "34b-base", + "19\u202fGB" ], [ - "7b-mistral-v2.5-fp16", - "14 GB" + "34b-base-f16", + "68\u202fGB" ], [ - "7b-mistral-v2.5-q2_K", - "3.1 GB" + "34b-instruct", + "19\u202fGB" ], [ - "7b-mistral-v2.5-q3_K_L", - "3.8 GB" + "34b-instruct-f16", + "68\u202fGB" ], [ - "7b-mistral-v2.5-q3_K_M", - "3.5 GB" + "3b-base-fp16", + "7.0\u202fGB" ], [ - "7b-mistral-v2.5-q3_K_S", - "3.2 GB" + "3b-base-q2_K", + "1.3\u202fGB" ], [ - "7b-mistral-v2.5-q4_0", - "4.1 GB" + "3b-base-q3_K_S", + "1.6\u202fGB" ], [ - "7b-mistral-v2.5-q4_1", - "4.6 GB" + "3b-base-q3_K_M", + "1.7\u202fGB" ], [ - "7b-mistral-v2.5-q4_K_M", - "4.4 GB" + "3b-base-q3_K_L", + "1.9\u202fGB" ], [ - "7b-mistral-v2.5-q4_K_S", - "4.1 GB" + "3b-base-q4_0", + "2.0\u202fGB" ], [ - "7b-mistral-v2.5-q5_0", - "5.0 GB" + "3b-base-q4_1", + "2.2\u202fGB" ], [ - "7b-mistral-v2.5-q5_1", - "5.4 GB" + "3b-base-q4_K_S", + "2.0\u202fGB" ], [ - "7b-mistral-v2.5-q5_K_M", - "5.1 GB" + "3b-base-q4_K_M", + "2.1\u202fGB" ], [ - "7b-mistral-v2.5-q5_K_S", - "5.0 GB" + "3b-base-q5_0", + "2.4\u202fGB" ], [ - 
"7b-mistral-v2.5-q6_K", - "5.9 GB" + "3b-base-q5_1", + "2.6\u202fGB" ], [ - "7b-mistral-v2.5-q8_0", - "7.7 GB" + "3b-base-q5_K_S", + "2.4\u202fGB" ], [ - "7b-v2", - "4.1 GB" + "3b-base-q5_K_M", + "2.5\u202fGB" ], [ - "7b-v2.5", - "4.1 GB" + "3b-base-q6_K", + "2.9\u202fGB" + ], + [ + "3b-base-q8_0", + "3.7\u202fGB" + ], + [ + "3b-instruct-fp16", + "7.0\u202fGB" + ], + [ + "3b-instruct-q2_K", + "1.3\u202fGB" + ], + [ + "3b-instruct-q3_K_S", + "1.6\u202fGB" + ], + [ + "3b-instruct-q3_K_M", + "1.7\u202fGB" + ], + [ + "3b-instruct-q3_K_L", + "1.9\u202fGB" + ], + [ + "3b-instruct-q4_0", + "2.0\u202fGB" + ], + [ + "3b-instruct-q4_1", + "2.2\u202fGB" + ], + [ + "3b-instruct-q4_K_S", + "2.0\u202fGB" + ], + [ + "3b-instruct-q4_K_M", + "2.1\u202fGB" + ], + [ + "3b-instruct-q5_0", + "2.4\u202fGB" + ], + [ + "3b-instruct-q5_1", + "2.6\u202fGB" + ], + [ + "3b-instruct-q5_K_S", + "2.4\u202fGB" + ], + [ + "3b-instruct-q5_K_M", + "2.5\u202fGB" + ], + [ + "3b-instruct-q6_K", + "2.9\u202fGB" + ], + [ + "3b-instruct-q8_0", + "3.7\u202fGB" + ], + [ + "3b-instruct-128k-fp16", + "7.0\u202fGB" + ], + [ + "3b-instruct-128k-q2_K", + "1.3\u202fGB" + ], + [ + "3b-instruct-128k-q3_K_S", + "1.6\u202fGB" + ], + [ + "3b-instruct-128k-q3_K_M", + "1.7\u202fGB" + ], + [ + "3b-instruct-128k-q3_K_L", + "1.9\u202fGB" + ], + [ + "3b-instruct-128k-q4_0", + "2.0\u202fGB" + ], + [ + "3b-instruct-128k-q4_1", + "2.2\u202fGB" + ], + [ + "3b-instruct-128k-q4_K_S", + "2.0\u202fGB" + ], + [ + "3b-instruct-128k-q4_K_M", + "2.1\u202fGB" + ], + [ + "3b-instruct-128k-q5_0", + "2.4\u202fGB" + ], + [ + "3b-instruct-128k-q5_1", + "2.6\u202fGB" + ], + [ + "3b-instruct-128k-q5_K_S", + "2.4\u202fGB" + ], + [ + "3b-instruct-128k-q5_K_M", + "2.5\u202fGB" + ], + [ + "3b-instruct-128k-q6_K", + "2.9\u202fGB" + ], + [ + "3b-instruct-128k-q8_0", + "3.7\u202fGB" + ], + [ + "8b-base-fp16", + "16\u202fGB" + ], + [ + "8b-base-q2_K", + "3.1\u202fGB" + ], + [ + "8b-base-q3_K_S", + "3.5\u202fGB" + ], + [ + "8b-base-q3_K_M", + "3.9\u202fGB" + ], + [ + "8b-base-q3_K_L", + "4.3\u202fGB" + ], + [ + "8b-base-q4_0", + "4.6\u202fGB" + ], + [ + "8b-base-q4_1", + "5.1\u202fGB" + ], + [ + "8b-base-q4_K_S", + "4.6\u202fGB" + ], + [ + "8b-base-q4_K_M", + "4.9\u202fGB" + ], + [ + "8b-base-q5_0", + "5.6\u202fGB" + ], + [ + "8b-base-q5_1", + "6.1\u202fGB" + ], + [ + "8b-base-q5_K_S", + "5.6\u202fGB" + ], + [ + "8b-base-q5_K_M", + "5.7\u202fGB" + ], + [ + "8b-base-q6_K", + "6.6\u202fGB" + ], + [ + "8b-base-q8_0", + "8.6\u202fGB" + ], + [ + "8b-instruct-fp16", + "16\u202fGB" + ], + [ + "8b-instruct-q2_K", + "3.1\u202fGB" + ], + [ + "8b-instruct-q3_K_S", + "3.5\u202fGB" + ], + [ + "8b-instruct-q3_K_M", + "3.9\u202fGB" + ], + [ + "8b-instruct-q3_K_L", + "4.3\u202fGB" + ], + [ + "8b-instruct-q4_0", + "4.6\u202fGB" + ], + [ + "8b-instruct-q4_1", + "5.1\u202fGB" + ], + [ + "8b-instruct-q4_K_S", + "4.6\u202fGB" + ], + [ + "8b-instruct-q4_K_M", + "4.9\u202fGB" + ], + [ + "8b-instruct-q5_0", + "5.6\u202fGB" + ], + [ + "8b-instruct-q5_1", + "6.1\u202fGB" + ], + [ + "8b-instruct-q5_K_S", + "5.6\u202fGB" + ], + [ + "8b-instruct-q5_K_M", + "5.7\u202fGB" + ], + [ + "8b-instruct-q6_K", + "6.6\u202fGB" + ], + [ + "8b-instruct-q8_0", + "8.6\u202fGB" + ], + [ + "8b-instruct-128k-q4_0", + "4.6\u202fGB" + ], + [ + "8b-instruct-128k-q4_1", + "5.1\u202fGB" + ], + [ + "20b-base-fp16", + "40\u202fGB" + ], + [ + "20b-base-q2_K", + "7.9\u202fGB" + ], + [ + "20b-base-q3_K_S", + "8.9\u202fGB" + ], + [ + "20b-base-q3_K_M", + "11\u202fGB" + ], + [ + "20b-base-q3_K_L", + "12\u202fGB" + ], + [ + 
"20b-base-q4_0", + "12\u202fGB" + ], + [ + "20b-base-q4_1", + "13\u202fGB" + ], + [ + "20b-base-q4_K_S", + "12\u202fGB" + ], + [ + "20b-base-q4_K_M", + "13\u202fGB" + ], + [ + "20b-base-q5_0", + "14\u202fGB" + ], + [ + "20b-base-q5_1", + "15\u202fGB" + ], + [ + "20b-base-q5_K_S", + "14\u202fGB" + ], + [ + "20b-base-q5_K_M", + "15\u202fGB" + ], + [ + "20b-base-q6_K", + "17\u202fGB" + ], + [ + "20b-base-q8_0", + "21\u202fGB" + ], + [ + "20b-instruct-q2_K", + "7.9\u202fGB" + ], + [ + "20b-instruct-q3_K_S", + "8.9\u202fGB" + ], + [ + "20b-instruct-q3_K_M", + "11\u202fGB" + ], + [ + "20b-instruct-q3_K_L", + "12\u202fGB" + ], + [ + "20b-instruct-q4_0", + "12\u202fGB" + ], + [ + "20b-instruct-q4_1", + "13\u202fGB" + ], + [ + "20b-instruct-q4_K_S", + "12\u202fGB" + ], + [ + "20b-instruct-q4_K_M", + "13\u202fGB" + ], + [ + "20b-instruct-q5_0", + "14\u202fGB" + ], + [ + "20b-instruct-q5_1", + "15\u202fGB" + ], + [ + "20b-instruct-q5_K_S", + "14\u202fGB" + ], + [ + "20b-instruct-q5_K_M", + "15\u202fGB" + ], + [ + "20b-instruct-q6_K", + "17\u202fGB" + ], + [ + "20b-instruct-q8_0", + "21\u202fGB" + ], + [ + "34b-base-q2_K", + "13\u202fGB" + ], + [ + "34b-base-q3_K_S", + "15\u202fGB" + ], + [ + "34b-base-q3_K_M", + "18\u202fGB" + ], + [ + "34b-base-q3_K_L", + "20\u202fGB" + ], + [ + "34b-base-q4_0", + "19\u202fGB" + ], + [ + "34b-base-q4_1", + "21\u202fGB" + ], + [ + "34b-base-q4_K_S", + "19\u202fGB" + ], + [ + "34b-base-q4_K_M", + "21\u202fGB" + ], + [ + "34b-base-q5_0", + "23\u202fGB" + ], + [ + "34b-base-q5_1", + "25\u202fGB" + ], + [ + "34b-base-q5_K_S", + "23\u202fGB" + ], + [ + "34b-base-q5_K_M", + "25\u202fGB" + ], + [ + "34b-base-q6_K", + "28\u202fGB" + ], + [ + "34b-base-q8_0", + "36\u202fGB" + ], + [ + "34b-instruct-q2_K", + "13\u202fGB" + ], + [ + "34b-instruct-q3_K_S", + "15\u202fGB" + ], + [ + "34b-instruct-q3_K_M", + "18\u202fGB" + ], + [ + "34b-instruct-q3_K_L", + "20\u202fGB" + ], + [ + "34b-instruct-q4_0", + "19\u202fGB" + ], + [ + "34b-instruct-q4_1", + "21\u202fGB" + ], + [ + "34b-instruct-q4_K_S", + "19\u202fGB" + ], + [ + "34b-instruct-q4_K_M", + "21\u202fGB" + ], + [ + "34b-instruct-q5_0", + "23\u202fGB" + ], + [ + "34b-instruct-q5_1", + "25\u202fGB" + ], + [ + "34b-instruct-q5_K_S", + "23\u202fGB" + ], + [ + "34b-instruct-q5_K_M", + "25\u202fGB" + ], + [ + "34b-instruct-q6_K", + "28\u202fGB" + ], + [ + "34b-instruct-q8_0", + "36\u202fGB" ] ], "image": false, - "author": "Teknium" + "author": "IBM for Code Intelligence" }, "wizardcoder": { "url": "https://ollama.com/library/wizardcoder", @@ -12141,271 +13101,271 @@ "tags": [ [ "latest", - "3.8 GB" + "3.8\u202fGB" ], [ "33b", - "19 GB" + "19\u202fGB" ], [ "python", - "3.8 GB" - ], - [ - "34b-python", - "19 GB" - ], - [ - "34b-python-fp16", - "67 GB" - ], - [ - "34b-python-q2_K", - "14 GB" - ], - [ - "34b-python-q3_K_L", - "18 GB" - ], - [ - "34b-python-q3_K_M", - "16 GB" - ], - [ - "34b-python-q3_K_S", - "15 GB" - ], - [ - "34b-python-q4_0", - "19 GB" - ], - [ - "34b-python-q4_1", - "21 GB" - ], - [ - "34b-python-q4_K_M", - "20 GB" - ], - [ - "34b-python-q4_K_S", - "19 GB" - ], - [ - "34b-python-q5_0", - "23 GB" - ], - [ - "34b-python-q5_1", - "25 GB" - ], - [ - "34b-python-q5_K_M", - "24 GB" - ], - [ - "34b-python-q5_K_S", - "23 GB" - ], - [ - "34b-python-q6_K", - "28 GB" - ], - [ - "34b-python-q8_0", - "36 GB" - ], - [ - "33b-v1.1", - "19 GB" - ], - [ - "33b-v1.1-fp16", - "67 GB" - ], - [ - "33b-v1.1-q2_K", - "14 GB" - ], - [ - "33b-v1.1-q3_K_L", - "18 GB" - ], - [ - "33b-v1.1-q3_K_M", - "16 GB" - ], - [ - "33b-v1.1-q3_K_S", - "14 
GB" - ], - [ - "33b-v1.1-q4_0", - "19 GB" - ], - [ - "33b-v1.1-q4_1", - "21 GB" - ], - [ - "33b-v1.1-q4_K_M", - "20 GB" - ], - [ - "33b-v1.1-q4_K_S", - "19 GB" - ], - [ - "33b-v1.1-q5_0", - "23 GB" - ], - [ - "33b-v1.1-q5_1", - "25 GB" - ], - [ - "33b-v1.1-q5_K_M", - "24 GB" - ], - [ - "33b-v1.1-q5_K_S", - "23 GB" - ], - [ - "33b-v1.1-q6_K", - "27 GB" - ], - [ - "33b-v1.1-q8_0", - "35 GB" - ], - [ - "13b-python", - "7.4 GB" - ], - [ - "13b-python-fp16", - "26 GB" - ], - [ - "13b-python-q2_K", - "5.4 GB" - ], - [ - "13b-python-q3_K_L", - "6.9 GB" - ], - [ - "13b-python-q3_K_M", - "6.3 GB" - ], - [ - "13b-python-q3_K_S", - "5.7 GB" - ], - [ - "13b-python-q4_0", - "7.4 GB" - ], - [ - "13b-python-q4_1", - "8.2 GB" - ], - [ - "13b-python-q4_K_M", - "7.9 GB" - ], - [ - "13b-python-q4_K_S", - "7.4 GB" - ], - [ - "13b-python-q5_0", - "9.0 GB" - ], - [ - "13b-python-q5_1", - "9.8 GB" - ], - [ - "13b-python-q5_K_M", - "9.2 GB" - ], - [ - "13b-python-q5_K_S", - "9.0 GB" - ], - [ - "13b-python-q6_K", - "11 GB" - ], - [ - "13b-python-q8_0", - "14 GB" + "3.8\u202fGB" ], [ "7b-python", - "3.8 GB" + "3.8\u202fGB" + ], + [ + "13b-python", + "7.4\u202fGB" + ], + [ + "33b-v1.1", + "19\u202fGB" + ], + [ + "34b-python", + "19\u202fGB" ], [ "7b-python-fp16", - "13 GB" + "13\u202fGB" ], [ "7b-python-q2_K", - "2.8 GB" - ], - [ - "7b-python-q3_K_L", - "3.6 GB" - ], - [ - "7b-python-q3_K_M", - "3.3 GB" + "2.8\u202fGB" ], [ "7b-python-q3_K_S", - "2.9 GB" + "2.9\u202fGB" + ], + [ + "7b-python-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-python-q3_K_L", + "3.6\u202fGB" ], [ "7b-python-q4_0", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-python-q4_1", - "4.2 GB" - ], - [ - "7b-python-q4_K_M", - "4.1 GB" + "4.2\u202fGB" ], [ "7b-python-q4_K_S", - "3.9 GB" + "3.9\u202fGB" + ], + [ + "7b-python-q4_K_M", + "4.1\u202fGB" ], [ "7b-python-q5_0", - "4.7 GB" + "4.7\u202fGB" ], [ "7b-python-q5_1", - "5.1 GB" - ], - [ - "7b-python-q5_K_M", - "4.8 GB" + "5.1\u202fGB" ], [ "7b-python-q5_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "7b-python-q5_K_M", + "4.8\u202fGB" ], [ "7b-python-q6_K", - "5.5 GB" + "5.5\u202fGB" ], [ "7b-python-q8_0", - "7.2 GB" + "7.2\u202fGB" + ], + [ + "13b-python-fp16", + "26\u202fGB" + ], + [ + "13b-python-q2_K", + "5.4\u202fGB" + ], + [ + "13b-python-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-python-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-python-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-python-q4_0", + "7.4\u202fGB" + ], + [ + "13b-python-q4_1", + "8.2\u202fGB" + ], + [ + "13b-python-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-python-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-python-q5_0", + "9.0\u202fGB" + ], + [ + "13b-python-q5_1", + "9.8\u202fGB" + ], + [ + "13b-python-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-python-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-python-q6_K", + "11\u202fGB" + ], + [ + "13b-python-q8_0", + "14\u202fGB" + ], + [ + "33b-v1.1-fp16", + "67\u202fGB" + ], + [ + "33b-v1.1-q2_K", + "14\u202fGB" + ], + [ + "33b-v1.1-q3_K_S", + "14\u202fGB" + ], + [ + "33b-v1.1-q3_K_M", + "16\u202fGB" + ], + [ + "33b-v1.1-q3_K_L", + "18\u202fGB" + ], + [ + "33b-v1.1-q4_0", + "19\u202fGB" + ], + [ + "33b-v1.1-q4_1", + "21\u202fGB" + ], + [ + "33b-v1.1-q4_K_S", + "19\u202fGB" + ], + [ + "33b-v1.1-q4_K_M", + "20\u202fGB" + ], + [ + "33b-v1.1-q5_0", + "23\u202fGB" + ], + [ + "33b-v1.1-q5_1", + "25\u202fGB" + ], + [ + "33b-v1.1-q5_K_S", + "23\u202fGB" + ], + [ + "33b-v1.1-q5_K_M", + "24\u202fGB" + ], + [ + "33b-v1.1-q6_K", + "27\u202fGB" + ], + [ + "33b-v1.1-q8_0", + "35\u202fGB" + ], + [ + "34b-python-fp16", + "67\u202fGB" + ], 
+ [ + "34b-python-q2_K", + "14\u202fGB" + ], + [ + "34b-python-q3_K_S", + "15\u202fGB" + ], + [ + "34b-python-q3_K_M", + "16\u202fGB" + ], + [ + "34b-python-q3_K_L", + "18\u202fGB" + ], + [ + "34b-python-q4_0", + "19\u202fGB" + ], + [ + "34b-python-q4_1", + "21\u202fGB" + ], + [ + "34b-python-q4_K_S", + "19\u202fGB" + ], + [ + "34b-python-q4_K_M", + "20\u202fGB" + ], + [ + "34b-python-q5_0", + "23\u202fGB" + ], + [ + "34b-python-q5_1", + "25\u202fGB" + ], + [ + "34b-python-q5_K_S", + "23\u202fGB" + ], + [ + "34b-python-q5_K_M", + "24\u202fGB" + ], + [ + "34b-python-q6_K", + "28\u202fGB" + ], + [ + "34b-python-q8_0", + "36\u202fGB" ] ], "image": false, @@ -12417,539 +13377,1079 @@ "tags": [ [ "latest", - "1.6 GB" + "1.6\u202fGB" ], [ "3b", - "1.6 GB" + "1.6\u202fGB" ], [ "code", - "1.6 GB" + "1.6\u202fGB" ], [ "instruct", - "1.6 GB" + "1.6\u202fGB" ], [ "3b-code", - "1.6 GB" - ], - [ - "3b-code-fp16", - "5.6 GB" - ], - [ - "3b-code-q2_K", - "1.1 GB" - ], - [ - "3b-code-q3_K_L", - "1.5 GB" - ], - [ - "3b-code-q3_K_M", - "1.4 GB" - ], - [ - "3b-code-q3_K_S", - "1.3 GB" - ], - [ - "3b-code-q4_0", - "1.6 GB" - ], - [ - "3b-code-q4_1", - "1.8 GB" - ], - [ - "3b-code-q4_K_M", - "1.7 GB" - ], - [ - "3b-code-q4_K_S", - "1.6 GB" - ], - [ - "3b-code-q5_0", - "1.9 GB" - ], - [ - "3b-code-q5_1", - "2.1 GB" - ], - [ - "3b-code-q5_K_M", - "2.0 GB" - ], - [ - "3b-code-q5_K_S", - "1.9 GB" - ], - [ - "3b-code-q6_K", - "2.3 GB" - ], - [ - "3b-code-q8_0", - "3.0 GB" + "1.6\u202fGB" ], [ "3b-instruct", - "1.6 GB" + "1.6\u202fGB" + ], + [ + "3b-code-fp16", + "5.6\u202fGB" + ], + [ + "3b-code-q2_K", + "1.1\u202fGB" + ], + [ + "3b-code-q3_K_S", + "1.3\u202fGB" + ], + [ + "3b-code-q3_K_M", + "1.4\u202fGB" + ], + [ + "3b-code-q3_K_L", + "1.5\u202fGB" + ], + [ + "3b-code-q4_0", + "1.6\u202fGB" + ], + [ + "3b-code-q4_1", + "1.8\u202fGB" + ], + [ + "3b-code-q4_K_S", + "1.6\u202fGB" + ], + [ + "3b-code-q4_K_M", + "1.7\u202fGB" + ], + [ + "3b-code-q5_0", + "1.9\u202fGB" + ], + [ + "3b-code-q5_1", + "2.1\u202fGB" + ], + [ + "3b-code-q5_K_S", + "1.9\u202fGB" + ], + [ + "3b-code-q5_K_M", + "2.0\u202fGB" + ], + [ + "3b-code-q6_K", + "2.3\u202fGB" + ], + [ + "3b-code-q8_0", + "3.0\u202fGB" ], [ "3b-instruct-fp16", - "5.6 GB" + "5.6\u202fGB" ], [ "3b-instruct-q2_K", - "1.1 GB" - ], - [ - "3b-instruct-q3_K_L", - "1.5 GB" - ], - [ - "3b-instruct-q3_K_M", - "1.4 GB" + "1.1\u202fGB" ], [ "3b-instruct-q3_K_S", - "1.3 GB" + "1.3\u202fGB" + ], + [ + "3b-instruct-q3_K_M", + "1.4\u202fGB" + ], + [ + "3b-instruct-q3_K_L", + "1.5\u202fGB" ], [ "3b-instruct-q4_0", - "1.6 GB" + "1.6\u202fGB" ], [ "3b-instruct-q4_1", - "1.8 GB" - ], - [ - "3b-instruct-q4_K_M", - "1.7 GB" + "1.8\u202fGB" ], [ "3b-instruct-q4_K_S", - "1.6 GB" + "1.6\u202fGB" + ], + [ + "3b-instruct-q4_K_M", + "1.7\u202fGB" ], [ "3b-instruct-q5_0", - "1.9 GB" + "1.9\u202fGB" ], [ "3b-instruct-q5_1", - "2.1 GB" - ], - [ - "3b-instruct-q5_K_M", - "2.0 GB" + "2.1\u202fGB" ], [ "3b-instruct-q5_K_S", - "1.9 GB" + "1.9\u202fGB" + ], + [ + "3b-instruct-q5_K_M", + "2.0\u202fGB" ], [ "3b-instruct-q6_K", - "2.3 GB" + "2.3\u202fGB" ], [ "3b-instruct-q8_0", - "3.0 GB" + "3.0\u202fGB" ] ], "image": false, "author": "Stability AI" }, + "openhermes": { + "url": "https://ollama.com/library/openhermes", + "description": "OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully open datasets.", + "tags": [ + [ + "latest", + "4.1\u202fGB" + ], + [ + "v2.5", + "4.1\u202fGB" + ], + [ + "v2", + "4.1\u202fGB" + ], + [ + "7b-v2", + "4.1\u202fGB" + ], + [ + "7b-v2.5", + "4.1\u202fGB" 
+ ], + [ + "7b-mistral-v2-fp16", + "14\u202fGB" + ], + [ + "7b-mistral-v2-q2_K", + "3.1\u202fGB" + ], + [ + "7b-mistral-v2-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-mistral-v2-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-mistral-v2-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-mistral-v2-q4_0", + "4.1\u202fGB" + ], + [ + "7b-mistral-v2-q4_1", + "4.6\u202fGB" + ], + [ + "7b-mistral-v2-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-mistral-v2-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-mistral-v2-q5_0", + "5.0\u202fGB" + ], + [ + "7b-mistral-v2-q5_1", + "5.4\u202fGB" + ], + [ + "7b-mistral-v2-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-mistral-v2-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-mistral-v2-q6_K", + "5.9\u202fGB" + ], + [ + "7b-mistral-v2-q8_0", + "7.7\u202fGB" + ], + [ + "7b-mistral-v2.5-fp16", + "14\u202fGB" + ], + [ + "7b-mistral-v2.5-q2_K", + "3.1\u202fGB" + ], + [ + "7b-mistral-v2.5-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-mistral-v2.5-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-mistral-v2.5-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-mistral-v2.5-q4_0", + "4.1\u202fGB" + ], + [ + "7b-mistral-v2.5-q4_1", + "4.6\u202fGB" + ], + [ + "7b-mistral-v2.5-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-mistral-v2.5-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-mistral-v2.5-q5_0", + "5.0\u202fGB" + ], + [ + "7b-mistral-v2.5-q5_1", + "5.4\u202fGB" + ], + [ + "7b-mistral-v2.5-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-mistral-v2.5-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-mistral-v2.5-q6_K", + "5.9\u202fGB" + ], + [ + "7b-mistral-v2.5-q8_0", + "7.7\u202fGB" + ] + ], + "image": false, + "author": "Teknium" + }, + "all-minilm": { + "url": "https://ollama.com/library/all-minilm", + "description": "Embedding models on very large sentence level datasets.", + "tags": [ + [ + "latest", + "46\u202fMB" + ], + [ + "22m", + "46\u202fMB" + ], + [ + "33m", + "67\u202fMB" + ], + [ + "l12", + "67\u202fMB" + ], + [ + "l12-v2", + "67\u202fMB" + ], + [ + "l6", + "46\u202fMB" + ], + [ + "l6-v2", + "46\u202fMB" + ], + [ + "v2", + "46\u202fMB" + ], + [ + "22m-l6-v2-fp16", + "46\u202fMB" + ], + [ + "33m-l12-v2-fp16", + "67\u202fMB" + ] + ], + "image": false, + "author": "Sentence Transformers" + }, "codeqwen": { "url": "https://ollama.com/library/codeqwen", "description": "CodeQwen1.5 is a large language model pretrained on a large amount of code data.", "tags": [ [ "latest", - "4.2 GB" + "4.2\u202fGB" ], [ "7b", - "4.2 GB" + "4.2\u202fGB" ], [ "chat", - "4.2 GB" + "4.2\u202fGB" ], [ "code", - "4.2 GB" + "4.2\u202fGB" ], [ "v1.5", - "4.2 GB" - ], - [ - "7b-chat", - "4.2 GB" - ], - [ - "7b-chat-v1.5-fp16", - "15 GB" - ], - [ - "7b-chat-v1.5-q2_K", - "3.1 GB" - ], - [ - "7b-chat-v1.5-q3_K_L", - "4.0 GB" - ], - [ - "7b-chat-v1.5-q3_K_M", - "3.8 GB" - ], - [ - "7b-chat-v1.5-q3_K_S", - "3.5 GB" - ], - [ - "7b-chat-v1.5-q4_0", - "4.2 GB" - ], - [ - "7b-chat-v1.5-q4_1", - "4.6 GB" - ], - [ - "7b-chat-v1.5-q4_K_M", - "4.7 GB" - ], - [ - "7b-chat-v1.5-q4_K_S", - "4.4 GB" - ], - [ - "7b-chat-v1.5-q5_0", - "5.0 GB" - ], - [ - "7b-chat-v1.5-q5_1", - "5.5 GB" - ], - [ - "7b-chat-v1.5-q5_K_M", - "5.4 GB" - ], - [ - "7b-chat-v1.5-q5_K_S", - "5.1 GB" - ], - [ - "7b-chat-v1.5-q6_K", - "6.4 GB" - ], - [ - "7b-chat-v1.5-q8_0", - "7.7 GB" - ], - [ - "7b-code", - "4.2 GB" - ], - [ - "7b-code-v1.5-fp16", - "15 GB" - ], - [ - "7b-code-v1.5-q4_0", - "4.2 GB" - ], - [ - "7b-code-v1.5-q4_1", - "4.6 GB" - ], - [ - "7b-code-v1.5-q5_0", - "5.0 GB" - ], - [ - "7b-code-v1.5-q5_1", - "5.5 GB" - ], - [ - "7b-code-v1.5-q8_0", - "7.7 GB" + "4.2\u202fGB" ], [ "v1.5-chat", - "4.2 GB" + "4.2\u202fGB" ], [ 
"v1.5-code", - "4.2 GB" + "4.2\u202fGB" + ], + [ + "7b-chat", + "4.2\u202fGB" + ], + [ + "7b-code", + "4.2\u202fGB" + ], + [ + "7b-chat-v1.5-fp16", + "15\u202fGB" + ], + [ + "7b-chat-v1.5-q2_K", + "3.1\u202fGB" + ], + [ + "7b-chat-v1.5-q3_K_S", + "3.5\u202fGB" + ], + [ + "7b-chat-v1.5-q3_K_M", + "3.8\u202fGB" + ], + [ + "7b-chat-v1.5-q3_K_L", + "4.0\u202fGB" + ], + [ + "7b-chat-v1.5-q4_0", + "4.2\u202fGB" + ], + [ + "7b-chat-v1.5-q4_1", + "4.6\u202fGB" + ], + [ + "7b-chat-v1.5-q4_K_S", + "4.4\u202fGB" + ], + [ + "7b-chat-v1.5-q4_K_M", + "4.7\u202fGB" + ], + [ + "7b-chat-v1.5-q5_0", + "5.0\u202fGB" + ], + [ + "7b-chat-v1.5-q5_1", + "5.5\u202fGB" + ], + [ + "7b-chat-v1.5-q5_K_S", + "5.1\u202fGB" + ], + [ + "7b-chat-v1.5-q5_K_M", + "5.4\u202fGB" + ], + [ + "7b-chat-v1.5-q6_K", + "6.4\u202fGB" + ], + [ + "7b-chat-v1.5-q8_0", + "7.7\u202fGB" + ], + [ + "7b-code-v1.5-fp16", + "15\u202fGB" + ], + [ + "7b-code-v1.5-q4_0", + "4.2\u202fGB" + ], + [ + "7b-code-v1.5-q4_1", + "4.6\u202fGB" + ], + [ + "7b-code-v1.5-q5_0", + "5.0\u202fGB" + ], + [ + "7b-code-v1.5-q5_1", + "5.5\u202fGB" + ], + [ + "7b-code-v1.5-q8_0", + "7.7\u202fGB" ] ], "image": false, "author": "Alibaba" }, + "stablelm2": { + "url": "https://ollama.com/library/stablelm2", + "description": "Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model trained on multilingual data in English, Spanish, German, Italian, French, Portuguese, and Dutch.", + "tags": [ + [ + "latest", + "983\u202fMB" + ], + [ + "1.6b", + "983\u202fMB" + ], + [ + "12b", + "7.0\u202fGB" + ], + [ + "chat", + "983\u202fMB" + ], + [ + "zephyr", + "983\u202fMB" + ], + [ + "1.6b-chat", + "983\u202fMB" + ], + [ + "1.6b-zephyr", + "983\u202fMB" + ], + [ + "12b-chat", + "7.0\u202fGB" + ], + [ + "12b-text", + "7.0\u202fGB" + ], + [ + "1.6b-fp16", + "3.3\u202fGB" + ], + [ + "1.6b-q2_K", + "694\u202fMB" + ], + [ + "1.6b-q3_K_S", + "792\u202fMB" + ], + [ + "1.6b-q3_K_M", + "858\u202fMB" + ], + [ + "1.6b-q3_K_L", + "915\u202fMB" + ], + [ + "1.6b-q4_0", + "983\u202fMB" + ], + [ + "1.6b-q4_1", + "1.1\u202fGB" + ], + [ + "1.6b-q4_K_S", + "989\u202fMB" + ], + [ + "1.6b-q4_K_M", + "1.0\u202fGB" + ], + [ + "1.6b-q5_0", + "1.2\u202fGB" + ], + [ + "1.6b-q5_1", + "1.3\u202fGB" + ], + [ + "1.6b-q5_K_S", + "1.2\u202fGB" + ], + [ + "1.6b-q5_K_M", + "1.2\u202fGB" + ], + [ + "1.6b-q6_K", + "1.4\u202fGB" + ], + [ + "1.6b-q8_0", + "1.8\u202fGB" + ], + [ + "12b-fp16", + "24\u202fGB" + ], + [ + "12b-q2_K", + "4.7\u202fGB" + ], + [ + "12b-q3_K_S", + "5.4\u202fGB" + ], + [ + "12b-q3_K_M", + "6.0\u202fGB" + ], + [ + "12b-q3_K_L", + "6.5\u202fGB" + ], + [ + "12b-q4_0", + "7.0\u202fGB" + ], + [ + "12b-q4_1", + "7.7\u202fGB" + ], + [ + "12b-q4_K_S", + "7.0\u202fGB" + ], + [ + "12b-q4_K_M", + "7.4\u202fGB" + ], + [ + "12b-q5_0", + "8.4\u202fGB" + ], + [ + "12b-q5_1", + "9.1\u202fGB" + ], + [ + "12b-q5_K_S", + "8.4\u202fGB" + ], + [ + "12b-q5_K_M", + "8.6\u202fGB" + ], + [ + "12b-q6_K", + "10.0\u202fGB" + ], + [ + "12b-q8_0", + "13\u202fGB" + ], + [ + "1.6b-chat-fp16", + "3.3\u202fGB" + ], + [ + "1.6b-chat-q2_K", + "694\u202fMB" + ], + [ + "1.6b-chat-q3_K_S", + "792\u202fMB" + ], + [ + "1.6b-chat-q3_K_M", + "858\u202fMB" + ], + [ + "1.6b-chat-q3_K_L", + "915\u202fMB" + ], + [ + "1.6b-chat-q4_0", + "983\u202fMB" + ], + [ + "1.6b-chat-q4_1", + "1.1\u202fGB" + ], + [ + "1.6b-chat-q4_K_S", + "989\u202fMB" + ], + [ + "1.6b-chat-q4_K_M", + "1.0\u202fGB" + ], + [ + "1.6b-chat-q5_0", + "1.2\u202fGB" + ], + [ + "1.6b-chat-q5_1", + "1.3\u202fGB" + ], + [ + "1.6b-chat-q5_K_S", + "1.2\u202fGB" + ], + [ 
+ "1.6b-chat-q5_K_M", + "1.2\u202fGB" + ], + [ + "1.6b-chat-q6_K", + "1.4\u202fGB" + ], + [ + "1.6b-chat-q8_0", + "1.8\u202fGB" + ], + [ + "1.6b-zephyr-fp16", + "3.3\u202fGB" + ], + [ + "1.6b-zephyr-q2_K", + "694\u202fMB" + ], + [ + "1.6b-zephyr-q3_K_S", + "792\u202fMB" + ], + [ + "1.6b-zephyr-q3_K_M", + "858\u202fMB" + ], + [ + "1.6b-zephyr-q3_K_L", + "915\u202fMB" + ], + [ + "1.6b-zephyr-q4_0", + "983\u202fMB" + ], + [ + "1.6b-zephyr-q4_1", + "1.1\u202fGB" + ], + [ + "1.6b-zephyr-q4_K_S", + "989\u202fMB" + ], + [ + "1.6b-zephyr-q4_K_M", + "1.0\u202fGB" + ], + [ + "1.6b-zephyr-q5_0", + "1.2\u202fGB" + ], + [ + "1.6b-zephyr-q5_1", + "1.3\u202fGB" + ], + [ + "1.6b-zephyr-q5_K_S", + "1.2\u202fGB" + ], + [ + "1.6b-zephyr-q5_K_M", + "1.2\u202fGB" + ], + [ + "1.6b-zephyr-q6_K", + "1.4\u202fGB" + ], + [ + "1.6b-zephyr-q8_0", + "1.8\u202fGB" + ], + [ + "12b-chat-fp16", + "24\u202fGB" + ], + [ + "12b-chat-q2_K", + "4.7\u202fGB" + ], + [ + "12b-chat-q3_K_S", + "5.4\u202fGB" + ], + [ + "12b-chat-q3_K_M", + "6.0\u202fGB" + ], + [ + "12b-chat-q3_K_L", + "6.5\u202fGB" + ], + [ + "12b-chat-q4_0", + "7.0\u202fGB" + ], + [ + "12b-chat-q4_1", + "7.7\u202fGB" + ], + [ + "12b-chat-q4_K_S", + "7.0\u202fGB" + ], + [ + "12b-chat-q4_K_M", + "7.4\u202fGB" + ], + [ + "12b-chat-q5_0", + "8.4\u202fGB" + ], + [ + "12b-chat-q5_1", + "9.1\u202fGB" + ], + [ + "12b-chat-q5_K_S", + "8.4\u202fGB" + ], + [ + "12b-chat-q5_K_M", + "8.6\u202fGB" + ], + [ + "12b-chat-q6_K", + "10.0\u202fGB" + ], + [ + "12b-chat-q8_0", + "13\u202fGB" + ] + ], + "image": false, + "author": "Stability AI" + }, "wizard-math": { "url": "https://ollama.com/library/wizard-math", "description": "Model focused on math and logic problems", "tags": [ [ "latest", - "4.1 GB" - ], - [ - "70b", - "39 GB" - ], - [ - "13b", - "7.4 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" + "4.1\u202fGB" ], [ - "70b-fp16", - "138 GB" + "13b", + "7.4\u202fGB" ], [ - "70b-q2_K", - "29 GB" - ], - [ - "70b-q3_K_L", - "36 GB" - ], - [ - "70b-q3_K_M", - "33 GB" - ], - [ - "70b-q3_K_S", - "30 GB" - ], - [ - "70b-q4_0", - "39 GB" - ], - [ - "70b-q4_1", - "43 GB" - ], - [ - "70b-q4_K_M", - "41 GB" - ], - [ - "70b-q4_K_S", - "39 GB" - ], - [ - "70b-q5_0", - "47 GB" - ], - [ - "70b-q5_1", - "52 GB" - ], - [ - "70b-q5_K_M", - "49 GB" - ], - [ - "70b-q5_K_S", - "47 GB" - ], - [ - "70b-q6_K", - "57 GB" - ], - [ - "70b-q8_0", - "73 GB" - ], - [ - "13b-fp16", - "26 GB" - ], - [ - "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" - ], - [ - "13b-q3_K_S", - "5.7 GB" - ], - [ - "13b-q4_0", - "7.4 GB" - ], - [ - "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" - ], - [ - "13b-q4_K_S", - "7.4 GB" - ], - [ - "13b-q5_0", - "9.0 GB" - ], - [ - "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" - ], - [ - "13b-q5_K_S", - "9.0 GB" - ], - [ - "13b-q6_K", - "11 GB" - ], - [ - "13b-q8_0", - "14 GB" - ], - [ - "7b-v1.1-fp16", - "14 GB" - ], - [ - "7b-v1.1-q2_K", - "3.1 GB" - ], - [ - "7b-v1.1-q3_K_L", - "3.8 GB" - ], - [ - "7b-v1.1-q3_K_M", - "3.5 GB" - ], - [ - "7b-v1.1-q3_K_S", - "3.2 GB" - ], - [ - "7b-v1.1-q4_0", - "4.1 GB" - ], - [ - "7b-v1.1-q4_1", - "4.6 GB" - ], - [ - "7b-v1.1-q4_K_M", - "4.4 GB" - ], - [ - "7b-v1.1-q4_K_S", - "4.1 GB" - ], - [ - "7b-v1.1-q5_0", - "5.0 GB" - ], - [ - "7b-v1.1-q5_1", - "5.4 GB" - ], - [ - "7b-v1.1-q5_K_M", - "5.1 GB" - ], - [ - "7b-v1.1-q5_K_S", - "5.0 GB" - ], - [ - "7b-v1.1-q6_K", - "5.9 GB" - ], - [ - "7b-v1.1-q8_0", - "7.7 GB" + "70b", + "39\u202fGB" ], [ "7b-fp16", - "13 GB" + "13\u202fGB" ], [ 
"7b-q2_K", - "2.8 GB" - ], - [ - "7b-q3_K_L", - "3.6 GB" - ], - [ - "7b-q3_K_M", - "3.3 GB" + "2.8\u202fGB" ], [ "7b-q3_K_S", - "2.9 GB" + "2.9\u202fGB" + ], + [ + "7b-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-q3_K_L", + "3.6\u202fGB" ], [ "7b-q4_0", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-q4_1", - "4.2 GB" - ], - [ - "7b-q4_K_M", - "4.1 GB" + "4.2\u202fGB" ], [ "7b-q4_K_S", - "3.9 GB" + "3.9\u202fGB" + ], + [ + "7b-q4_K_M", + "4.1\u202fGB" ], [ "7b-q5_0", - "4.7 GB" + "4.7\u202fGB" ], [ "7b-q5_1", - "5.1 GB" - ], - [ - "7b-q5_K_M", - "4.8 GB" + "5.1\u202fGB" ], [ "7b-q5_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "7b-q5_K_M", + "4.8\u202fGB" ], [ "7b-q6_K", - "5.5 GB" + "5.5\u202fGB" ], [ "7b-q8_0", - "7.2 GB" + "7.2\u202fGB" + ], + [ + "13b-fp16", + "26\u202fGB" + ], + [ + "13b-q2_K", + "5.4\u202fGB" + ], + [ + "13b-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-q4_0", + "7.4\u202fGB" + ], + [ + "13b-q4_1", + "8.2\u202fGB" + ], + [ + "13b-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-q5_0", + "9.0\u202fGB" + ], + [ + "13b-q5_1", + "9.8\u202fGB" + ], + [ + "13b-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-q6_K", + "11\u202fGB" + ], + [ + "13b-q8_0", + "14\u202fGB" + ], + [ + "70b-fp16", + "138\u202fGB" + ], + [ + "70b-q2_K", + "29\u202fGB" + ], + [ + "70b-q3_K_S", + "30\u202fGB" + ], + [ + "70b-q3_K_M", + "33\u202fGB" + ], + [ + "70b-q3_K_L", + "36\u202fGB" + ], + [ + "70b-q4_0", + "39\u202fGB" + ], + [ + "70b-q4_1", + "43\u202fGB" + ], + [ + "70b-q4_K_S", + "39\u202fGB" + ], + [ + "70b-q4_K_M", + "41\u202fGB" + ], + [ + "70b-q5_0", + "47\u202fGB" + ], + [ + "70b-q5_1", + "52\u202fGB" + ], + [ + "70b-q5_K_S", + "47\u202fGB" + ], + [ + "70b-q5_K_M", + "49\u202fGB" + ], + [ + "70b-q6_K", + "57\u202fGB" + ], + [ + "70b-q8_0", + "73\u202fGB" + ], + [ + "7b-v1.1-fp16", + "14\u202fGB" + ], + [ + "7b-v1.1-q2_K", + "3.1\u202fGB" + ], + [ + "7b-v1.1-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-v1.1-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v1.1-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v1.1-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v1.1-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v1.1-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v1.1-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-v1.1-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v1.1-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v1.1-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v1.1-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v1.1-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v1.1-q8_0", + "7.7\u202fGB" ] ], "image": false, @@ -12961,2871 +14461,1491 @@ "tags": [ [ "latest", - "4.1 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v3.1", - "4.1 GB" - ], - [ - "7b-v3.1-fp16", - "14 GB" - ], - [ - "7b-v3.1-q2_K", - "3.1 GB" - ], - [ - "7b-v3.1-q3_K_L", - "3.8 GB" - ], - [ - "7b-v3.1-q3_K_M", - "3.5 GB" - ], - [ - "7b-v3.1-q3_K_S", - "3.2 GB" - ], - [ - "7b-v3.1-q4_0", - "4.1 GB" - ], - [ - "7b-v3.1-q4_1", - "4.6 GB" - ], - [ - "7b-v3.1-q4_K_M", - "4.4 GB" - ], - [ - "7b-v3.1-q4_K_S", - "4.1 GB" - ], - [ - "7b-v3.1-q5_0", - "5.0 GB" - ], - [ - "7b-v3.1-q5_1", - "5.4 GB" - ], - [ - "7b-v3.1-q5_K_M", - "5.1 GB" - ], - [ - "7b-v3.1-q5_K_S", - "5.0 GB" - ], - [ - "7b-v3.1-q6_K", - "5.9 GB" - ], - [ - "7b-v3.1-q8_0", - "7.7 GB" + "4.1\u202fGB" ], [ "7b-v3.2", - "4.1 GB" - ], - [ - "7b-v3.2-fp16", - "14 GB" - ], - [ - "7b-v3.2-q2_K", - "3.1 GB" - ], - [ - "7b-v3.2-q3_K_L", - "3.8 GB" - ], - [ - "7b-v3.2-q3_K_M", - "3.5 GB" - ], - [ - "7b-v3.2-q3_K_S", - "3.2 
GB" - ], - [ - "7b-v3.2-q4_0", - "4.1 GB" - ], - [ - "7b-v3.2-q4_1", - "4.6 GB" - ], - [ - "7b-v3.2-q4_K_M", - "4.4 GB" - ], - [ - "7b-v3.2-q4_K_S", - "4.1 GB" - ], - [ - "7b-v3.2-q5_0", - "5.0 GB" - ], - [ - "7b-v3.2-q5_1", - "5.4 GB" - ], - [ - "7b-v3.2-q5_K_M", - "5.1 GB" - ], - [ - "7b-v3.2-q5_K_S", - "5.0 GB" - ], - [ - "7b-v3.2-q6_K", - "5.9 GB" - ], - [ - "7b-v3.2-q8_0", - "7.7 GB" + "4.1\u202fGB" ], [ "7b-v3.3", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v3.1-fp16", + "14\u202fGB" + ], + [ + "7b-v3.1-q2_K", + "3.1\u202fGB" + ], + [ + "7b-v3.1-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-v3.1-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v3.1-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v3.1-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v3.1-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v3.1-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v3.1-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-v3.1-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v3.1-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v3.1-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v3.1-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v3.1-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v3.1-q8_0", + "7.7\u202fGB" + ], + [ + "7b-v3.2-fp16", + "14\u202fGB" + ], + [ + "7b-v3.2-q2_K", + "3.1\u202fGB" + ], + [ + "7b-v3.2-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-v3.2-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v3.2-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-v3.2-q4_0", + "4.1\u202fGB" + ], + [ + "7b-v3.2-q4_1", + "4.6\u202fGB" + ], + [ + "7b-v3.2-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-v3.2-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-v3.2-q5_0", + "5.0\u202fGB" + ], + [ + "7b-v3.2-q5_1", + "5.4\u202fGB" + ], + [ + "7b-v3.2-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-v3.2-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-v3.2-q6_K", + "5.9\u202fGB" + ], + [ + "7b-v3.2-q8_0", + "7.7\u202fGB" ], [ "7b-v3.3-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-v3.3-q2_K", - "3.1 GB" - ], - [ - "7b-v3.3-q3_K_L", - "3.8 GB" - ], - [ - "7b-v3.3-q3_K_M", - "3.5 GB" + "3.1\u202fGB" ], [ "7b-v3.3-q3_K_S", - "3.2 GB" + "3.2\u202fGB" + ], + [ + "7b-v3.3-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v3.3-q3_K_L", + "3.8\u202fGB" ], [ "7b-v3.3-q4_0", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v3.3-q4_1", - "4.6 GB" - ], - [ - "7b-v3.3-q4_K_M", - "4.4 GB" + "4.6\u202fGB" ], [ "7b-v3.3-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v3.3-q4_K_M", + "4.4\u202fGB" ], [ "7b-v3.3-q5_0", - "5.0 GB" + "5.0\u202fGB" ], [ "7b-v3.3-q5_1", - "5.4 GB" - ], - [ - "7b-v3.3-q5_K_M", - "5.1 GB" + "5.4\u202fGB" ], [ "7b-v3.3-q5_K_S", - "5.0 GB" + "5.0\u202fGB" + ], + [ + "7b-v3.3-q5_K_M", + "5.1\u202fGB" ], [ "7b-v3.3-q6_K", - "5.9 GB" + "5.9\u202fGB" ], [ "7b-v3.3-q8_0", - "7.7 GB" + "7.7\u202fGB" ] ], "image": false, "author": "Intel" }, - "stablelm2": { - "url": "https://ollama.com/library/stablelm2", - "description": "Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model trained on multilingual data in English, Spanish, German, Italian, French, Portuguese, and Dutch.", - "tags": [ - [ - "latest", - "983 MB" - ], - [ - "12b", - "7.0 GB" - ], - [ - "1.6b", - "983 MB" - ], - [ - "chat", - "983 MB" - ], - [ - "zephyr", - "983 MB" - ], - [ - "12b-chat", - "7.0 GB" - ], - [ - "12b-chat-fp16", - "24 GB" - ], - [ - "12b-chat-q2_K", - "4.7 GB" - ], - [ - "12b-chat-q3_K_L", - "6.5 GB" - ], - [ - "12b-chat-q3_K_M", - "6.0 GB" - ], - [ - "12b-chat-q3_K_S", - "5.4 GB" - ], - [ - "12b-chat-q4_0", - "7.0 GB" - ], - [ - "12b-chat-q4_1", - "7.7 GB" - ], - [ - "12b-chat-q4_K_M", - "7.4 GB" - ], - [ - "12b-chat-q4_K_S", - "7.0 GB" - ], - [ - "12b-chat-q5_0", - "8.4 GB" - ], - [ - 
"12b-chat-q5_1", - "9.1 GB" - ], - [ - "12b-chat-q5_K_M", - "8.6 GB" - ], - [ - "12b-chat-q5_K_S", - "8.4 GB" - ], - [ - "12b-chat-q6_K", - "10.0 GB" - ], - [ - "12b-chat-q8_0", - "13 GB" - ], - [ - "12b-text", - "7.0 GB" - ], - [ - "1.6b-chat", - "983 MB" - ], - [ - "12b-fp16", - "24 GB" - ], - [ - "12b-q2_K", - "4.7 GB" - ], - [ - "12b-q3_K_L", - "6.5 GB" - ], - [ - "12b-q3_K_M", - "6.0 GB" - ], - [ - "12b-q3_K_S", - "5.4 GB" - ], - [ - "12b-q4_0", - "7.0 GB" - ], - [ - "12b-q4_1", - "7.7 GB" - ], - [ - "12b-q4_K_M", - "7.4 GB" - ], - [ - "12b-q4_K_S", - "7.0 GB" - ], - [ - "12b-q5_0", - "8.4 GB" - ], - [ - "12b-q5_1", - "9.1 GB" - ], - [ - "12b-q5_K_M", - "8.6 GB" - ], - [ - "12b-q5_K_S", - "8.4 GB" - ], - [ - "12b-q6_K", - "10.0 GB" - ], - [ - "12b-q8_0", - "13 GB" - ], - [ - "1.6b-chat-fp16", - "3.3 GB" - ], - [ - "1.6b-chat-q2_K", - "694 MB" - ], - [ - "1.6b-chat-q3_K_L", - "915 MB" - ], - [ - "1.6b-chat-q3_K_M", - "858 MB" - ], - [ - "1.6b-chat-q3_K_S", - "792 MB" - ], - [ - "1.6b-chat-q4_0", - "983 MB" - ], - [ - "1.6b-chat-q4_1", - "1.1 GB" - ], - [ - "1.6b-chat-q4_K_M", - "1.0 GB" - ], - [ - "1.6b-chat-q4_K_S", - "989 MB" - ], - [ - "1.6b-chat-q5_0", - "1.2 GB" - ], - [ - "1.6b-chat-q5_1", - "1.3 GB" - ], - [ - "1.6b-chat-q5_K_M", - "1.2 GB" - ], - [ - "1.6b-chat-q5_K_S", - "1.2 GB" - ], - [ - "1.6b-chat-q6_K", - "1.4 GB" - ], - [ - "1.6b-chat-q8_0", - "1.8 GB" - ], - [ - "1.6b-zephyr", - "983 MB" - ], - [ - "1.6b-zephyr-fp16", - "3.3 GB" - ], - [ - "1.6b-zephyr-q2_K", - "694 MB" - ], - [ - "1.6b-zephyr-q3_K_L", - "915 MB" - ], - [ - "1.6b-zephyr-q3_K_M", - "858 MB" - ], - [ - "1.6b-zephyr-q3_K_S", - "792 MB" - ], - [ - "1.6b-zephyr-q4_0", - "983 MB" - ], - [ - "1.6b-zephyr-q4_1", - "1.1 GB" - ], - [ - "1.6b-zephyr-q4_K_M", - "1.0 GB" - ], - [ - "1.6b-zephyr-q4_K_S", - "989 MB" - ], - [ - "1.6b-zephyr-q5_0", - "1.2 GB" - ], - [ - "1.6b-zephyr-q5_1", - "1.3 GB" - ], - [ - "1.6b-zephyr-q5_K_M", - "1.2 GB" - ], - [ - "1.6b-zephyr-q5_K_S", - "1.2 GB" - ], - [ - "1.6b-zephyr-q6_K", - "1.4 GB" - ], - [ - "1.6b-zephyr-q8_0", - "1.8 GB" - ], - [ - "1.6b-fp16", - "3.3 GB" - ], - [ - "1.6b-q2_K", - "694 MB" - ], - [ - "1.6b-q3_K_L", - "915 MB" - ], - [ - "1.6b-q3_K_M", - "858 MB" - ], - [ - "1.6b-q3_K_S", - "792 MB" - ], - [ - "1.6b-q4_0", - "983 MB" - ], - [ - "1.6b-q4_1", - "1.1 GB" - ], - [ - "1.6b-q4_K_M", - "1.0 GB" - ], - [ - "1.6b-q4_K_S", - "989 MB" - ], - [ - "1.6b-q5_0", - "1.2 GB" - ], - [ - "1.6b-q5_1", - "1.3 GB" - ], - [ - "1.6b-q5_K_M", - "1.2 GB" - ], - [ - "1.6b-q5_K_S", - "1.2 GB" - ], - [ - "1.6b-q6_K", - "1.4 GB" - ], - [ - "1.6b-q8_0", - "1.8 GB" - ] - ], - "image": false, - "author": "Stability AI" - }, - "granite-code": { - "url": "https://ollama.com/library/granite-code", - "description": "A family of open foundation models by IBM for Code Intelligence", - "tags": [ - [ - "latest", - "2.0 GB" - ], - [ - "34b", - "19 GB" - ], - [ - "20b", - "12 GB" - ], - [ - "8b", - "4.6 GB" - ], - [ - "3b", - "2.0 GB" - ], - [ - "34b-base-f16", - "68 GB" - ], - [ - "34b-base", - "19 GB" - ], - [ - "34b-base-q2_K", - "13 GB" - ], - [ - "34b-base-q3_K_L", - "20 GB" - ], - [ - "34b-base-q3_K_M", - "18 GB" - ], - [ - "34b-base-q3_K_S", - "15 GB" - ], - [ - "34b-base-q4_0", - "19 GB" - ], - [ - "34b-base-q4_1", - "21 GB" - ], - [ - "34b-base-q4_K_M", - "21 GB" - ], - [ - "34b-base-q4_K_S", - "19 GB" - ], - [ - "34b-base-q5_0", - "23 GB" - ], - [ - "34b-base-q5_1", - "25 GB" - ], - [ - "34b-base-q5_K_M", - "25 GB" - ], - [ - "34b-base-q5_K_S", - "23 GB" - ], - [ - "34b-base-q6_K", - "28 
GB" - ], - [ - "34b-base-q8_0", - "36 GB" - ], - [ - "34b-instruct-f16", - "68 GB" - ], - [ - "34b-instruct", - "19 GB" - ], - [ - "34b-instruct-q2_K", - "13 GB" - ], - [ - "34b-instruct-q3_K_L", - "20 GB" - ], - [ - "34b-instruct-q3_K_M", - "18 GB" - ], - [ - "34b-instruct-q3_K_S", - "15 GB" - ], - [ - "34b-instruct-q4_0", - "19 GB" - ], - [ - "34b-instruct-q4_1", - "21 GB" - ], - [ - "34b-instruct-q4_K_M", - "21 GB" - ], - [ - "34b-instruct-q4_K_S", - "19 GB" - ], - [ - "34b-instruct-q5_0", - "23 GB" - ], - [ - "34b-instruct-q5_1", - "25 GB" - ], - [ - "34b-instruct-q5_K_M", - "25 GB" - ], - [ - "34b-instruct-q5_K_S", - "23 GB" - ], - [ - "34b-instruct-q6_K", - "28 GB" - ], - [ - "34b-instruct-q8_0", - "36 GB" - ], - [ - "20b-base-f16", - "40 GB" - ], - [ - "20b-base", - "12 GB" - ], - [ - "20b-base-fp16", - "40 GB" - ], - [ - "20b-base-q2_K", - "7.9 GB" - ], - [ - "20b-base-q3_K_L", - "12 GB" - ], - [ - "20b-base-q3_K_M", - "11 GB" - ], - [ - "20b-base-q3_K_S", - "8.9 GB" - ], - [ - "20b-base-q4_0", - "12 GB" - ], - [ - "20b-base-q4_1", - "13 GB" - ], - [ - "20b-base-q4_K_M", - "13 GB" - ], - [ - "20b-base-q4_K_S", - "12 GB" - ], - [ - "20b-base-q5_0", - "14 GB" - ], - [ - "20b-base-q5_1", - "15 GB" - ], - [ - "20b-base-q5_K_M", - "15 GB" - ], - [ - "20b-base-q5_K_S", - "14 GB" - ], - [ - "20b-base-q6_K", - "17 GB" - ], - [ - "20b-base-q8_0", - "21 GB" - ], - [ - "20b-instruct-f16", - "40 GB" - ], - [ - "20b-instruct", - "12 GB" - ], - [ - "20b-instruct-q2_K", - "7.9 GB" - ], - [ - "20b-instruct-q3_K_L", - "12 GB" - ], - [ - "20b-instruct-q3_K_M", - "11 GB" - ], - [ - "20b-instruct-q3_K_S", - "8.9 GB" - ], - [ - "20b-instruct-q4_0", - "12 GB" - ], - [ - "20b-instruct-q4_1", - "13 GB" - ], - [ - "20b-instruct-q4_K_M", - "13 GB" - ], - [ - "20b-instruct-q4_K_S", - "12 GB" - ], - [ - "20b-instruct-q5_0", - "14 GB" - ], - [ - "20b-instruct-q5_1", - "15 GB" - ], - [ - "20b-instruct-q5_K_M", - "15 GB" - ], - [ - "20b-instruct-q5_K_S", - "14 GB" - ], - [ - "20b-instruct-q6_K", - "17 GB" - ], - [ - "20b-instruct-q8_0", - "21 GB" - ], - [ - "8b-base-f16", - "16 GB" - ], - [ - "8b-base", - "4.6 GB" - ], - [ - "8b-base-fp16", - "16 GB" - ], - [ - "8b-base-q2_K", - "3.1 GB" - ], - [ - "8b-base-q3_K_L", - "4.3 GB" - ], - [ - "8b-base-q3_K_M", - "3.9 GB" - ], - [ - "8b-base-q3_K_S", - "3.5 GB" - ], - [ - "8b-base-q4_0", - "4.6 GB" - ], - [ - "8b-base-q4_1", - "5.1 GB" - ], - [ - "8b-base-q4_K_M", - "4.9 GB" - ], - [ - "8b-base-q4_K_S", - "4.6 GB" - ], - [ - "8b-base-q5_0", - "5.6 GB" - ], - [ - "8b-base-q5_1", - "6.1 GB" - ], - [ - "8b-base-q5_K_M", - "5.7 GB" - ], - [ - "8b-base-q5_K_S", - "5.6 GB" - ], - [ - "8b-base-q6_K", - "6.6 GB" - ], - [ - "8b-base-q8_0", - "8.6 GB" - ], - [ - "8b-instruct", - "4.6 GB" - ], - [ - "8b-instruct-f16", - "16 GB" - ], - [ - "8b-instruct-fp16", - "16 GB" - ], - [ - "8b-instruct-q2_K", - "3.1 GB" - ], - [ - "8b-instruct-q3_K_L", - "4.3 GB" - ], - [ - "8b-instruct-q3_K_M", - "3.9 GB" - ], - [ - "8b-instruct-q3_K_S", - "3.5 GB" - ], - [ - "8b-instruct-q4_0", - "4.6 GB" - ], - [ - "8b-instruct-q4_1", - "5.1 GB" - ], - [ - "8b-instruct-q4_K_M", - "4.9 GB" - ], - [ - "8b-instruct-q4_K_S", - "4.6 GB" - ], - [ - "8b-instruct-q5_0", - "5.6 GB" - ], - [ - "8b-instruct-q5_1", - "6.1 GB" - ], - [ - "8b-instruct-q5_K_M", - "5.7 GB" - ], - [ - "8b-instruct-q5_K_S", - "5.6 GB" - ], - [ - "8b-instruct-q6_K", - "6.6 GB" - ], - [ - "8b-instruct-q8_0", - "8.6 GB" - ], - [ - "3b-base-f16", - "7.0 GB" - ], - [ - "3b-base", - "2.0 GB" - ], - [ - "3b-base-fp16", - "7.0 GB" - ], - [ - 
"3b-base-q2_K", - "1.3 GB" - ], - [ - "3b-base-q3_K_L", - "1.9 GB" - ], - [ - "3b-base-q3_K_M", - "1.7 GB" - ], - [ - "3b-base-q3_K_S", - "1.6 GB" - ], - [ - "3b-base-q4_0", - "2.0 GB" - ], - [ - "3b-base-q4_1", - "2.2 GB" - ], - [ - "3b-base-q4_K_M", - "2.1 GB" - ], - [ - "3b-base-q4_K_S", - "2.0 GB" - ], - [ - "3b-base-q5_0", - "2.4 GB" - ], - [ - "3b-base-q5_1", - "2.6 GB" - ], - [ - "3b-base-q5_K_M", - "2.5 GB" - ], - [ - "3b-base-q5_K_S", - "2.4 GB" - ], - [ - "3b-base-q6_K", - "2.9 GB" - ], - [ - "3b-base-q8_0", - "3.7 GB" - ], - [ - "3b-instruct-f16", - "7.0 GB" - ], - [ - "3b-instruct", - "2.0 GB" - ], - [ - "3b-instruct-fp16", - "7.0 GB" - ], - [ - "3b-instruct-q2_K", - "1.3 GB" - ], - [ - "3b-instruct-q3_K_L", - "1.9 GB" - ], - [ - "3b-instruct-q3_K_M", - "1.7 GB" - ], - [ - "3b-instruct-q3_K_S", - "1.6 GB" - ], - [ - "3b-instruct-q4_0", - "2.0 GB" - ], - [ - "3b-instruct-q4_1", - "2.2 GB" - ], - [ - "3b-instruct-q4_K_M", - "2.1 GB" - ], - [ - "3b-instruct-q4_K_S", - "2.0 GB" - ], - [ - "3b-instruct-q5_0", - "2.4 GB" - ], - [ - "3b-instruct-q5_1", - "2.6 GB" - ], - [ - "3b-instruct-q5_K_M", - "2.5 GB" - ], - [ - "3b-instruct-q5_K_S", - "2.4 GB" - ], - [ - "3b-instruct-q6_K", - "2.9 GB" - ], - [ - "3b-instruct-q8_0", - "3.7 GB" - ] - ], - "image": false, - "author": "IBM for Code Intelligence" - }, - "all-minilm": { - "url": "https://ollama.com/library/all-minilm", - "description": "Embedding models on very large sentence level datasets.", - "tags": [ - [ - "latest", - "46 MB" - ], - [ - "33m", - "67 MB" - ], - [ - "22m", - "46 MB" - ], - [ - "l12", - "67 MB" - ], - [ - "l6", - "46 MB" - ], - [ - "v2", - "46 MB" - ], - [ - "33m-l12-v2-fp16", - "67 MB" - ], - [ - "22m-l6-v2-fp16", - "46 MB" - ], - [ - "l12-v2", - "67 MB" - ], - [ - "l6-v2", - "46 MB" - ] - ], - "image": false, - "author": "Sentence Transformers" - }, - "phind-codellama": { - "url": "https://ollama.com/library/phind-codellama", - "description": "Code generation model based on Code Llama.", - "tags": [ - [ - "latest", - "19 GB" - ], - [ - "34b", - "19 GB" - ], - [ - "34b-python", - "19 GB" - ], - [ - "34b-python-fp16", - "67 GB" - ], - [ - "34b-python-q2_K", - "14 GB" - ], - [ - "34b-python-q3_K_L", - "18 GB" - ], - [ - "34b-python-q3_K_M", - "16 GB" - ], - [ - "34b-python-q3_K_S", - "15 GB" - ], - [ - "34b-python-q4_0", - "19 GB" - ], - [ - "34b-python-q4_1", - "21 GB" - ], - [ - "34b-python-q4_K_M", - "20 GB" - ], - [ - "34b-python-q4_K_S", - "19 GB" - ], - [ - "34b-python-q5_0", - "23 GB" - ], - [ - "34b-python-q5_1", - "25 GB" - ], - [ - "34b-python-q5_K_M", - "24 GB" - ], - [ - "34b-python-q5_K_S", - "23 GB" - ], - [ - "34b-python-q6_K", - "28 GB" - ], - [ - "34b-python-q8_0", - "36 GB" - ], - [ - "34b-v2", - "19 GB" - ], - [ - "34b-v2-fp16", - "67 GB" - ], - [ - "34b-v2-q2_K", - "14 GB" - ], - [ - "34b-v2-q3_K_L", - "18 GB" - ], - [ - "34b-v2-q3_K_M", - "16 GB" - ], - [ - "34b-v2-q3_K_S", - "15 GB" - ], - [ - "34b-v2-q4_0", - "19 GB" - ], - [ - "34b-v2-q4_1", - "21 GB" - ], - [ - "34b-v2-q4_K_M", - "20 GB" - ], - [ - "34b-v2-q4_K_S", - "19 GB" - ], - [ - "34b-v2-q5_0", - "23 GB" - ], - [ - "34b-v2-q5_1", - "25 GB" - ], - [ - "34b-v2-q5_K_M", - "24 GB" - ], - [ - "34b-v2-q5_K_S", - "23 GB" - ], - [ - "34b-v2-q6_K", - "28 GB" - ], - [ - "34b-v2-q8_0", - "36 GB" - ], - [ - "34b-fp16", - "67 GB" - ], - [ - "34b-q2_K", - "14 GB" - ], - [ - "34b-q3_K_L", - "18 GB" - ], - [ - "34b-q3_K_M", - "16 GB" - ], - [ - "34b-q3_K_S", - "15 GB" - ], - [ - "34b-q4_0", - "19 GB" - ], - [ - "34b-q4_1", - "21 GB" - ], - [ - 
"34b-q4_K_M", - "20 GB" - ], - [ - "34b-q4_K_S", - "19 GB" - ], - [ - "34b-q5_0", - "23 GB" - ], - [ - "34b-q5_1", - "25 GB" - ], - [ - "34b-q5_K_M", - "24 GB" - ], - [ - "34b-q5_K_S", - "23 GB" - ], - [ - "34b-q6_K", - "28 GB" - ], - [ - "34b-q8_0", - "36 GB" - ] - ], - "image": false, - "author": "Phind" - }, - "dolphincoder": { - "url": "https://ollama.com/library/dolphincoder", - "description": "A 7B and 15B uncensored variant of the Dolphin model family that excels at coding, based on StarCoder2.", - "tags": [ - [ - "latest", - "4.2 GB" - ], - [ - "15b", - "9.1 GB" - ], - [ - "7b", - "4.2 GB" - ], - [ - "15b-starcoder2", - "9.1 GB" - ], - [ - "15b-starcoder2-fp16", - "32 GB" - ], - [ - "15b-starcoder2-q2_K", - "6.2 GB" - ], - [ - "15b-starcoder2-q3_K_L", - "9.0 GB" - ], - [ - "15b-starcoder2-q3_K_M", - "8.1 GB" - ], - [ - "15b-starcoder2-q3_K_S", - "7.0 GB" - ], - [ - "15b-starcoder2-q4_0", - "9.1 GB" - ], - [ - "15b-starcoder2-q4_1", - "10 GB" - ], - [ - "15b-starcoder2-q4_K_M", - "9.9 GB" - ], - [ - "15b-starcoder2-q4_K_S", - "9.3 GB" - ], - [ - "15b-starcoder2-q5_0", - "11 GB" - ], - [ - "15b-starcoder2-q5_1", - "12 GB" - ], - [ - "15b-starcoder2-q5_K_M", - "11 GB" - ], - [ - "15b-starcoder2-q5_K_S", - "11 GB" - ], - [ - "15b-starcoder2-q6_K", - "13 GB" - ], - [ - "15b-starcoder2-q8_0", - "17 GB" - ], - [ - "7b-starcoder2", - "4.2 GB" - ], - [ - "7b-starcoder2-fp16", - "15 GB" - ], - [ - "7b-starcoder2-q2_K", - "2.9 GB" - ], - [ - "7b-starcoder2-q3_K_L", - "4.2 GB" - ], - [ - "7b-starcoder2-q3_K_M", - "3.8 GB" - ], - [ - "7b-starcoder2-q3_K_S", - "3.3 GB" - ], - [ - "7b-starcoder2-q4_0", - "4.2 GB" - ], - [ - "7b-starcoder2-q4_1", - "4.7 GB" - ], - [ - "7b-starcoder2-q4_K_M", - "4.6 GB" - ], - [ - "7b-starcoder2-q4_K_S", - "4.3 GB" - ], - [ - "7b-starcoder2-q5_0", - "5.1 GB" - ], - [ - "7b-starcoder2-q5_1", - "5.6 GB" - ], - [ - "7b-starcoder2-q5_K_M", - "5.3 GB" - ], - [ - "7b-starcoder2-q5_K_S", - "5.1 GB" - ], - [ - "7b-starcoder2-q6_K", - "6.1 GB" - ], - [ - "7b-starcoder2-q8_0", - "7.9 GB" - ] - ], - "image": false, - "author": "Cognitive Computations" - }, - "nous-hermes": { - "url": "https://ollama.com/library/nous-hermes", - "description": "General use models based on Llama and Llama 2 from Nous Research.", - "tags": [ - [ - "latest", - "3.8 GB" - ], - [ - "13b", - "7.4 GB" - ], - [ - "7b", - "3.8 GB" - ], - [ - "70b-llama2-fp16", - "138 GB" - ], - [ - "70b-llama2-q2_K", - "29 GB" - ], - [ - "70b-llama2-q3_K_L", - "36 GB" - ], - [ - "70b-llama2-q3_K_M", - "33 GB" - ], - [ - "70b-llama2-q3_K_S", - "30 GB" - ], - [ - "70b-llama2-q4_0", - "39 GB" - ], - [ - "70b-llama2-q4_1", - "43 GB" - ], - [ - "70b-llama2-q4_K_M", - "41 GB" - ], - [ - "70b-llama2-q4_K_S", - "39 GB" - ], - [ - "70b-llama2-q5_0", - "47 GB" - ], - [ - "70b-llama2-q5_1", - "52 GB" - ], - [ - "70b-llama2-q5_K_M", - "49 GB" - ], - [ - "70b-llama2-q6_K", - "57 GB" - ], - [ - "13b-llama2", - "7.4 GB" - ], - [ - "13b-llama2-fp16", - "26 GB" - ], - [ - "13b-llama2-q2_K", - "5.4 GB" - ], - [ - "13b-llama2-q3_K_L", - "6.9 GB" - ], - [ - "13b-llama2-q3_K_M", - "6.3 GB" - ], - [ - "13b-llama2-q3_K_S", - "5.7 GB" - ], - [ - "13b-llama2-q4_0", - "7.4 GB" - ], - [ - "13b-llama2-q4_1", - "8.2 GB" - ], - [ - "13b-llama2-q4_K_M", - "7.9 GB" - ], - [ - "13b-llama2-q4_K_S", - "7.4 GB" - ], - [ - "13b-llama2-q5_0", - "9.0 GB" - ], - [ - "13b-llama2-q5_1", - "9.8 GB" - ], - [ - "13b-llama2-q5_K_M", - "9.2 GB" - ], - [ - "13b-llama2-q5_K_S", - "9.0 GB" - ], - [ - "13b-llama2-q6_K", - "11 GB" - ], - [ - "13b-llama2-q8_0", - "14 GB" 
- ], - [ - "7b-llama2", - "3.8 GB" - ], - [ - "13b-fp16", - "26 GB" - ], - [ - "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" - ], - [ - "13b-q3_K_S", - "5.7 GB" - ], - [ - "13b-q4_0", - "7.4 GB" - ], - [ - "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" - ], - [ - "13b-q4_K_S", - "7.4 GB" - ], - [ - "13b-q5_0", - "9.0 GB" - ], - [ - "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" - ], - [ - "13b-q5_K_S", - "9.0 GB" - ], - [ - "13b-q6_K", - "11 GB" - ], - [ - "13b-q8_0", - "14 GB" - ], - [ - "7b-llama2-fp16", - "13 GB" - ], - [ - "7b-llama2-q2_K", - "2.8 GB" - ], - [ - "7b-llama2-q3_K_L", - "3.6 GB" - ], - [ - "7b-llama2-q3_K_M", - "3.3 GB" - ], - [ - "7b-llama2-q3_K_S", - "2.9 GB" - ], - [ - "7b-llama2-q4_0", - "3.8 GB" - ], - [ - "7b-llama2-q4_1", - "4.2 GB" - ], - [ - "7b-llama2-q4_K_M", - "4.1 GB" - ], - [ - "7b-llama2-q4_K_S", - "3.9 GB" - ], - [ - "7b-llama2-q5_0", - "4.7 GB" - ], - [ - "7b-llama2-q5_1", - "5.1 GB" - ], - [ - "7b-llama2-q5_K_M", - "4.8 GB" - ], - [ - "7b-llama2-q5_K_S", - "4.7 GB" - ], - [ - "7b-llama2-q6_K", - "5.5 GB" - ], - [ - "7b-llama2-q8_0", - "7.2 GB" - ] - ], - "image": false, - "author": "Nous Research" - }, - "sqlcoder": { - "url": "https://ollama.com/library/sqlcoder", - "description": "SQLCoder is a code completion model fined-tuned on StarCoder for SQL generation tasks", - "tags": [ - [ - "latest", - "4.1 GB" - ], - [ - "15b", - "9.0 GB" - ], - [ - "7b", - "4.1 GB" - ], - [ - "70b-alpha-fp16", - "138 GB" - ], - [ - "70b-alpha-q2_K", - "25 GB" - ], - [ - "70b-alpha-q3_K_L", - "36 GB" - ], - [ - "70b-alpha-q3_K_M", - "33 GB" - ], - [ - "70b-alpha-q3_K_S", - "30 GB" - ], - [ - "70b-alpha-q4_0", - "39 GB" - ], - [ - "70b-alpha-q4_1", - "43 GB" - ], - [ - "70b-alpha-q4_K_M", - "41 GB" - ], - [ - "70b-alpha-q4_K_S", - "39 GB" - ], - [ - "70b-alpha-q5_0", - "47 GB" - ], - [ - "70b-alpha-q5_1", - "52 GB" - ], - [ - "70b-alpha-q5_K_M", - "49 GB" - ], - [ - "70b-alpha-q5_K_S", - "47 GB" - ], - [ - "70b-alpha-q6_K", - "57 GB" - ], - [ - "70b-alpha-q8_0", - "73 GB" - ], - [ - "15b-fp16", - "32 GB" - ], - [ - "15b-q2_K", - "6.7 GB" - ], - [ - "15b-q3_K_L", - "9.1 GB" - ], - [ - "15b-q3_K_M", - "8.2 GB" - ], - [ - "15b-q3_K_S", - "6.9 GB" - ], - [ - "15b-q4_0", - "9.0 GB" - ], - [ - "15b-q4_1", - "10.0 GB" - ], - [ - "15b-q4_K_M", - "10.0 GB" - ], - [ - "15b-q4_K_S", - "9.1 GB" - ], - [ - "15b-q5_0", - "11 GB" - ], - [ - "15b-q5_1", - "12 GB" - ], - [ - "15b-q5_K_M", - "12 GB" - ], - [ - "15b-q5_K_S", - "11 GB" - ], - [ - "15b-q6_K", - "13 GB" - ], - [ - "15b-q8_0", - "17 GB" - ], - [ - "7b-fp16", - "14 GB" - ], - [ - "7b-q2_K", - "3.1 GB" - ], - [ - "7b-q3_K_L", - "3.8 GB" - ], - [ - "7b-q3_K_M", - "3.5 GB" - ], - [ - "7b-q3_K_S", - "3.2 GB" - ], - [ - "7b-q4_0", - "4.1 GB" - ], - [ - "7b-q4_1", - "4.6 GB" - ], - [ - "7b-q4_K_M", - "4.4 GB" - ], - [ - "7b-q4_K_S", - "4.1 GB" - ], - [ - "7b-q5_0", - "5.0 GB" - ], - [ - "7b-q5_1", - "5.4 GB" - ], - [ - "7b-q5_K_M", - "5.1 GB" - ], - [ - "7b-q5_K_S", - "5.0 GB" - ], - [ - "7b-q6_K", - "5.9 GB" - ], - [ - "7b-q8_0", - "7.7 GB" - ] - ], - "image": false, - "author": "Defog.ai" - }, "llama3-gradient": { "url": "https://ollama.com/library/llama3-gradient", "description": "This model extends LLama-3 8B's context length from 8k to over 1m tokens.", "tags": [ [ "latest", - "4.7 GB" - ], - [ - "70b", - "40 GB" - ], - [ - "8b", - "4.7 GB" + "4.7\u202fGB" ], [ "1048k", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "8b", + "4.7\u202fGB" + ], + [ + "70b", + "40\u202fGB" 
], [ "instruct", - "4.7 GB" - ], - [ - "70b-instruct-1048k-fp16", - "141 GB" - ], - [ - "70b-instruct-1048k-q2_K", - "26 GB" - ], - [ - "70b-instruct-1048k-q3_K_L", - "37 GB" - ], - [ - "70b-instruct-1048k-q3_K_M", - "34 GB" - ], - [ - "70b-instruct-1048k-q3_K_S", - "31 GB" - ], - [ - "70b-instruct-1048k-q4_0", - "40 GB" - ], - [ - "70b-instruct-1048k-q4_1", - "44 GB" - ], - [ - "70b-instruct-1048k-q4_K_M", - "43 GB" - ], - [ - "70b-instruct-1048k-q4_K_S", - "40 GB" - ], - [ - "70b-instruct-1048k-q5_0", - "49 GB" - ], - [ - "70b-instruct-1048k-q5_1", - "53 GB" - ], - [ - "70b-instruct-1048k-q5_K_M", - "50 GB" - ], - [ - "70b-instruct-1048k-q5_K_S", - "49 GB" - ], - [ - "70b-instruct-1048k-q6_K", - "58 GB" - ], - [ - "70b-instruct-1048k-q8_0", - "75 GB" + "4.7\u202fGB" ], [ "8b-instruct-1048k-fp16", - "16 GB" + "16\u202fGB" ], [ "8b-instruct-1048k-q2_K", - "3.2 GB" - ], - [ - "8b-instruct-1048k-q3_K_L", - "4.3 GB" - ], - [ - "8b-instruct-1048k-q3_K_M", - "4.0 GB" + "3.2\u202fGB" ], [ "8b-instruct-1048k-q3_K_S", - "3.7 GB" + "3.7\u202fGB" + ], + [ + "8b-instruct-1048k-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-instruct-1048k-q3_K_L", + "4.3\u202fGB" ], [ "8b-instruct-1048k-q4_0", - "4.7 GB" + "4.7\u202fGB" ], [ "8b-instruct-1048k-q4_1", - "5.1 GB" - ], - [ - "8b-instruct-1048k-q4_K_M", - "4.9 GB" + "5.1\u202fGB" ], [ "8b-instruct-1048k-q4_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "8b-instruct-1048k-q4_K_M", + "4.9\u202fGB" ], [ "8b-instruct-1048k-q5_0", - "5.6 GB" + "5.6\u202fGB" ], [ "8b-instruct-1048k-q5_1", - "6.1 GB" - ], - [ - "8b-instruct-1048k-q5_K_M", - "5.7 GB" + "6.1\u202fGB" ], [ "8b-instruct-1048k-q5_K_S", - "5.6 GB" + "5.6\u202fGB" + ], + [ + "8b-instruct-1048k-q5_K_M", + "5.7\u202fGB" ], [ "8b-instruct-1048k-q6_K", - "6.6 GB" + "6.6\u202fGB" ], [ "8b-instruct-1048k-q8_0", - "8.5 GB" + "8.5\u202fGB" + ], + [ + "70b-instruct-1048k-fp16", + "141\u202fGB" + ], + [ + "70b-instruct-1048k-q2_K", + "26\u202fGB" + ], + [ + "70b-instruct-1048k-q3_K_S", + "31\u202fGB" + ], + [ + "70b-instruct-1048k-q3_K_M", + "34\u202fGB" + ], + [ + "70b-instruct-1048k-q3_K_L", + "37\u202fGB" + ], + [ + "70b-instruct-1048k-q4_0", + "40\u202fGB" + ], + [ + "70b-instruct-1048k-q4_1", + "44\u202fGB" + ], + [ + "70b-instruct-1048k-q4_K_S", + "40\u202fGB" + ], + [ + "70b-instruct-1048k-q4_K_M", + "43\u202fGB" + ], + [ + "70b-instruct-1048k-q5_0", + "49\u202fGB" + ], + [ + "70b-instruct-1048k-q5_1", + "53\u202fGB" + ], + [ + "70b-instruct-1048k-q5_K_S", + "49\u202fGB" + ], + [ + "70b-instruct-1048k-q5_K_M", + "50\u202fGB" + ], + [ + "70b-instruct-1048k-q6_K", + "58\u202fGB" + ], + [ + "70b-instruct-1048k-q8_0", + "75\u202fGB" ] ], "image": false, "author": "Gradient AI" }, - "starling-lm": { - "url": "https://ollama.com/library/starling-lm", - "description": "Starling is a large language model trained by reinforcement learning from AI feedback focused on improving chatbot helpfulness.", + "phind-codellama": { + "url": "https://ollama.com/library/phind-codellama", + "description": "Code generation model based on Code Llama.", "tags": [ [ "latest", - "4.1 GB" + "19\u202fGB" ], [ - "7b", - "4.1 GB" + "34b", + "19\u202fGB" ], [ - "alpha", - "4.1 GB" + "34b-python", + "19\u202fGB" ], [ - "beta", - "4.1 GB" + "34b-v2", + "19\u202fGB" ], [ - "7b-alpha", - "4.1 GB" + "34b-fp16", + "67\u202fGB" ], [ - "7b-alpha-fp16", - "14 GB" + "34b-q2_K", + "14\u202fGB" ], [ - "7b-alpha-q2_K", - "2.7 GB" + "34b-q3_K_S", + "15\u202fGB" ], [ - "7b-alpha-q3_K_L", - "3.8 GB" + "34b-q3_K_M", + "16\u202fGB" ], [ - "7b-alpha-q3_K_M", - "3.5 
GB" + "34b-q3_K_L", + "18\u202fGB" ], [ - "7b-alpha-q3_K_S", - "3.2 GB" + "34b-q4_0", + "19\u202fGB" ], [ - "7b-alpha-q4_0", - "4.1 GB" + "34b-q4_1", + "21\u202fGB" ], [ - "7b-alpha-q4_1", - "4.6 GB" + "34b-q4_K_S", + "19\u202fGB" ], [ - "7b-alpha-q4_K_M", - "4.4 GB" + "34b-q4_K_M", + "20\u202fGB" ], [ - "7b-alpha-q4_K_S", - "4.1 GB" + "34b-q5_0", + "23\u202fGB" ], [ - "7b-alpha-q5_0", - "5.0 GB" + "34b-q5_1", + "25\u202fGB" ], [ - "7b-alpha-q5_1", - "5.4 GB" + "34b-q5_K_S", + "23\u202fGB" ], [ - "7b-alpha-q5_K_M", - "5.1 GB" + "34b-q5_K_M", + "24\u202fGB" ], [ - "7b-alpha-q5_K_S", - "5.0 GB" + "34b-q6_K", + "28\u202fGB" ], [ - "7b-alpha-q6_K", - "5.9 GB" + "34b-q8_0", + "36\u202fGB" ], [ - "7b-alpha-q8_0", - "7.7 GB" + "34b-python-fp16", + "67\u202fGB" ], [ - "7b-beta", - "4.1 GB" + "34b-python-q2_K", + "14\u202fGB" ], [ - "7b-beta-fp16", - "14 GB" + "34b-python-q3_K_S", + "15\u202fGB" ], [ - "7b-beta-q2_K", - "2.7 GB" + "34b-python-q3_K_M", + "16\u202fGB" ], [ - "7b-beta-q3_K_L", - "3.8 GB" + "34b-python-q3_K_L", + "18\u202fGB" ], [ - "7b-beta-q3_K_M", - "3.5 GB" + "34b-python-q4_0", + "19\u202fGB" ], [ - "7b-beta-q3_K_S", - "3.2 GB" + "34b-python-q4_1", + "21\u202fGB" ], [ - "7b-beta-q4_0", - "4.1 GB" + "34b-python-q4_K_S", + "19\u202fGB" ], [ - "7b-beta-q4_1", - "4.6 GB" + "34b-python-q4_K_M", + "20\u202fGB" ], [ - "7b-beta-q4_K_M", - "4.4 GB" + "34b-python-q5_0", + "23\u202fGB" ], [ - "7b-beta-q4_K_S", - "4.1 GB" + "34b-python-q5_1", + "25\u202fGB" ], [ - "7b-beta-q5_0", - "5.0 GB" + "34b-python-q5_K_S", + "23\u202fGB" ], [ - "7b-beta-q5_1", - "5.4 GB" + "34b-python-q5_K_M", + "24\u202fGB" ], [ - "7b-beta-q5_K_M", - "5.1 GB" + "34b-python-q6_K", + "28\u202fGB" ], [ - "7b-beta-q5_K_S", - "5.0 GB" + "34b-python-q8_0", + "36\u202fGB" ], [ - "7b-beta-q6_K", - "5.9 GB" + "34b-v2-fp16", + "67\u202fGB" ], [ - "7b-beta-q8_0", - "7.7 GB" + "34b-v2-q2_K", + "14\u202fGB" + ], + [ + "34b-v2-q3_K_S", + "15\u202fGB" + ], + [ + "34b-v2-q3_K_M", + "16\u202fGB" + ], + [ + "34b-v2-q3_K_L", + "18\u202fGB" + ], + [ + "34b-v2-q4_0", + "19\u202fGB" + ], + [ + "34b-v2-q4_1", + "21\u202fGB" + ], + [ + "34b-v2-q4_K_S", + "19\u202fGB" + ], + [ + "34b-v2-q4_K_M", + "20\u202fGB" + ], + [ + "34b-v2-q5_0", + "23\u202fGB" + ], + [ + "34b-v2-q5_1", + "25\u202fGB" + ], + [ + "34b-v2-q5_K_S", + "23\u202fGB" + ], + [ + "34b-v2-q5_K_M", + "24\u202fGB" + ], + [ + "34b-v2-q6_K", + "28\u202fGB" + ], + [ + "34b-v2-q8_0", + "36\u202fGB" ] ], "image": false, - "author": "Berkeley Nest" + "author": "Phind" }, - "yarn-llama2": { - "url": "https://ollama.com/library/yarn-llama2", - "description": "An extension of Llama 2 that supports a context of up to 128k tokens.", + "nous-hermes": { + "url": "https://ollama.com/library/nous-hermes", + "description": "General use models based on Llama and Llama 2 from Nous Research.", "tags": [ [ "latest", - "3.8 GB" - ], - [ - "13b", - "7.4 GB" + "3.8\u202fGB" ], [ "7b", - "3.8 GB" + "3.8\u202fGB" ], [ - "13b-128k", - "7.4 GB" + "13b", + "7.4\u202fGB" ], [ - "13b-128k-fp16", - "26 GB" + "7b-llama2", + "3.8\u202fGB" ], [ - "13b-128k-q2_K", - "5.4 GB" + "13b-llama2", + "7.4\u202fGB" ], [ - "13b-128k-q3_K_L", - "6.9 GB" + "13b-fp16", + "26\u202fGB" ], [ - "13b-128k-q3_K_M", - "6.3 GB" + "13b-q2_K", + "5.4\u202fGB" ], [ - "13b-128k-q3_K_S", - "5.7 GB" + "13b-q3_K_S", + "5.7\u202fGB" ], [ - "13b-128k-q4_0", - "7.4 GB" + "13b-q3_K_M", + "6.3\u202fGB" ], [ - "13b-128k-q4_1", - "8.2 GB" + "13b-q3_K_L", + "6.9\u202fGB" ], [ - "13b-128k-q4_K_M", - "7.9 GB" + "13b-q4_0", + "7.4\u202fGB" ], [ - 
"13b-128k-q4_K_S", - "7.4 GB" + "13b-q4_1", + "8.2\u202fGB" ], [ - "13b-128k-q5_0", - "9.0 GB" + "13b-q4_K_S", + "7.4\u202fGB" ], [ - "13b-128k-q5_1", - "9.8 GB" + "13b-q4_K_M", + "7.9\u202fGB" ], [ - "13b-128k-q5_K_M", - "9.2 GB" + "13b-q5_0", + "9.0\u202fGB" ], [ - "13b-128k-q5_K_S", - "9.0 GB" + "13b-q5_1", + "9.8\u202fGB" ], [ - "13b-128k-q6_K", - "11 GB" + "13b-q5_K_S", + "9.0\u202fGB" ], [ - "13b-128k-q8_0", - "14 GB" + "13b-q5_K_M", + "9.2\u202fGB" ], [ - "13b-64k", - "7.4 GB" + "13b-q6_K", + "11\u202fGB" ], [ - "13b-64k-fp16", - "26 GB" + "13b-q8_0", + "14\u202fGB" ], [ - "13b-64k-q2_K", - "5.4 GB" + "7b-llama2-fp16", + "13\u202fGB" ], [ - "13b-64k-q3_K_L", - "6.9 GB" + "7b-llama2-q2_K", + "2.8\u202fGB" ], [ - "13b-64k-q3_K_M", - "6.3 GB" + "7b-llama2-q3_K_S", + "2.9\u202fGB" ], [ - "13b-64k-q3_K_S", - "5.7 GB" + "7b-llama2-q3_K_M", + "3.3\u202fGB" ], [ - "13b-64k-q4_0", - "7.4 GB" + "7b-llama2-q3_K_L", + "3.6\u202fGB" ], [ - "13b-64k-q4_1", - "8.2 GB" + "7b-llama2-q4_0", + "3.8\u202fGB" ], [ - "13b-64k-q4_K_M", - "7.9 GB" + "7b-llama2-q4_1", + "4.2\u202fGB" ], [ - "13b-64k-q4_K_S", - "7.4 GB" + "7b-llama2-q4_K_S", + "3.9\u202fGB" ], [ - "13b-64k-q5_0", - "9.0 GB" + "7b-llama2-q4_K_M", + "4.1\u202fGB" ], [ - "13b-64k-q5_1", - "9.8 GB" + "7b-llama2-q5_0", + "4.7\u202fGB" ], [ - "13b-64k-q5_K_M", - "9.2 GB" + "7b-llama2-q5_1", + "5.1\u202fGB" ], [ - "13b-64k-q5_K_S", - "9.0 GB" + "7b-llama2-q5_K_S", + "4.7\u202fGB" ], [ - "13b-64k-q6_K", - "11 GB" + "7b-llama2-q5_K_M", + "4.8\u202fGB" ], [ - "13b-64k-q8_0", - "14 GB" + "7b-llama2-q6_K", + "5.5\u202fGB" ], [ - "7b-128k", - "3.8 GB" + "7b-llama2-q8_0", + "7.2\u202fGB" ], [ - "7b-128k-fp16", - "13 GB" + "13b-llama2-fp16", + "26\u202fGB" ], [ - "7b-128k-q2_K", - "2.8 GB" + "13b-llama2-q2_K", + "5.4\u202fGB" ], [ - "7b-128k-q3_K_L", - "3.6 GB" + "13b-llama2-q3_K_S", + "5.7\u202fGB" ], [ - "7b-128k-q3_K_M", - "3.3 GB" + "13b-llama2-q3_K_M", + "6.3\u202fGB" ], [ - "7b-128k-q3_K_S", - "2.9 GB" + "13b-llama2-q3_K_L", + "6.9\u202fGB" ], [ - "7b-128k-q4_0", - "3.8 GB" + "13b-llama2-q4_0", + "7.4\u202fGB" ], [ - "7b-128k-q4_1", - "4.2 GB" + "13b-llama2-q4_1", + "8.2\u202fGB" ], [ - "7b-128k-q4_K_M", - "4.1 GB" + "13b-llama2-q4_K_S", + "7.4\u202fGB" ], [ - "7b-128k-q4_K_S", - "3.9 GB" + "13b-llama2-q4_K_M", + "7.9\u202fGB" ], [ - "7b-128k-q5_0", - "4.7 GB" + "13b-llama2-q5_0", + "9.0\u202fGB" ], [ - "7b-128k-q5_1", - "5.1 GB" + "13b-llama2-q5_1", + "9.8\u202fGB" ], [ - "7b-128k-q5_K_M", - "4.8 GB" + "13b-llama2-q5_K_S", + "9.0\u202fGB" ], [ - "7b-128k-q5_K_S", - "4.7 GB" + "13b-llama2-q5_K_M", + "9.2\u202fGB" ], [ - "7b-128k-q6_K", - "5.5 GB" + "13b-llama2-q6_K", + "11\u202fGB" ], [ - "7b-128k-q8_0", - "7.2 GB" + "13b-llama2-q8_0", + "14\u202fGB" ], [ - "7b-64k", - "3.8 GB" + "70b-llama2-fp16", + "138\u202fGB" ], [ - "7b-64k-fp16", - "13 GB" + "70b-llama2-q2_K", + "29\u202fGB" ], [ - "7b-64k-q2_K", - "2.8 GB" + "70b-llama2-q3_K_S", + "30\u202fGB" ], [ - "7b-64k-q3_K_L", - "3.6 GB" + "70b-llama2-q3_K_M", + "33\u202fGB" ], [ - "7b-64k-q3_K_M", - "3.3 GB" + "70b-llama2-q3_K_L", + "36\u202fGB" ], [ - "7b-64k-q3_K_S", - "2.9 GB" + "70b-llama2-q4_0", + "39\u202fGB" ], [ - "7b-64k-q4_0", - "3.8 GB" + "70b-llama2-q4_1", + "43\u202fGB" ], [ - "7b-64k-q4_1", - "4.2 GB" + "70b-llama2-q4_K_S", + "39\u202fGB" ], [ - "7b-64k-q4_K_M", - "4.1 GB" + "70b-llama2-q4_K_M", + "41\u202fGB" ], [ - "7b-64k-q4_K_S", - "3.9 GB" + "70b-llama2-q5_0", + "47\u202fGB" ], [ - "7b-64k-q5_0", - "4.7 GB" + "70b-llama2-q5_1", + "52\u202fGB" ], [ - "7b-64k-q5_1", - "5.1 GB" + 
"70b-llama2-q5_K_M", + "49\u202fGB" ], [ - "7b-64k-q5_K_M", - "4.8 GB" - ], - [ - "7b-64k-q5_K_S", - "4.7 GB" - ], - [ - "7b-64k-q6_K", - "5.5 GB" - ], - [ - "7b-64k-q8_0", - "7.2 GB" + "70b-llama2-q6_K", + "57\u202fGB" ] ], "image": false, "author": "Nous Research" }, + "dolphincoder": { + "url": "https://ollama.com/library/dolphincoder", + "description": "A 7B and 15B uncensored variant of the Dolphin model family that excels at coding, based on StarCoder2.", + "tags": [ + [ + "latest", + "4.2\u202fGB" + ], + [ + "7b", + "4.2\u202fGB" + ], + [ + "15b", + "9.1\u202fGB" + ], + [ + "7b-starcoder2", + "4.2\u202fGB" + ], + [ + "15b-starcoder2", + "9.1\u202fGB" + ], + [ + "7b-starcoder2-fp16", + "15\u202fGB" + ], + [ + "7b-starcoder2-q2_K", + "2.9\u202fGB" + ], + [ + "7b-starcoder2-q3_K_S", + "3.3\u202fGB" + ], + [ + "7b-starcoder2-q3_K_M", + "3.8\u202fGB" + ], + [ + "7b-starcoder2-q3_K_L", + "4.2\u202fGB" + ], + [ + "7b-starcoder2-q4_0", + "4.2\u202fGB" + ], + [ + "7b-starcoder2-q4_1", + "4.7\u202fGB" + ], + [ + "7b-starcoder2-q4_K_S", + "4.3\u202fGB" + ], + [ + "7b-starcoder2-q4_K_M", + "4.6\u202fGB" + ], + [ + "7b-starcoder2-q5_0", + "5.1\u202fGB" + ], + [ + "7b-starcoder2-q5_1", + "5.6\u202fGB" + ], + [ + "7b-starcoder2-q5_K_S", + "5.1\u202fGB" + ], + [ + "7b-starcoder2-q5_K_M", + "5.3\u202fGB" + ], + [ + "7b-starcoder2-q6_K", + "6.1\u202fGB" + ], + [ + "7b-starcoder2-q8_0", + "7.9\u202fGB" + ], + [ + "15b-starcoder2-fp16", + "32\u202fGB" + ], + [ + "15b-starcoder2-q2_K", + "6.2\u202fGB" + ], + [ + "15b-starcoder2-q3_K_S", + "7.0\u202fGB" + ], + [ + "15b-starcoder2-q3_K_M", + "8.1\u202fGB" + ], + [ + "15b-starcoder2-q3_K_L", + "9.0\u202fGB" + ], + [ + "15b-starcoder2-q4_0", + "9.1\u202fGB" + ], + [ + "15b-starcoder2-q4_1", + "10\u202fGB" + ], + [ + "15b-starcoder2-q4_K_S", + "9.3\u202fGB" + ], + [ + "15b-starcoder2-q4_K_M", + "9.9\u202fGB" + ], + [ + "15b-starcoder2-q5_0", + "11\u202fGB" + ], + [ + "15b-starcoder2-q5_1", + "12\u202fGB" + ], + [ + "15b-starcoder2-q5_K_S", + "11\u202fGB" + ], + [ + "15b-starcoder2-q5_K_M", + "11\u202fGB" + ], + [ + "15b-starcoder2-q6_K", + "13\u202fGB" + ], + [ + "15b-starcoder2-q8_0", + "17\u202fGB" + ] + ], + "image": false, + "author": "Cognitive Computations" + }, + "sqlcoder": { + "url": "https://ollama.com/library/sqlcoder", + "description": "SQLCoder is a code completion model fined-tuned on StarCoder for SQL generation tasks", + "tags": [ + [ + "latest", + "4.1\u202fGB" + ], + [ + "7b", + "4.1\u202fGB" + ], + [ + "15b", + "9.0\u202fGB" + ], + [ + "7b-fp16", + "14\u202fGB" + ], + [ + "7b-q2_K", + "3.1\u202fGB" + ], + [ + "7b-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-q4_0", + "4.1\u202fGB" + ], + [ + "7b-q4_1", + "4.6\u202fGB" + ], + [ + "7b-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-q5_0", + "5.0\u202fGB" + ], + [ + "7b-q5_1", + "5.4\u202fGB" + ], + [ + "7b-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-q6_K", + "5.9\u202fGB" + ], + [ + "7b-q8_0", + "7.7\u202fGB" + ], + [ + "15b-fp16", + "32\u202fGB" + ], + [ + "15b-q2_K", + "6.7\u202fGB" + ], + [ + "15b-q3_K_S", + "6.9\u202fGB" + ], + [ + "15b-q3_K_M", + "8.2\u202fGB" + ], + [ + "15b-q3_K_L", + "9.1\u202fGB" + ], + [ + "15b-q4_0", + "9.0\u202fGB" + ], + [ + "15b-q4_1", + "10.0\u202fGB" + ], + [ + "15b-q4_K_S", + "9.1\u202fGB" + ], + [ + "15b-q4_K_M", + "10.0\u202fGB" + ], + [ + "15b-q5_0", + "11\u202fGB" + ], + [ + "15b-q5_1", + "12\u202fGB" + ], + [ + 
"15b-q5_K_S", + "11\u202fGB" + ], + [ + "15b-q5_K_M", + "12\u202fGB" + ], + [ + "15b-q6_K", + "13\u202fGB" + ], + [ + "15b-q8_0", + "17\u202fGB" + ], + [ + "70b-alpha-fp16", + "138\u202fGB" + ], + [ + "70b-alpha-q2_K", + "25\u202fGB" + ], + [ + "70b-alpha-q3_K_S", + "30\u202fGB" + ], + [ + "70b-alpha-q3_K_M", + "33\u202fGB" + ], + [ + "70b-alpha-q3_K_L", + "36\u202fGB" + ], + [ + "70b-alpha-q4_0", + "39\u202fGB" + ], + [ + "70b-alpha-q4_1", + "43\u202fGB" + ], + [ + "70b-alpha-q4_K_S", + "39\u202fGB" + ], + [ + "70b-alpha-q4_K_M", + "41\u202fGB" + ], + [ + "70b-alpha-q5_0", + "47\u202fGB" + ], + [ + "70b-alpha-q5_1", + "52\u202fGB" + ], + [ + "70b-alpha-q5_K_S", + "47\u202fGB" + ], + [ + "70b-alpha-q5_K_M", + "49\u202fGB" + ], + [ + "70b-alpha-q6_K", + "57\u202fGB" + ], + [ + "70b-alpha-q8_0", + "73\u202fGB" + ] + ], + "image": false, + "author": "Defog.ai" + }, "xwinlm": { "url": "https://ollama.com/library/xwinlm", "description": "Conversational model based on Llama 2 that performs competitively on various benchmarks.", "tags": [ [ "latest", - "3.8 GB" - ], - [ - "13b", - "7.4 GB" + "3.8\u202fGB" ], [ "7b", - "3.8 GB" + "3.8\u202fGB" ], [ - "70b-v0.1", - "39 GB" - ], - [ - "70b-v0.1-fp16", - "138 GB" - ], - [ - "70b-v0.1-q2_K", - "29 GB" - ], - [ - "70b-v0.1-q3_K_L", - "36 GB" - ], - [ - "70b-v0.1-q3_K_M", - "33 GB" - ], - [ - "70b-v0.1-q3_K_S", - "30 GB" - ], - [ - "70b-v0.1-q4_0", - "39 GB" - ], - [ - "70b-v0.1-q4_1", - "43 GB" - ], - [ - "70b-v0.1-q4_K_M", - "41 GB" - ], - [ - "70b-v0.1-q4_K_S", - "39 GB" - ], - [ - "70b-v0.1-q5_0", - "47 GB" - ], - [ - "70b-v0.1-q5_1", - "52 GB" - ], - [ - "70b-v0.1-q5_K_S", - "47 GB" - ], - [ - "70b-v0.1-q6_K", - "57 GB" - ], - [ - "70b-v0.1-q8_0", - "73 GB" - ], - [ - "13b-v0.1", - "7.4 GB" - ], - [ - "13b-v0.1-fp16", - "26 GB" - ], - [ - "13b-v0.1-q2_K", - "5.4 GB" - ], - [ - "13b-v0.1-q3_K_L", - "6.9 GB" - ], - [ - "13b-v0.1-q3_K_M", - "6.3 GB" - ], - [ - "13b-v0.1-q3_K_S", - "5.7 GB" - ], - [ - "13b-v0.1-q4_0", - "7.4 GB" - ], - [ - "13b-v0.1-q4_1", - "8.2 GB" - ], - [ - "13b-v0.1-q4_K_M", - "7.9 GB" - ], - [ - "13b-v0.1-q4_K_S", - "7.4 GB" - ], - [ - "13b-v0.1-q5_0", - "9.0 GB" - ], - [ - "13b-v0.1-q5_1", - "9.8 GB" - ], - [ - "13b-v0.1-q5_K_M", - "9.2 GB" - ], - [ - "13b-v0.1-q5_K_S", - "9.0 GB" - ], - [ - "13b-v0.1-q6_K", - "11 GB" - ], - [ - "13b-v0.1-q8_0", - "14 GB" - ], - [ - "13b-v0.2", - "7.4 GB" - ], - [ - "13b-v0.2-fp16", - "26 GB" - ], - [ - "13b-v0.2-q2_K", - "5.4 GB" - ], - [ - "13b-v0.2-q3_K_L", - "6.9 GB" - ], - [ - "13b-v0.2-q3_K_M", - "6.3 GB" - ], - [ - "13b-v0.2-q3_K_S", - "5.7 GB" - ], - [ - "13b-v0.2-q4_0", - "7.4 GB" - ], - [ - "13b-v0.2-q4_1", - "8.2 GB" - ], - [ - "13b-v0.2-q4_K_M", - "7.9 GB" - ], - [ - "13b-v0.2-q4_K_S", - "7.4 GB" - ], - [ - "13b-v0.2-q5_0", - "9.0 GB" - ], - [ - "13b-v0.2-q5_1", - "9.8 GB" - ], - [ - "13b-v0.2-q5_K_M", - "9.2 GB" - ], - [ - "13b-v0.2-q5_K_S", - "9.0 GB" - ], - [ - "13b-v0.2-q6_K", - "11 GB" - ], - [ - "13b-v0.2-q8_0", - "14 GB" + "13b", + "7.4\u202fGB" ], [ "7b-v0.1", - "3.8 GB" - ], - [ - "7b-v0.1-fp16", - "13 GB" - ], - [ - "7b-v0.1-q2_K", - "2.8 GB" - ], - [ - "7b-v0.1-q3_K_L", - "3.6 GB" - ], - [ - "7b-v0.1-q3_K_M", - "3.3 GB" - ], - [ - "7b-v0.1-q3_K_S", - "2.9 GB" - ], - [ - "7b-v0.1-q4_0", - "3.8 GB" - ], - [ - "7b-v0.1-q4_1", - "4.2 GB" - ], - [ - "7b-v0.1-q4_K_M", - "4.1 GB" - ], - [ - "7b-v0.1-q4_K_S", - "3.9 GB" - ], - [ - "7b-v0.1-q5_0", - "4.7 GB" - ], - [ - "7b-v0.1-q5_1", - "5.1 GB" - ], - [ - "7b-v0.1-q5_K_M", - "4.8 GB" - ], - [ - "7b-v0.1-q5_K_S", - "4.7 GB" - ], - 
[ - "7b-v0.1-q6_K", - "5.5 GB" - ], - [ - "7b-v0.1-q8_0", - "7.2 GB" + "3.8\u202fGB" ], [ "7b-v0.2", - "3.8 GB" + "3.8\u202fGB" + ], + [ + "13b-v0.1", + "7.4\u202fGB" + ], + [ + "13b-v0.2", + "7.4\u202fGB" + ], + [ + "70b-v0.1", + "39\u202fGB" + ], + [ + "7b-v0.1-fp16", + "13\u202fGB" + ], + [ + "7b-v0.1-q2_K", + "2.8\u202fGB" + ], + [ + "7b-v0.1-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-v0.1-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-v0.1-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-v0.1-q4_0", + "3.8\u202fGB" + ], + [ + "7b-v0.1-q4_1", + "4.2\u202fGB" + ], + [ + "7b-v0.1-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-v0.1-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-v0.1-q5_0", + "4.7\u202fGB" + ], + [ + "7b-v0.1-q5_1", + "5.1\u202fGB" + ], + [ + "7b-v0.1-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-v0.1-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-v0.1-q6_K", + "5.5\u202fGB" + ], + [ + "7b-v0.1-q8_0", + "7.2\u202fGB" ], [ "7b-v0.2-fp16", - "13 GB" + "13\u202fGB" ], [ "7b-v0.2-q2_K", - "2.8 GB" - ], - [ - "7b-v0.2-q3_K_L", - "3.6 GB" + "2.8\u202fGB" ], [ "7b-v0.2-q3_K_S", - "2.9 GB" + "2.9\u202fGB" + ], + [ + "7b-v0.2-q3_K_L", + "3.6\u202fGB" ], [ "7b-v0.2-q4_0", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-v0.2-q4_1", - "4.2 GB" - ], - [ - "7b-v0.2-q4_K_M", - "4.1 GB" + "4.2\u202fGB" ], [ "7b-v0.2-q4_K_S", - "3.9 GB" + "3.9\u202fGB" + ], + [ + "7b-v0.2-q4_K_M", + "4.1\u202fGB" ], [ "7b-v0.2-q5_0", - "4.7 GB" - ], - [ - "7b-v0.2-q5_K_M", - "4.8 GB" + "4.7\u202fGB" ], [ "7b-v0.2-q5_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "7b-v0.2-q5_K_M", + "4.8\u202fGB" ], [ "7b-v0.2-q6_K", - "5.5 GB" + "5.5\u202fGB" ], [ "7b-v0.2-q8_0", - "7.2 GB" + "7.2\u202fGB" + ], + [ + "13b-v0.1-fp16", + "26\u202fGB" + ], + [ + "13b-v0.1-q2_K", + "5.4\u202fGB" + ], + [ + "13b-v0.1-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-v0.1-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-v0.1-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-v0.1-q4_0", + "7.4\u202fGB" + ], + [ + "13b-v0.1-q4_1", + "8.2\u202fGB" + ], + [ + "13b-v0.1-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-v0.1-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-v0.1-q5_0", + "9.0\u202fGB" + ], + [ + "13b-v0.1-q5_1", + "9.8\u202fGB" + ], + [ + "13b-v0.1-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-v0.1-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-v0.1-q6_K", + "11\u202fGB" + ], + [ + "13b-v0.1-q8_0", + "14\u202fGB" + ], + [ + "13b-v0.2-fp16", + "26\u202fGB" + ], + [ + "13b-v0.2-q2_K", + "5.4\u202fGB" + ], + [ + "13b-v0.2-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-v0.2-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-v0.2-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-v0.2-q4_0", + "7.4\u202fGB" + ], + [ + "13b-v0.2-q4_1", + "8.2\u202fGB" + ], + [ + "13b-v0.2-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-v0.2-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-v0.2-q5_0", + "9.0\u202fGB" + ], + [ + "13b-v0.2-q5_1", + "9.8\u202fGB" + ], + [ + "13b-v0.2-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-v0.2-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-v0.2-q6_K", + "11\u202fGB" + ], + [ + "13b-v0.2-q8_0", + "14\u202fGB" + ], + [ + "70b-v0.1-fp16", + "138\u202fGB" + ], + [ + "70b-v0.1-q2_K", + "29\u202fGB" + ], + [ + "70b-v0.1-q3_K_S", + "30\u202fGB" + ], + [ + "70b-v0.1-q3_K_M", + "33\u202fGB" + ], + [ + "70b-v0.1-q3_K_L", + "36\u202fGB" + ], + [ + "70b-v0.1-q4_0", + "39\u202fGB" + ], + [ + "70b-v0.1-q4_1", + "43\u202fGB" + ], + [ + "70b-v0.1-q4_K_S", + "39\u202fGB" + ], + [ + "70b-v0.1-q4_K_M", + "41\u202fGB" + ], + [ + "70b-v0.1-q5_0", + "47\u202fGB" + ], + [ + "70b-v0.1-q5_1", + "52\u202fGB" + ], + [ + "70b-v0.1-q5_K_S", + "47\u202fGB" + ], + [ + "70b-v0.1-q6_K", + "57\u202fGB" 
+ ], + [ + "70b-v0.1-q8_0", + "73\u202fGB" ] ], "image": false, @@ -15837,983 +15957,1559 @@ "tags": [ [ "latest", - "4.0 GB" - ], - [ - "67b", - "38 GB" + "4.0\u202fGB" ], [ "7b", - "4.0 GB" + "4.0\u202fGB" ], [ - "67b-base", - "38 GB" - ], - [ - "67b-base-fp16", - "135 GB" - ], - [ - "67b-base-q2_K", - "28 GB" - ], - [ - "67b-base-q3_K_L", - "36 GB" - ], - [ - "67b-base-q3_K_M", - "33 GB" - ], - [ - "67b-base-q3_K_S", - "29 GB" - ], - [ - "67b-base-q4_0", - "38 GB" - ], - [ - "67b-base-q4_1", - "42 GB" - ], - [ - "67b-base-q4_K_M", - "40 GB" - ], - [ - "67b-base-q4_K_S", - "38 GB" - ], - [ - "67b-base-q5_0", - "46 GB" - ], - [ - "67b-base-q5_1", - "51 GB" - ], - [ - "67b-base-q5_K_M", - "48 GB" - ], - [ - "67b-base-q5_K_S", - "46 GB" - ], - [ - "67b-base-q6_K", - "55 GB" - ], - [ - "67b-base-q8_0", - "72 GB" - ], - [ - "67b-chat", - "38 GB" - ], - [ - "67b-chat-fp16", - "135 GB" - ], - [ - "67b-chat-q2_K", - "28 GB" - ], - [ - "67b-chat-q3_K_L", - "36 GB" - ], - [ - "67b-chat-q3_K_M", - "33 GB" - ], - [ - "67b-chat-q3_K_S", - "29 GB" - ], - [ - "67b-chat-q4_0", - "38 GB" - ], - [ - "67b-chat-q4_1", - "42 GB" - ], - [ - "67b-chat-q4_K_M", - "40 GB" - ], - [ - "67b-chat-q4_K_S", - "38 GB" - ], - [ - "67b-chat-q5_0", - "46 GB" - ], - [ - "67b-chat-q5_1", - "51 GB" - ], - [ - "67b-chat-q5_K_S", - "46 GB" + "67b", + "38\u202fGB" ], [ "7b-base", - "4.0 GB" - ], - [ - "7b-base-fp16", - "14 GB" - ], - [ - "7b-base-q2_K", - "3.0 GB" - ], - [ - "7b-base-q3_K_L", - "3.7 GB" - ], - [ - "7b-base-q3_K_M", - "3.5 GB" - ], - [ - "7b-base-q3_K_S", - "3.1 GB" - ], - [ - "7b-base-q4_0", - "4.0 GB" - ], - [ - "7b-base-q4_1", - "4.4 GB" - ], - [ - "7b-base-q4_K_M", - "4.2 GB" - ], - [ - "7b-base-q4_K_S", - "4.0 GB" - ], - [ - "7b-base-q5_0", - "4.8 GB" - ], - [ - "7b-base-q5_1", - "5.2 GB" - ], - [ - "7b-base-q5_K_M", - "4.9 GB" - ], - [ - "7b-base-q5_K_S", - "4.8 GB" - ], - [ - "7b-base-q6_K", - "5.7 GB" - ], - [ - "7b-base-q8_0", - "7.3 GB" + "4.0\u202fGB" ], [ "7b-chat", - "4.0 GB" + "4.0\u202fGB" + ], + [ + "67b-base", + "38\u202fGB" + ], + [ + "67b-chat", + "38\u202fGB" + ], + [ + "7b-base-fp16", + "14\u202fGB" + ], + [ + "7b-base-q2_K", + "3.0\u202fGB" + ], + [ + "7b-base-q3_K_S", + "3.1\u202fGB" + ], + [ + "7b-base-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-base-q3_K_L", + "3.7\u202fGB" + ], + [ + "7b-base-q4_0", + "4.0\u202fGB" + ], + [ + "7b-base-q4_1", + "4.4\u202fGB" + ], + [ + "7b-base-q4_K_S", + "4.0\u202fGB" + ], + [ + "7b-base-q4_K_M", + "4.2\u202fGB" + ], + [ + "7b-base-q5_0", + "4.8\u202fGB" + ], + [ + "7b-base-q5_1", + "5.2\u202fGB" + ], + [ + "7b-base-q5_K_S", + "4.8\u202fGB" + ], + [ + "7b-base-q5_K_M", + "4.9\u202fGB" + ], + [ + "7b-base-q6_K", + "5.7\u202fGB" + ], + [ + "7b-base-q8_0", + "7.3\u202fGB" ], [ "7b-chat-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-chat-q2_K", - "3.0 GB" - ], - [ - "7b-chat-q3_K_L", - "3.7 GB" - ], - [ - "7b-chat-q3_K_M", - "3.5 GB" + "3.0\u202fGB" ], [ "7b-chat-q3_K_S", - "3.1 GB" + "3.1\u202fGB" + ], + [ + "7b-chat-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-chat-q3_K_L", + "3.7\u202fGB" ], [ "7b-chat-q4_0", - "4.0 GB" + "4.0\u202fGB" ], [ "7b-chat-q4_1", - "4.4 GB" - ], - [ - "7b-chat-q4_K_M", - "4.2 GB" + "4.4\u202fGB" ], [ "7b-chat-q4_K_S", - "4.0 GB" + "4.0\u202fGB" + ], + [ + "7b-chat-q4_K_M", + "4.2\u202fGB" ], [ "7b-chat-q5_0", - "4.8 GB" + "4.8\u202fGB" ], [ "7b-chat-q5_1", - "5.2 GB" - ], - [ - "7b-chat-q5_K_M", - "4.9 GB" + "5.2\u202fGB" ], [ "7b-chat-q5_K_S", - "4.8 GB" + "4.8\u202fGB" + ], + [ + "7b-chat-q5_K_M", + "4.9\u202fGB" ], [ "7b-chat-q6_K", - "5.7 
GB" + "5.7\u202fGB" ], [ "7b-chat-q8_0", - "7.3 GB" + "7.3\u202fGB" + ], + [ + "67b-base-fp16", + "135\u202fGB" + ], + [ + "67b-base-q2_K", + "28\u202fGB" + ], + [ + "67b-base-q3_K_S", + "29\u202fGB" + ], + [ + "67b-base-q3_K_M", + "33\u202fGB" + ], + [ + "67b-base-q3_K_L", + "36\u202fGB" + ], + [ + "67b-base-q4_0", + "38\u202fGB" + ], + [ + "67b-base-q4_1", + "42\u202fGB" + ], + [ + "67b-base-q4_K_S", + "38\u202fGB" + ], + [ + "67b-base-q4_K_M", + "40\u202fGB" + ], + [ + "67b-base-q5_0", + "46\u202fGB" + ], + [ + "67b-base-q5_1", + "51\u202fGB" + ], + [ + "67b-base-q5_K_S", + "46\u202fGB" + ], + [ + "67b-base-q5_K_M", + "48\u202fGB" + ], + [ + "67b-base-q6_K", + "55\u202fGB" + ], + [ + "67b-base-q8_0", + "72\u202fGB" + ], + [ + "67b-chat-fp16", + "135\u202fGB" + ], + [ + "67b-chat-q2_K", + "28\u202fGB" + ], + [ + "67b-chat-q3_K_S", + "29\u202fGB" + ], + [ + "67b-chat-q3_K_M", + "33\u202fGB" + ], + [ + "67b-chat-q3_K_L", + "36\u202fGB" + ], + [ + "67b-chat-q4_0", + "38\u202fGB" + ], + [ + "67b-chat-q4_1", + "42\u202fGB" + ], + [ + "67b-chat-q4_K_S", + "38\u202fGB" + ], + [ + "67b-chat-q4_K_M", + "40\u202fGB" + ], + [ + "67b-chat-q5_0", + "46\u202fGB" + ], + [ + "67b-chat-q5_1", + "51\u202fGB" + ], + [ + "67b-chat-q5_K_S", + "46\u202fGB" ] ], "image": false, "author": "DeepSeek Team" }, + "yarn-llama2": { + "url": "https://ollama.com/library/yarn-llama2", + "description": "An extension of Llama 2 that supports a context of up to 128k tokens.", + "tags": [ + [ + "latest", + "3.8\u202fGB" + ], + [ + "7b", + "3.8\u202fGB" + ], + [ + "13b", + "7.4\u202fGB" + ], + [ + "7b-128k", + "3.8\u202fGB" + ], + [ + "7b-64k", + "3.8\u202fGB" + ], + [ + "13b-128k", + "7.4\u202fGB" + ], + [ + "13b-64k", + "7.4\u202fGB" + ], + [ + "7b-128k-fp16", + "13\u202fGB" + ], + [ + "7b-128k-q2_K", + "2.8\u202fGB" + ], + [ + "7b-128k-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-128k-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-128k-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-128k-q4_0", + "3.8\u202fGB" + ], + [ + "7b-128k-q4_1", + "4.2\u202fGB" + ], + [ + "7b-128k-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-128k-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-128k-q5_0", + "4.7\u202fGB" + ], + [ + "7b-128k-q5_1", + "5.1\u202fGB" + ], + [ + "7b-128k-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-128k-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-128k-q6_K", + "5.5\u202fGB" + ], + [ + "7b-128k-q8_0", + "7.2\u202fGB" + ], + [ + "7b-64k-fp16", + "13\u202fGB" + ], + [ + "7b-64k-q2_K", + "2.8\u202fGB" + ], + [ + "7b-64k-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-64k-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-64k-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-64k-q4_0", + "3.8\u202fGB" + ], + [ + "7b-64k-q4_1", + "4.2\u202fGB" + ], + [ + "7b-64k-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-64k-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-64k-q5_0", + "4.7\u202fGB" + ], + [ + "7b-64k-q5_1", + "5.1\u202fGB" + ], + [ + "7b-64k-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-64k-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-64k-q6_K", + "5.5\u202fGB" + ], + [ + "7b-64k-q8_0", + "7.2\u202fGB" + ], + [ + "13b-128k-fp16", + "26\u202fGB" + ], + [ + "13b-128k-q2_K", + "5.4\u202fGB" + ], + [ + "13b-128k-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-128k-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-128k-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-128k-q4_0", + "7.4\u202fGB" + ], + [ + "13b-128k-q4_1", + "8.2\u202fGB" + ], + [ + "13b-128k-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-128k-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-128k-q5_0", + "9.0\u202fGB" + ], + [ + "13b-128k-q5_1", + "9.8\u202fGB" + ], + [ + "13b-128k-q5_K_S", 
+ "9.0\u202fGB" + ], + [ + "13b-128k-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-128k-q6_K", + "11\u202fGB" + ], + [ + "13b-128k-q8_0", + "14\u202fGB" + ], + [ + "13b-64k-fp16", + "26\u202fGB" + ], + [ + "13b-64k-q2_K", + "5.4\u202fGB" + ], + [ + "13b-64k-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-64k-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-64k-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-64k-q4_0", + "7.4\u202fGB" + ], + [ + "13b-64k-q4_1", + "8.2\u202fGB" + ], + [ + "13b-64k-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-64k-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-64k-q5_0", + "9.0\u202fGB" + ], + [ + "13b-64k-q5_1", + "9.8\u202fGB" + ], + [ + "13b-64k-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-64k-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-64k-q6_K", + "11\u202fGB" + ], + [ + "13b-64k-q8_0", + "14\u202fGB" + ] + ], + "image": false, + "author": "Nous Research" + }, "llama3-chatqa": { "url": "https://ollama.com/library/llama3-chatqa", "description": "A model from NVIDIA based on Llama 3 that excels at conversational question answering (QA) and retrieval-augmented generation (RAG).", "tags": [ [ "latest", - "4.7 GB" - ], - [ - "70b", - "40 GB" + "4.7\u202fGB" ], [ "8b", - "4.7 GB" + "4.7\u202fGB" ], [ - "70b-v1.5", - "40 GB" - ], - [ - "70b-v1.5-fp16", - "141 GB" - ], - [ - "70b-v1.5-q2_K", - "26 GB" - ], - [ - "70b-v1.5-q3_K_L", - "37 GB" - ], - [ - "70b-v1.5-q3_K_M", - "34 GB" - ], - [ - "70b-v1.5-q3_K_S", - "31 GB" - ], - [ - "70b-v1.5-q4_0", - "40 GB" - ], - [ - "70b-v1.5-q4_1", - "44 GB" - ], - [ - "70b-v1.5-q4_K_M", - "43 GB" - ], - [ - "70b-v1.5-q4_K_S", - "40 GB" - ], - [ - "70b-v1.5-q5_0", - "49 GB" - ], - [ - "70b-v1.5-q5_1", - "53 GB" - ], - [ - "70b-v1.5-q5_K_M", - "50 GB" - ], - [ - "70b-v1.5-q5_K_S", - "49 GB" - ], - [ - "70b-v1.5-q6_K", - "58 GB" - ], - [ - "70b-v1.5-q8_0", - "75 GB" + "70b", + "40\u202fGB" ], [ "8b-v1.5", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "70b-v1.5", + "40\u202fGB" ], [ "8b-v1.5-fp16", - "16 GB" + "16\u202fGB" ], [ "8b-v1.5-q2_K", - "3.2 GB" - ], - [ - "8b-v1.5-q3_K_L", - "4.3 GB" - ], - [ - "8b-v1.5-q3_K_M", - "4.0 GB" + "3.2\u202fGB" ], [ "8b-v1.5-q3_K_S", - "3.7 GB" + "3.7\u202fGB" + ], + [ + "8b-v1.5-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-v1.5-q3_K_L", + "4.3\u202fGB" ], [ "8b-v1.5-q4_0", - "4.7 GB" + "4.7\u202fGB" ], [ "8b-v1.5-q4_1", - "5.1 GB" - ], - [ - "8b-v1.5-q4_K_M", - "4.9 GB" + "5.1\u202fGB" ], [ "8b-v1.5-q4_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "8b-v1.5-q4_K_M", + "4.9\u202fGB" ], [ "8b-v1.5-q5_0", - "5.6 GB" + "5.6\u202fGB" ], [ "8b-v1.5-q5_1", - "6.1 GB" - ], - [ - "8b-v1.5-q5_K_M", - "5.7 GB" + "6.1\u202fGB" ], [ "8b-v1.5-q5_K_S", - "5.6 GB" + "5.6\u202fGB" + ], + [ + "8b-v1.5-q5_K_M", + "5.7\u202fGB" ], [ "8b-v1.5-q6_K", - "6.6 GB" + "6.6\u202fGB" ], [ "8b-v1.5-q8_0", - "8.5 GB" + "8.5\u202fGB" + ], + [ + "70b-v1.5-fp16", + "141\u202fGB" + ], + [ + "70b-v1.5-q2_K", + "26\u202fGB" + ], + [ + "70b-v1.5-q3_K_S", + "31\u202fGB" + ], + [ + "70b-v1.5-q3_K_M", + "34\u202fGB" + ], + [ + "70b-v1.5-q3_K_L", + "37\u202fGB" + ], + [ + "70b-v1.5-q4_0", + "40\u202fGB" + ], + [ + "70b-v1.5-q4_1", + "44\u202fGB" + ], + [ + "70b-v1.5-q4_K_S", + "40\u202fGB" + ], + [ + "70b-v1.5-q4_K_M", + "43\u202fGB" + ], + [ + "70b-v1.5-q5_0", + "49\u202fGB" + ], + [ + "70b-v1.5-q5_1", + "53\u202fGB" + ], + [ + "70b-v1.5-q5_K_S", + "49\u202fGB" + ], + [ + "70b-v1.5-q5_K_M", + "50\u202fGB" + ], + [ + "70b-v1.5-q6_K", + "58\u202fGB" + ], + [ + "70b-v1.5-q8_0", + "75\u202fGB" ] ], "image": false, "author": "Nvidia" }, + "wizardlm": { + "url": 
"https://ollama.com/library/wizardlm", + "description": "General use model based on Llama 2.", + "tags": [ + [ + "7b-fp16", + "13\u202fGB" + ], + [ + "7b-q2_K", + "2.8\u202fGB" + ], + [ + "7b-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-q4_0", + "3.8\u202fGB" + ], + [ + "7b-q4_1", + "4.2\u202fGB" + ], + [ + "7b-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-q5_0", + "4.7\u202fGB" + ], + [ + "7b-q5_1", + "5.1\u202fGB" + ], + [ + "7b-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-q6_K", + "5.5\u202fGB" + ], + [ + "7b-q8_0", + "7.2\u202fGB" + ], + [ + "13b-fp16", + "26\u202fGB" + ], + [ + "13b-q2_K", + "5.4\u202fGB" + ], + [ + "13b-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-q4_0", + "7.4\u202fGB" + ], + [ + "13b-q4_1", + "8.2\u202fGB" + ], + [ + "13b-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-q5_0", + "9.0\u202fGB" + ], + [ + "13b-q5_1", + "9.8\u202fGB" + ], + [ + "13b-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-q6_K", + "11\u202fGB" + ], + [ + "13b-q8_0", + "14\u202fGB" + ], + [ + "30b-fp16", + "65\u202fGB" + ], + [ + "30b-q2_K", + "14\u202fGB" + ], + [ + "30b-q3_K_S", + "14\u202fGB" + ], + [ + "30b-q3_K_M", + "16\u202fGB" + ], + [ + "30b-q3_K_L", + "17\u202fGB" + ], + [ + "30b-q4_0", + "18\u202fGB" + ], + [ + "30b-q4_1", + "20\u202fGB" + ], + [ + "30b-q4_K_S", + "18\u202fGB" + ], + [ + "30b-q4_K_M", + "20\u202fGB" + ], + [ + "30b-q5_0", + "22\u202fGB" + ], + [ + "30b-q5_1", + "24\u202fGB" + ], + [ + "30b-q5_K_S", + "22\u202fGB" + ], + [ + "30b-q5_K_M", + "23\u202fGB" + ], + [ + "30b-q6_K", + "27\u202fGB" + ], + [ + "30b-q8_0", + "35\u202fGB" + ], + [ + "13b-llama2-fp16", + "26\u202fGB" + ], + [ + "13b-llama2-q2_K", + "5.4\u202fGB" + ], + [ + "13b-llama2-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-llama2-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-llama2-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-llama2-q4_0", + "7.4\u202fGB" + ], + [ + "13b-llama2-q4_1", + "8.2\u202fGB" + ], + [ + "13b-llama2-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-llama2-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-llama2-q5_0", + "9.0\u202fGB" + ], + [ + "13b-llama2-q5_1", + "9.8\u202fGB" + ], + [ + "13b-llama2-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-llama2-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-llama2-q6_K", + "11\u202fGB" + ], + [ + "13b-llama2-q8_0", + "14\u202fGB" + ], + [ + "70b-llama2-q2_K", + "29\u202fGB" + ], + [ + "70b-llama2-q3_K_S", + "30\u202fGB" + ], + [ + "70b-llama2-q3_K_M", + "33\u202fGB" + ], + [ + "70b-llama2-q3_K_L", + "36\u202fGB" + ], + [ + "70b-llama2-q4_0", + "39\u202fGB" + ], + [ + "70b-llama2-q4_1", + "43\u202fGB" + ], + [ + "70b-llama2-q4_K_S", + "39\u202fGB" + ], + [ + "70b-llama2-q4_K_M", + "41\u202fGB" + ], + [ + "70b-llama2-q5_0", + "47\u202fGB" + ], + [ + "70b-llama2-q5_K_S", + "47\u202fGB" + ], + [ + "70b-llama2-q5_K_M", + "49\u202fGB" + ], + [ + "70b-llama2-q6_K", + "57\u202fGB" + ], + [ + "70b-llama2-q8_0", + "73\u202fGB" + ] + ], + "image": false, + "author": "WizardLM Team" + }, + "starling-lm": { + "url": "https://ollama.com/library/starling-lm", + "description": "Starling is a large language model trained by reinforcement learning from AI feedback focused on improving chatbot helpfulness.", + "tags": [ + [ + "latest", + "4.1\u202fGB" + ], + [ + "7b", + "4.1\u202fGB" + ], + [ + "alpha", + 
"4.1\u202fGB" + ], + [ + "beta", + "4.1\u202fGB" + ], + [ + "7b-alpha", + "4.1\u202fGB" + ], + [ + "7b-beta", + "4.1\u202fGB" + ], + [ + "7b-alpha-fp16", + "14\u202fGB" + ], + [ + "7b-alpha-q2_K", + "2.7\u202fGB" + ], + [ + "7b-alpha-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-alpha-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-alpha-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-alpha-q4_0", + "4.1\u202fGB" + ], + [ + "7b-alpha-q4_1", + "4.6\u202fGB" + ], + [ + "7b-alpha-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-alpha-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-alpha-q5_0", + "5.0\u202fGB" + ], + [ + "7b-alpha-q5_1", + "5.4\u202fGB" + ], + [ + "7b-alpha-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-alpha-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-alpha-q6_K", + "5.9\u202fGB" + ], + [ + "7b-alpha-q8_0", + "7.7\u202fGB" + ], + [ + "7b-beta-fp16", + "14\u202fGB" + ], + [ + "7b-beta-q2_K", + "2.7\u202fGB" + ], + [ + "7b-beta-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-beta-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-beta-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-beta-q4_0", + "4.1\u202fGB" + ], + [ + "7b-beta-q4_1", + "4.6\u202fGB" + ], + [ + "7b-beta-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-beta-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-beta-q5_0", + "5.0\u202fGB" + ], + [ + "7b-beta-q5_1", + "5.4\u202fGB" + ], + [ + "7b-beta-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-beta-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-beta-q6_K", + "5.9\u202fGB" + ], + [ + "7b-beta-q8_0", + "7.7\u202fGB" + ] + ], + "image": false, + "author": "Berkeley Nest" + }, + "codegeex4": { + "url": "https://ollama.com/library/codegeex4", + "description": "A versatile model for AI software development scenarios, including code completion.", + "tags": [ + [ + "latest", + "5.5\u202fGB" + ], + [ + "9b", + "5.5\u202fGB" + ], + [ + "9b-all-fp16", + "19\u202fGB" + ], + [ + "9b-all-q2_K", + "4.0\u202fGB" + ], + [ + "9b-all-q3_K_S", + "4.6\u202fGB" + ], + [ + "9b-all-q3_K_M", + "5.1\u202fGB" + ], + [ + "9b-all-q3_K_L", + "5.3\u202fGB" + ], + [ + "9b-all-q4_0", + "5.5\u202fGB" + ], + [ + "9b-all-q4_1", + "6.0\u202fGB" + ], + [ + "9b-all-q4_K_S", + "5.8\u202fGB" + ], + [ + "9b-all-q4_K_M", + "6.3\u202fGB" + ], + [ + "9b-all-q5_0", + "6.6\u202fGB" + ], + [ + "9b-all-q5_1", + "7.1\u202fGB" + ], + [ + "9b-all-q5_K_S", + "6.7\u202fGB" + ], + [ + "9b-all-q5_K_M", + "7.1\u202fGB" + ], + [ + "9b-all-q6_K", + "8.3\u202fGB" + ], + [ + "9b-all-q8_0", + "10.0\u202fGB" + ] + ], + "image": false, + "author": "THUDM" + }, + "snowflake-arctic-embed": { + "url": "https://ollama.com/library/snowflake-arctic-embed", + "description": "A suite of text embedding models by Snowflake, optimized for performance.", + "tags": [ + [ + "latest", + "669\u202fMB" + ], + [ + "22m", + "46\u202fMB" + ], + [ + "33m", + "67\u202fMB" + ], + [ + "110m", + "219\u202fMB" + ], + [ + "137m", + "274\u202fMB" + ], + [ + "335m", + "669\u202fMB" + ], + [ + "l", + "669\u202fMB" + ], + [ + "m", + "219\u202fMB" + ], + [ + "m-long", + "274\u202fMB" + ], + [ + "s", + "67\u202fMB" + ], + [ + "xs", + "46\u202fMB" + ], + [ + "22m-xs-fp16", + "46\u202fMB" + ], + [ + "33m-s-fp16", + "67\u202fMB" + ], + [ + "110m-m-fp16", + "219\u202fMB" + ], + [ + "137m-m-long-fp16", + "274\u202fMB" + ], + [ + "335m-l-fp16", + "669\u202fMB" + ] + ], + "image": false, + "author": "Snowflake" + }, "orca2": { "url": "https://ollama.com/library/orca2", "description": "Orca 2 is built by Microsoft research, and are a fine-tuned version of Meta's Llama 2 models. 
The model is designed to excel particularly in reasoning.", "tags": [ [ "latest", - "3.8 GB" - ], - [ - "13b", - "7.4 GB" + "3.8\u202fGB" ], [ "7b", - "3.8 GB" + "3.8\u202fGB" ], [ - "13b-fp16", - "26 GB" - ], - [ - "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" - ], - [ - "13b-q3_K_S", - "5.7 GB" - ], - [ - "13b-q4_0", - "7.4 GB" - ], - [ - "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" - ], - [ - "13b-q4_K_S", - "7.4 GB" - ], - [ - "13b-q5_0", - "9.0 GB" - ], - [ - "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" - ], - [ - "13b-q5_K_S", - "9.0 GB" - ], - [ - "13b-q6_K", - "11 GB" - ], - [ - "13b-q8_0", - "14 GB" + "13b", + "7.4\u202fGB" ], [ "7b-fp16", - "13 GB" + "13\u202fGB" ], [ "7b-q2_K", - "2.8 GB" - ], - [ - "7b-q3_K_L", - "3.6 GB" - ], - [ - "7b-q3_K_M", - "3.3 GB" + "2.8\u202fGB" ], [ "7b-q3_K_S", - "2.9 GB" + "2.9\u202fGB" + ], + [ + "7b-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-q3_K_L", + "3.6\u202fGB" ], [ "7b-q4_0", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-q4_1", - "4.2 GB" - ], - [ - "7b-q4_K_M", - "4.1 GB" + "4.2\u202fGB" ], [ "7b-q4_K_S", - "3.9 GB" + "3.9\u202fGB" + ], + [ + "7b-q4_K_M", + "4.1\u202fGB" ], [ "7b-q5_0", - "4.7 GB" + "4.7\u202fGB" ], [ "7b-q5_1", - "5.1 GB" - ], - [ - "7b-q5_K_M", - "4.8 GB" + "5.1\u202fGB" ], [ "7b-q5_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "7b-q5_K_M", + "4.8\u202fGB" ], [ "7b-q6_K", - "5.5 GB" + "5.5\u202fGB" ], [ "7b-q8_0", - "7.2 GB" + "7.2\u202fGB" + ], + [ + "13b-fp16", + "26\u202fGB" + ], + [ + "13b-q2_K", + "5.4\u202fGB" + ], + [ + "13b-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-q4_0", + "7.4\u202fGB" + ], + [ + "13b-q4_1", + "8.2\u202fGB" + ], + [ + "13b-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-q5_0", + "9.0\u202fGB" + ], + [ + "13b-q5_1", + "9.8\u202fGB" + ], + [ + "13b-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-q6_K", + "11\u202fGB" + ], + [ + "13b-q8_0", + "14\u202fGB" ] ], "image": false, "author": "Microsoft Research" }, - "wizardlm": { - "url": "https://ollama.com/library/wizardlm", - "description": "General use model based on Llama 2.", - "tags": [ - [ - "70b-llama2-q2_K", - "29 GB" - ], - [ - "70b-llama2-q3_K_L", - "36 GB" - ], - [ - "70b-llama2-q3_K_M", - "33 GB" - ], - [ - "70b-llama2-q3_K_S", - "30 GB" - ], - [ - "70b-llama2-q4_0", - "39 GB" - ], - [ - "70b-llama2-q4_1", - "43 GB" - ], - [ - "70b-llama2-q4_K_M", - "41 GB" - ], - [ - "70b-llama2-q4_K_S", - "39 GB" - ], - [ - "70b-llama2-q5_0", - "47 GB" - ], - [ - "70b-llama2-q5_K_M", - "49 GB" - ], - [ - "70b-llama2-q5_K_S", - "47 GB" - ], - [ - "70b-llama2-q6_K", - "57 GB" - ], - [ - "70b-llama2-q8_0", - "73 GB" - ], - [ - "30b-fp16", - "65 GB" - ], - [ - "30b-q2_K", - "14 GB" - ], - [ - "30b-q3_K_L", - "17 GB" - ], - [ - "30b-q3_K_M", - "16 GB" - ], - [ - "30b-q3_K_S", - "14 GB" - ], - [ - "30b-q4_0", - "18 GB" - ], - [ - "30b-q4_1", - "20 GB" - ], - [ - "30b-q4_K_M", - "20 GB" - ], - [ - "30b-q4_K_S", - "18 GB" - ], - [ - "30b-q5_0", - "22 GB" - ], - [ - "30b-q5_1", - "24 GB" - ], - [ - "30b-q5_K_M", - "23 GB" - ], - [ - "30b-q5_K_S", - "22 GB" - ], - [ - "30b-q6_K", - "27 GB" - ], - [ - "30b-q8_0", - "35 GB" - ], - [ - "13b-llama2-fp16", - "26 GB" - ], - [ - "13b-llama2-q2_K", - "5.4 GB" - ], - [ - "13b-llama2-q3_K_L", - "6.9 GB" - ], - [ - "13b-llama2-q3_K_M", - "6.3 GB" - ], - [ - "13b-llama2-q3_K_S", - "5.7 GB" - ], - [ - 
"13b-llama2-q4_0", - "7.4 GB" - ], - [ - "13b-llama2-q4_1", - "8.2 GB" - ], - [ - "13b-llama2-q4_K_M", - "7.9 GB" - ], - [ - "13b-llama2-q4_K_S", - "7.4 GB" - ], - [ - "13b-llama2-q5_0", - "9.0 GB" - ], - [ - "13b-llama2-q5_1", - "9.8 GB" - ], - [ - "13b-llama2-q5_K_M", - "9.2 GB" - ], - [ - "13b-llama2-q5_K_S", - "9.0 GB" - ], - [ - "13b-llama2-q6_K", - "11 GB" - ], - [ - "13b-llama2-q8_0", - "14 GB" - ], - [ - "13b-fp16", - "26 GB" - ], - [ - "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" - ], - [ - "13b-q3_K_S", - "5.7 GB" - ], - [ - "13b-q4_0", - "7.4 GB" - ], - [ - "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" - ], - [ - "13b-q4_K_S", - "7.4 GB" - ], - [ - "13b-q5_0", - "9.0 GB" - ], - [ - "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" - ], - [ - "13b-q5_K_S", - "9.0 GB" - ], - [ - "13b-q6_K", - "11 GB" - ], - [ - "13b-q8_0", - "14 GB" - ], - [ - "7b-fp16", - "13 GB" - ], - [ - "7b-q2_K", - "2.8 GB" - ], - [ - "7b-q3_K_L", - "3.6 GB" - ], - [ - "7b-q3_K_M", - "3.3 GB" - ], - [ - "7b-q3_K_S", - "2.9 GB" - ], - [ - "7b-q4_0", - "3.8 GB" - ], - [ - "7b-q4_1", - "4.2 GB" - ], - [ - "7b-q4_K_M", - "4.1 GB" - ], - [ - "7b-q4_K_S", - "3.9 GB" - ], - [ - "7b-q5_0", - "4.7 GB" - ], - [ - "7b-q5_1", - "5.1 GB" - ], - [ - "7b-q5_K_M", - "4.8 GB" - ], - [ - "7b-q5_K_S", - "4.7 GB" - ], - [ - "7b-q6_K", - "5.5 GB" - ], - [ - "7b-q8_0", - "7.2 GB" - ] - ], - "image": false, - "author": "WizardLM Team" - }, "solar": { "url": "https://ollama.com/library/solar", "description": "A compact, yet powerful 10.7B large language model designed for single-turn conversation.", "tags": [ [ "latest", - "6.1 GB" + "6.1\u202fGB" ], [ "10.7b", - "6.1 GB" + "6.1\u202fGB" ], [ "10.7b-instruct-v1-fp16", - "21 GB" + "21\u202fGB" ], [ "10.7b-instruct-v1-q2_K", - "4.5 GB" - ], - [ - "10.7b-instruct-v1-q3_K_L", - "5.7 GB" - ], - [ - "10.7b-instruct-v1-q3_K_M", - "5.2 GB" + "4.5\u202fGB" ], [ "10.7b-instruct-v1-q3_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "10.7b-instruct-v1-q3_K_M", + "5.2\u202fGB" + ], + [ + "10.7b-instruct-v1-q3_K_L", + "5.7\u202fGB" ], [ "10.7b-instruct-v1-q4_0", - "6.1 GB" + "6.1\u202fGB" ], [ "10.7b-instruct-v1-q4_1", - "6.7 GB" - ], - [ - "10.7b-instruct-v1-q4_K_M", - "6.5 GB" + "6.7\u202fGB" ], [ "10.7b-instruct-v1-q4_K_S", - "6.1 GB" + "6.1\u202fGB" + ], + [ + "10.7b-instruct-v1-q4_K_M", + "6.5\u202fGB" ], [ "10.7b-instruct-v1-q5_0", - "7.4 GB" + "7.4\u202fGB" ], [ "10.7b-instruct-v1-q5_1", - "8.1 GB" - ], - [ - "10.7b-instruct-v1-q5_K_M", - "7.6 GB" + "8.1\u202fGB" ], [ "10.7b-instruct-v1-q5_K_S", - "7.4 GB" + "7.4\u202fGB" + ], + [ + "10.7b-instruct-v1-q5_K_M", + "7.6\u202fGB" ], [ "10.7b-instruct-v1-q6_K", - "8.8 GB" + "8.8\u202fGB" ], [ "10.7b-instruct-v1-q8_0", - "11 GB" + "11\u202fGB" ], [ "10.7b-text-v1-fp16", - "21 GB" + "21\u202fGB" ], [ "10.7b-text-v1-q2_K", - "4.5 GB" - ], - [ - "10.7b-text-v1-q3_K_L", - "5.7 GB" - ], - [ - "10.7b-text-v1-q3_K_M", - "5.2 GB" + "4.5\u202fGB" ], [ "10.7b-text-v1-q3_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "10.7b-text-v1-q3_K_M", + "5.2\u202fGB" + ], + [ + "10.7b-text-v1-q3_K_L", + "5.7\u202fGB" ], [ "10.7b-text-v1-q4_0", - "6.1 GB" + "6.1\u202fGB" ], [ "10.7b-text-v1-q4_1", - "6.7 GB" - ], - [ - "10.7b-text-v1-q4_K_M", - "6.5 GB" + "6.7\u202fGB" ], [ "10.7b-text-v1-q4_K_S", - "6.1 GB" + "6.1\u202fGB" + ], + [ + "10.7b-text-v1-q4_K_M", + "6.5\u202fGB" ], [ "10.7b-text-v1-q5_0", - "7.4 GB" + "7.4\u202fGB" ], [ "10.7b-text-v1-q5_1", - "8.1 GB" - ], - [ - "10.7b-text-v1-q5_K_M", - 
"7.6 GB" + "8.1\u202fGB" ], [ "10.7b-text-v1-q5_K_S", - "7.4 GB" + "7.4\u202fGB" + ], + [ + "10.7b-text-v1-q5_K_M", + "7.6\u202fGB" ], [ "10.7b-text-v1-q6_K", - "8.8 GB" + "8.8\u202fGB" ], [ "10.7b-text-v1-q8_0", - "11 GB" + "11\u202fGB" ] ], "image": false, @@ -16825,783 +17521,1335 @@ "tags": [ [ "latest", - "4.1 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" - ], - [ - "7b-instruct-fp16", - "14 GB" - ], - [ - "7b-instruct-q2_K", - "3.1 GB" - ], - [ - "7b-instruct-q3_K_L", - "3.8 GB" - ], - [ - "7b-instruct-q3_K_M", - "3.5 GB" - ], - [ - "7b-instruct-q3_K_S", - "3.2 GB" - ], - [ - "7b-instruct-q4_0", - "4.1 GB" - ], - [ - "7b-instruct-q4_1", - "4.6 GB" - ], - [ - "7b-instruct-q4_K_M", - "4.4 GB" - ], - [ - "7b-instruct-q4_K_S", - "4.1 GB" - ], - [ - "7b-instruct-q5_0", - "5.0 GB" - ], - [ - "7b-instruct-q5_1", - "5.4 GB" - ], - [ - "7b-instruct-q5_K_M", - "5.1 GB" - ], - [ - "7b-instruct-q5_K_S", - "5.0 GB" - ], - [ - "7b-instruct-q6_K", - "5.9 GB" - ], - [ - "7b-instruct-q8_0", - "7.7 GB" + "4.1\u202fGB" ], [ "7b-text", - "4.1 GB" - ], - [ - "7b-text-fp16", - "14 GB" - ], - [ - "7b-text-q2_K", - "3.1 GB" - ], - [ - "7b-text-q3_K_L", - "3.8 GB" - ], - [ - "7b-text-q3_K_M", - "3.5 GB" - ], - [ - "7b-text-q3_K_S", - "3.2 GB" - ], - [ - "7b-text-q4_0", - "4.1 GB" - ], - [ - "7b-text-q4_1", - "4.6 GB" - ], - [ - "7b-text-q4_K_M", - "4.4 GB" - ], - [ - "7b-text-q4_K_S", - "4.1 GB" - ], - [ - "7b-text-q5_0", - "5.0 GB" - ], - [ - "7b-text-q5_1", - "5.4 GB" - ], - [ - "7b-text-q5_K_M", - "5.1 GB" - ], - [ - "7b-text-q5_K_S", - "5.0 GB" - ], - [ - "7b-text-q6_K", - "5.9 GB" - ], - [ - "7b-text-q8_0", - "7.7 GB" + "4.1\u202fGB" ], [ "7b-v1.2-text", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-instruct-fp16", + "14\u202fGB" + ], + [ + "7b-instruct-q2_K", + "3.1\u202fGB" + ], + [ + "7b-instruct-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-instruct-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-instruct-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-instruct-q4_0", + "4.1\u202fGB" + ], + [ + "7b-instruct-q4_1", + "4.6\u202fGB" + ], + [ + "7b-instruct-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-instruct-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-instruct-q5_0", + "5.0\u202fGB" + ], + [ + "7b-instruct-q5_1", + "5.4\u202fGB" + ], + [ + "7b-instruct-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-instruct-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-instruct-q6_K", + "5.9\u202fGB" + ], + [ + "7b-instruct-q8_0", + "7.7\u202fGB" + ], + [ + "7b-text-fp16", + "14\u202fGB" + ], + [ + "7b-text-q2_K", + "3.1\u202fGB" + ], + [ + "7b-text-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-text-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-text-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-text-q4_0", + "4.1\u202fGB" + ], + [ + "7b-text-q4_1", + "4.6\u202fGB" + ], + [ + "7b-text-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-text-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-text-q5_0", + "5.0\u202fGB" + ], + [ + "7b-text-q5_1", + "5.4\u202fGB" + ], + [ + "7b-text-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-text-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-text-q6_K", + "5.9\u202fGB" + ], + [ + "7b-text-q8_0", + "7.7\u202fGB" ], [ "7b-v1.2-text-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-v1.2-text-q2_K", - "3.1 GB" - ], - [ - "7b-v1.2-text-q3_K_L", - "3.8 GB" - ], - [ - "7b-v1.2-text-q3_K_M", - "3.5 GB" + "3.1\u202fGB" ], [ "7b-v1.2-text-q3_K_S", - "3.2 GB" + "3.2\u202fGB" + ], + [ + "7b-v1.2-text-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v1.2-text-q3_K_L", + "3.8\u202fGB" ], [ "7b-v1.2-text-q4_0", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v1.2-text-q4_1", - "4.6 GB" - ], - [ - "7b-v1.2-text-q4_K_M", - "4.4 GB" + 
"4.6\u202fGB" ], [ "7b-v1.2-text-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v1.2-text-q4_K_M", + "4.4\u202fGB" ], [ "7b-v1.2-text-q5_0", - "5.0 GB" + "5.0\u202fGB" ], [ "7b-v1.2-text-q5_1", - "5.4 GB" - ], - [ - "7b-v1.2-text-q5_K_M", - "5.1 GB" + "5.4\u202fGB" ], [ "7b-v1.2-text-q5_K_S", - "5.0 GB" + "5.0\u202fGB" + ], + [ + "7b-v1.2-text-q5_K_M", + "5.1\u202fGB" ], [ "7b-v1.2-text-q6_K", - "5.9 GB" + "5.9\u202fGB" ], [ "7b-v1.2-text-q8_0", - "7.7 GB" + "7.7\u202fGB" ] ], "image": false, "author": "Eric Hartford" }, - "dolphin-phi": { - "url": "https://ollama.com/library/dolphin-phi", - "description": "2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language model by Microsoft Research.", - "tags": [ - [ - "latest", - "1.6 GB" - ], - [ - "2.7b", - "1.6 GB" - ], - [ - "2.7b-v2.6", - "1.6 GB" - ], - [ - "2.7b-v2.6-q2_K", - "1.2 GB" - ], - [ - "2.7b-v2.6-q3_K_L", - "1.6 GB" - ], - [ - "2.7b-v2.6-q3_K_M", - "1.5 GB" - ], - [ - "2.7b-v2.6-q3_K_S", - "1.3 GB" - ], - [ - "2.7b-v2.6-q4_0", - "1.6 GB" - ], - [ - "2.7b-v2.6-q4_K_M", - "1.8 GB" - ], - [ - "2.7b-v2.6-q4_K_S", - "1.6 GB" - ], - [ - "2.7b-v2.6-q5_0", - "1.9 GB" - ], - [ - "2.7b-v2.6-q5_K_M", - "2.1 GB" - ], - [ - "2.7b-v2.6-q5_K_S", - "1.9 GB" - ], - [ - "2.7b-v2.6-q6_K", - "2.3 GB" - ], - [ - "2.7b-v2.6-q8_0", - "3.0 GB" - ] - ], - "image": false, - "author": "Eric Hartford" - }, - "stable-beluga": { - "url": "https://ollama.com/library/stable-beluga", - "description": "Llama 2 based model fine tuned on an Orca-style dataset. Originally called Free Willy.", - "tags": [ - [ - "latest", - "3.8 GB" - ], - [ - "70b", - "39 GB" - ], - [ - "13b", - "7.4 GB" - ], - [ - "7b", - "3.8 GB" - ], - [ - "70b-fp16", - "138 GB" - ], - [ - "70b-q2_K", - "29 GB" - ], - [ - "70b-q3_K_L", - "36 GB" - ], - [ - "70b-q3_K_M", - "33 GB" - ], - [ - "70b-q3_K_S", - "30 GB" - ], - [ - "70b-q4_0", - "39 GB" - ], - [ - "70b-q4_1", - "43 GB" - ], - [ - "70b-q4_K_M", - "41 GB" - ], - [ - "70b-q4_K_S", - "39 GB" - ], - [ - "70b-q5_0", - "47 GB" - ], - [ - "70b-q5_1", - "52 GB" - ], - [ - "70b-q5_K_M", - "49 GB" - ], - [ - "70b-q5_K_S", - "47 GB" - ], - [ - "70b-q6_K", - "57 GB" - ], - [ - "70b-q8_0", - "73 GB" - ], - [ - "13b-fp16", - "26 GB" - ], - [ - "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" - ], - [ - "13b-q3_K_S", - "5.7 GB" - ], - [ - "13b-q4_0", - "7.4 GB" - ], - [ - "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" - ], - [ - "13b-q4_K_S", - "7.4 GB" - ], - [ - "13b-q5_0", - "9.0 GB" - ], - [ - "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" - ], - [ - "13b-q5_K_S", - "9.0 GB" - ], - [ - "13b-q6_K", - "11 GB" - ], - [ - "13b-q8_0", - "14 GB" - ], - [ - "7b-fp16", - "13 GB" - ], - [ - "7b-q2_K", - "2.8 GB" - ], - [ - "7b-q3_K_L", - "3.6 GB" - ], - [ - "7b-q3_K_M", - "3.3 GB" - ], - [ - "7b-q3_K_S", - "2.9 GB" - ], - [ - "7b-q4_0", - "3.8 GB" - ], - [ - "7b-q4_1", - "4.2 GB" - ], - [ - "7b-q4_K_M", - "4.1 GB" - ], - [ - "7b-q4_K_S", - "3.9 GB" - ], - [ - "7b-q5_0", - "4.7 GB" - ], - [ - "7b-q5_1", - "5.1 GB" - ], - [ - "7b-q5_K_M", - "4.8 GB" - ], - [ - "7b-q5_K_S", - "4.7 GB" - ], - [ - "7b-q6_K", - "5.5 GB" - ], - [ - "7b-q8_0", - "7.2 GB" - ] - ], - "image": false, - "author": "Stability AI" - }, "moondream": { "url": "https://ollama.com/library/moondream", "description": "moondream2 is a small vision language model designed to run efficiently on edge devices.", "tags": [ [ "latest", - "1.7 GB" + "1.7\u202fGB" ], [ "1.8b", - "1.7 GB" + "1.7\u202fGB" ], [ "v2", 
- "1.7 GB" + "1.7\u202fGB" ], [ "1.8b-v2-fp16", - "3.7 GB" + "3.7\u202fGB" ], [ "1.8b-v2-q2_K", - "1.5 GB" - ], - [ - "1.8b-v2-q3_K_L", - "1.7 GB" - ], - [ - "1.8b-v2-q3_K_M", - "1.7 GB" + "1.5\u202fGB" ], [ "1.8b-v2-q3_K_S", - "1.6 GB" + "1.6\u202fGB" + ], + [ + "1.8b-v2-q3_K_M", + "1.7\u202fGB" + ], + [ + "1.8b-v2-q3_K_L", + "1.7\u202fGB" ], [ "1.8b-v2-q4_0", - "1.7 GB" + "1.7\u202fGB" ], [ "1.8b-v2-q4_1", - "1.8 GB" - ], - [ - "1.8b-v2-q4_K_M", - "1.8 GB" + "1.8\u202fGB" ], [ "1.8b-v2-q4_K_S", - "1.7 GB" + "1.7\u202fGB" + ], + [ + "1.8b-v2-q4_K_M", + "1.8\u202fGB" ], [ "1.8b-v2-q5_0", - "1.9 GB" + "1.9\u202fGB" ], [ "1.8b-v2-q5_1", - "2.0 GB" - ], - [ - "1.8b-v2-q5_K_M", - "2.0 GB" + "2.0\u202fGB" ], [ "1.8b-v2-q5_K_S", - "1.9 GB" + "1.9\u202fGB" + ], + [ + "1.8b-v2-q5_K_M", + "2.0\u202fGB" ], [ "1.8b-v2-q6_K", - "2.1 GB" + "2.1\u202fGB" ], [ "1.8b-v2-q8_0", - "2.4 GB" + "2.4\u202fGB" ] ], "image": true, "author": "Vikhyatk" }, - "bakllava": { - "url": "https://ollama.com/library/bakllava", - "description": "BakLLaVA is a multimodal model consisting of the Mistral 7B base model augmented with the LLaVA architecture.", + "smollm": { + "url": "https://ollama.com/library/smollm", + "description": "\ud83e\ude90 A family of small models with 135M, 360M, and 1.7B parameters, trained on a new high-quality dataset.", "tags": [ [ "latest", - "4.7 GB" + "991\u202fMB" + ], + [ + "135m", + "92\u202fMB" + ], + [ + "360m", + "229\u202fMB" + ], + [ + "1.7b", + "991\u202fMB" + ], + [ + "135m-base-v0.2-fp16", + "271\u202fMB" + ], + [ + "135m-base-v0.2-q2_K", + "88\u202fMB" + ], + [ + "135m-base-v0.2-q3_K_S", + "88\u202fMB" + ], + [ + "135m-base-v0.2-q3_K_M", + "94\u202fMB" + ], + [ + "135m-base-v0.2-q3_K_L", + "98\u202fMB" + ], + [ + "135m-base-v0.2-q4_0", + "92\u202fMB" + ], + [ + "135m-base-v0.2-q4_1", + "98\u202fMB" + ], + [ + "135m-base-v0.2-q4_K_S", + "102\u202fMB" + ], + [ + "135m-base-v0.2-q4_K_M", + "105\u202fMB" + ], + [ + "135m-base-v0.2-q5_0", + "105\u202fMB" + ], + [ + "135m-base-v0.2-q5_1", + "112\u202fMB" + ], + [ + "135m-base-v0.2-q5_K_S", + "110\u202fMB" + ], + [ + "135m-base-v0.2-q5_K_M", + "112\u202fMB" + ], + [ + "135m-base-v0.2-q6_K", + "138\u202fMB" + ], + [ + "135m-base-v0.2-q8_0", + "145\u202fMB" + ], + [ + "135m-instruct-v0.2-fp16", + "271\u202fMB" + ], + [ + "135m-instruct-v0.2-q2_K", + "88\u202fMB" + ], + [ + "135m-instruct-v0.2-q3_K_S", + "88\u202fMB" + ], + [ + "135m-instruct-v0.2-q3_K_M", + "94\u202fMB" + ], + [ + "135m-instruct-v0.2-q3_K_L", + "98\u202fMB" + ], + [ + "135m-instruct-v0.2-q4_0", + "92\u202fMB" + ], + [ + "135m-instruct-v0.2-q4_1", + "98\u202fMB" + ], + [ + "135m-instruct-v0.2-q4_K_S", + "102\u202fMB" + ], + [ + "135m-instruct-v0.2-q4_K_M", + "105\u202fMB" + ], + [ + "135m-instruct-v0.2-q5_0", + "105\u202fMB" + ], + [ + "135m-instruct-v0.2-q5_1", + "112\u202fMB" + ], + [ + "135m-instruct-v0.2-q5_K_S", + "110\u202fMB" + ], + [ + "135m-instruct-v0.2-q5_K_M", + "112\u202fMB" + ], + [ + "135m-instruct-v0.2-q6_K", + "138\u202fMB" + ], + [ + "135m-instruct-v0.2-q8_0", + "145\u202fMB" + ], + [ + "360m-base-v0.2-fp16", + "726\u202fMB" + ], + [ + "360m-base-v0.2-q2_K", + "219\u202fMB" + ], + [ + "360m-base-v0.2-q3_K_S", + "219\u202fMB" + ], + [ + "360m-base-v0.2-q3_K_M", + "235\u202fMB" + ], + [ + "360m-base-v0.2-q3_K_L", + "246\u202fMB" + ], + [ + "360m-base-v0.2-q4_0", + "229\u202fMB" + ], + [ + "360m-base-v0.2-q4_1", + "249\u202fMB" + ], + [ + "360m-base-v0.2-q4_K_S", + "260\u202fMB" + ], + [ + "360m-base-v0.2-q4_K_M", + "271\u202fMB" + ], + [ + 
"360m-base-v0.2-q5_0", + "268\u202fMB" + ], + [ + "360m-base-v0.2-q5_1", + "288\u202fMB" + ], + [ + "360m-base-v0.2-q5_K_S", + "283\u202fMB" + ], + [ + "360m-base-v0.2-q5_K_M", + "290\u202fMB" + ], + [ + "360m-base-v0.2-q6_K", + "367\u202fMB" + ], + [ + "360m-base-v0.2-q8_0", + "386\u202fMB" + ], + [ + "360m-instruct-v0.2-fp16", + "726\u202fMB" + ], + [ + "360m-instruct-v0.2-q2_K", + "219\u202fMB" + ], + [ + "360m-instruct-v0.2-q3_K_S", + "219\u202fMB" + ], + [ + "360m-instruct-v0.2-q3_K_M", + "235\u202fMB" + ], + [ + "360m-instruct-v0.2-q3_K_L", + "246\u202fMB" + ], + [ + "360m-instruct-v0.2-q4_0", + "229\u202fMB" + ], + [ + "360m-instruct-v0.2-q4_1", + "249\u202fMB" + ], + [ + "360m-instruct-v0.2-q4_K_S", + "260\u202fMB" + ], + [ + "360m-instruct-v0.2-q4_K_M", + "271\u202fMB" + ], + [ + "360m-instruct-v0.2-q5_0", + "268\u202fMB" + ], + [ + "360m-instruct-v0.2-q5_1", + "288\u202fMB" + ], + [ + "360m-instruct-v0.2-q5_K_S", + "283\u202fMB" + ], + [ + "360m-instruct-v0.2-q5_K_M", + "290\u202fMB" + ], + [ + "360m-instruct-v0.2-q6_K", + "367\u202fMB" + ], + [ + "360m-instruct-v0.2-q8_0", + "386\u202fMB" + ], + [ + "1.7b-base-v0.2-fp16", + "3.4\u202fGB" + ], + [ + "1.7b-base-v0.2-q2_K", + "675\u202fMB" + ], + [ + "1.7b-base-v0.2-q3_K_S", + "777\u202fMB" + ], + [ + "1.7b-base-v0.2-q3_K_M", + "860\u202fMB" + ], + [ + "1.7b-base-v0.2-q3_K_L", + "933\u202fMB" + ], + [ + "1.7b-base-v0.2-q4_0", + "991\u202fMB" + ], + [ + "1.7b-base-v0.2-q4_1", + "1.1\u202fGB" + ], + [ + "1.7b-base-v0.2-q4_K_S", + "999\u202fMB" + ], + [ + "1.7b-base-v0.2-q4_K_M", + "1.1\u202fGB" + ], + [ + "1.7b-base-v0.2-q5_0", + "1.2\u202fGB" + ], + [ + "1.7b-base-v0.2-q5_1", + "1.3\u202fGB" + ], + [ + "1.7b-base-v0.2-q5_K_S", + "1.2\u202fGB" + ], + [ + "1.7b-base-v0.2-q5_K_M", + "1.2\u202fGB" + ], + [ + "1.7b-base-v0.2-q6_K", + "1.4\u202fGB" + ], + [ + "1.7b-base-v0.2-q8_0", + "1.8\u202fGB" + ], + [ + "1.7b-instruct-v0.2-fp16", + "3.4\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q2_K", + "675\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q3_K_S", + "777\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q3_K_M", + "860\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q3_K_L", + "933\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q4_0", + "991\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q4_1", + "1.1\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q4_K_S", + "999\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q4_K_M", + "1.1\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q5_0", + "1.2\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q5_1", + "1.3\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q5_K_S", + "1.2\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q5_K_M", + "1.2\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q6_K", + "1.4\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q8_0", + "1.8\u202fGB" + ] + ], + "image": false, + "author": "HuggingFaceTB" + }, + "stable-beluga": { + "url": "https://ollama.com/library/stable-beluga", + "description": "\ud83e\ude90 A family of small models with 135M, 360M, and 1.7B parameters, trained on a new high-quality dataset.", + "tags": [ + [ + "latest", + "991\u202fMB" + ], + [ + "135m", + "92\u202fMB" + ], + [ + "360m", + "229\u202fMB" + ], + [ + "1.7b", + "991\u202fMB" + ], + [ + "135m-base-v0.2-fp16", + "271\u202fMB" + ], + [ + "135m-base-v0.2-q2_K", + "88\u202fMB" + ], + [ + "135m-base-v0.2-q3_K_S", + "88\u202fMB" + ], + [ + "135m-base-v0.2-q3_K_M", + "94\u202fMB" + ], + [ + "135m-base-v0.2-q3_K_L", + "98\u202fMB" + ], + [ + "135m-base-v0.2-q4_0", + "92\u202fMB" + ], + [ + "135m-base-v0.2-q4_1", + "98\u202fMB" + ], + [ + "135m-base-v0.2-q4_K_S", + "102\u202fMB" + ], + [ + 
"135m-base-v0.2-q4_K_M", + "105\u202fMB" + ], + [ + "135m-base-v0.2-q5_0", + "105\u202fMB" + ], + [ + "135m-base-v0.2-q5_1", + "112\u202fMB" + ], + [ + "135m-base-v0.2-q5_K_S", + "110\u202fMB" + ], + [ + "135m-base-v0.2-q5_K_M", + "112\u202fMB" + ], + [ + "135m-base-v0.2-q6_K", + "138\u202fMB" + ], + [ + "135m-base-v0.2-q8_0", + "145\u202fMB" + ], + [ + "135m-instruct-v0.2-fp16", + "271\u202fMB" + ], + [ + "135m-instruct-v0.2-q2_K", + "88\u202fMB" + ], + [ + "135m-instruct-v0.2-q3_K_S", + "88\u202fMB" + ], + [ + "135m-instruct-v0.2-q3_K_M", + "94\u202fMB" + ], + [ + "135m-instruct-v0.2-q3_K_L", + "98\u202fMB" + ], + [ + "135m-instruct-v0.2-q4_0", + "92\u202fMB" + ], + [ + "135m-instruct-v0.2-q4_1", + "98\u202fMB" + ], + [ + "135m-instruct-v0.2-q4_K_S", + "102\u202fMB" + ], + [ + "135m-instruct-v0.2-q4_K_M", + "105\u202fMB" + ], + [ + "135m-instruct-v0.2-q5_0", + "105\u202fMB" + ], + [ + "135m-instruct-v0.2-q5_1", + "112\u202fMB" + ], + [ + "135m-instruct-v0.2-q5_K_S", + "110\u202fMB" + ], + [ + "135m-instruct-v0.2-q5_K_M", + "112\u202fMB" + ], + [ + "135m-instruct-v0.2-q6_K", + "138\u202fMB" + ], + [ + "135m-instruct-v0.2-q8_0", + "145\u202fMB" + ], + [ + "360m-base-v0.2-fp16", + "726\u202fMB" + ], + [ + "360m-base-v0.2-q2_K", + "219\u202fMB" + ], + [ + "360m-base-v0.2-q3_K_S", + "219\u202fMB" + ], + [ + "360m-base-v0.2-q3_K_M", + "235\u202fMB" + ], + [ + "360m-base-v0.2-q3_K_L", + "246\u202fMB" + ], + [ + "360m-base-v0.2-q4_0", + "229\u202fMB" + ], + [ + "360m-base-v0.2-q4_1", + "249\u202fMB" + ], + [ + "360m-base-v0.2-q4_K_S", + "260\u202fMB" + ], + [ + "360m-base-v0.2-q4_K_M", + "271\u202fMB" + ], + [ + "360m-base-v0.2-q5_0", + "268\u202fMB" + ], + [ + "360m-base-v0.2-q5_1", + "288\u202fMB" + ], + [ + "360m-base-v0.2-q5_K_S", + "283\u202fMB" + ], + [ + "360m-base-v0.2-q5_K_M", + "290\u202fMB" + ], + [ + "360m-base-v0.2-q6_K", + "367\u202fMB" + ], + [ + "360m-base-v0.2-q8_0", + "386\u202fMB" + ], + [ + "360m-instruct-v0.2-fp16", + "726\u202fMB" + ], + [ + "360m-instruct-v0.2-q2_K", + "219\u202fMB" + ], + [ + "360m-instruct-v0.2-q3_K_S", + "219\u202fMB" + ], + [ + "360m-instruct-v0.2-q3_K_M", + "235\u202fMB" + ], + [ + "360m-instruct-v0.2-q3_K_L", + "246\u202fMB" + ], + [ + "360m-instruct-v0.2-q4_0", + "229\u202fMB" + ], + [ + "360m-instruct-v0.2-q4_1", + "249\u202fMB" + ], + [ + "360m-instruct-v0.2-q4_K_S", + "260\u202fMB" + ], + [ + "360m-instruct-v0.2-q4_K_M", + "271\u202fMB" + ], + [ + "360m-instruct-v0.2-q5_0", + "268\u202fMB" + ], + [ + "360m-instruct-v0.2-q5_1", + "288\u202fMB" + ], + [ + "360m-instruct-v0.2-q5_K_S", + "283\u202fMB" + ], + [ + "360m-instruct-v0.2-q5_K_M", + "290\u202fMB" + ], + [ + "360m-instruct-v0.2-q6_K", + "367\u202fMB" + ], + [ + "360m-instruct-v0.2-q8_0", + "386\u202fMB" + ], + [ + "1.7b-base-v0.2-fp16", + "3.4\u202fGB" + ], + [ + "1.7b-base-v0.2-q2_K", + "675\u202fMB" + ], + [ + "1.7b-base-v0.2-q3_K_S", + "777\u202fMB" + ], + [ + "1.7b-base-v0.2-q3_K_M", + "860\u202fMB" + ], + [ + "1.7b-base-v0.2-q3_K_L", + "933\u202fMB" + ], + [ + "1.7b-base-v0.2-q4_0", + "991\u202fMB" + ], + [ + "1.7b-base-v0.2-q4_1", + "1.1\u202fGB" + ], + [ + "1.7b-base-v0.2-q4_K_S", + "999\u202fMB" + ], + [ + "1.7b-base-v0.2-q4_K_M", + "1.1\u202fGB" + ], + [ + "1.7b-base-v0.2-q5_0", + "1.2\u202fGB" + ], + [ + "1.7b-base-v0.2-q5_1", + "1.3\u202fGB" + ], + [ + "1.7b-base-v0.2-q5_K_S", + "1.2\u202fGB" + ], + [ + "1.7b-base-v0.2-q5_K_M", + "1.2\u202fGB" + ], + [ + "1.7b-base-v0.2-q6_K", + "1.4\u202fGB" + ], + [ + "1.7b-base-v0.2-q8_0", + "1.8\u202fGB" + ], + [ + "1.7b-instruct-v0.2-fp16", + 
"3.4\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q2_K", + "675\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q3_K_S", + "777\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q3_K_M", + "860\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q3_K_L", + "933\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q4_0", + "991\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q4_1", + "1.1\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q4_K_S", + "999\u202fMB" + ], + [ + "1.7b-instruct-v0.2-q4_K_M", + "1.1\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q5_0", + "1.2\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q5_1", + "1.3\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q5_K_S", + "1.2\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q5_K_M", + "1.2\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q6_K", + "1.4\u202fGB" + ], + [ + "1.7b-instruct-v0.2-q8_0", + "1.8\u202fGB" + ] + ], + "image": false, + "author": "Stability AI" + }, + "qwen2-math": { + "url": "https://ollama.com/library/qwen2-math", + "description": "Qwen2 Math is a series of specialized math language models built upon the Qwen2 LLMs, which significantly outperforms the mathematical capabilities of open-source models and even closed-source models (e.g., GPT4o).", + "tags": [ + [ + "latest", + "4.4\u202fGB" + ], + [ + "1.5b", + "935\u202fMB" ], [ "7b", - "4.7 GB" + "4.4\u202fGB" ], [ - "7b-v1-fp16", - "15 GB" + "72b", + "41\u202fGB" ], [ - "7b-v1-q2_K", - "3.7 GB" + "1.5b-instruct", + "935\u202fMB" ], [ - "7b-v1-q3_K_L", - "4.4 GB" + "7b-instruct", + "4.4\u202fGB" ], [ - "7b-v1-q3_K_M", - "4.1 GB" + "72b-instruct", + "41\u202fGB" ], [ - "7b-v1-q3_K_S", - "3.8 GB" + "1.5b-instruct-fp16", + "3.1\u202fGB" ], [ - "7b-v1-q4_0", - "4.7 GB" + "1.5b-instruct-q2_K", + "676\u202fMB" ], [ - "7b-v1-q4_1", - "5.2 GB" + "1.5b-instruct-q3_K_S", + "761\u202fMB" ], [ - "7b-v1-q4_K_M", - "5.0 GB" + "1.5b-instruct-q3_K_M", + "824\u202fMB" ], [ - "7b-v1-q4_K_S", - "4.8 GB" + "1.5b-instruct-q3_K_L", + "880\u202fMB" ], [ - "7b-v1-q5_0", - "5.6 GB" + "1.5b-instruct-q4_0", + "935\u202fMB" ], [ - "7b-v1-q5_1", - "6.1 GB" + "1.5b-instruct-q4_1", + "1.0\u202fGB" ], [ - "7b-v1-q5_K_M", - "5.8 GB" + "1.5b-instruct-q4_K_S", + "940\u202fMB" ], [ - "7b-v1-q5_K_S", - "5.6 GB" + "1.5b-instruct-q4_K_M", + "986\u202fMB" ], [ - "7b-v1-q6_K", - "6.6 GB" + "1.5b-instruct-q5_0", + "1.1\u202fGB" ], [ - "7b-v1-q8_0", - "8.3 GB" - ] - ], - "image": true, - "author": "Skunkworks AI" - }, - "wizardlm-uncensored": { - "url": "https://ollama.com/library/wizardlm-uncensored", - "description": "Uncensored version of Wizard LM model", - "tags": [ - [ - "latest", - "7.4 GB" + "1.5b-instruct-q5_1", + "1.2\u202fGB" ], [ - "13b", - "7.4 GB" + "1.5b-instruct-q5_K_S", + "1.1\u202fGB" ], [ - "13b-llama2", - "7.4 GB" + "1.5b-instruct-q5_K_M", + "1.1\u202fGB" ], [ - "13b-llama2-fp16", - "26 GB" + "1.5b-instruct-q6_K", + "1.3\u202fGB" ], [ - "13b-llama2-q2_K", - "5.4 GB" + "1.5b-instruct-q8_0", + "1.6\u202fGB" ], [ - "13b-llama2-q3_K_L", - "6.9 GB" + "7b-instruct-fp16", + "15\u202fGB" ], [ - "13b-llama2-q3_K_M", - "6.3 GB" + "7b-instruct-q2_K", + "3.0\u202fGB" ], [ - "13b-llama2-q3_K_S", - "5.7 GB" + "7b-instruct-q3_K_S", + "3.5\u202fGB" ], [ - "13b-llama2-q4_0", - "7.4 GB" + "7b-instruct-q3_K_M", + "3.8\u202fGB" ], [ - "13b-llama2-q4_1", - "8.2 GB" + "7b-instruct-q3_K_L", + "4.1\u202fGB" ], [ - "13b-llama2-q4_K_M", - "7.9 GB" + "7b-instruct-q4_0", + "4.4\u202fGB" ], [ - "13b-llama2-q4_K_S", - "7.4 GB" + "7b-instruct-q4_1", + "4.9\u202fGB" ], [ - "13b-llama2-q5_0", - "9.0 GB" + "7b-instruct-q4_K_S", + "4.5\u202fGB" ], [ - "13b-llama2-q5_1", - "9.8 GB" + "7b-instruct-q4_K_M", + 
"4.7\u202fGB" ], [ - "13b-llama2-q5_K_M", - "9.2 GB" + "7b-instruct-q5_0", + "5.3\u202fGB" ], [ - "13b-llama2-q5_K_S", - "9.0 GB" + "7b-instruct-q5_1", + "5.8\u202fGB" ], [ - "13b-llama2-q6_K", - "11 GB" + "7b-instruct-q5_K_S", + "5.3\u202fGB" ], [ - "13b-llama2-q8_0", - "14 GB" + "7b-instruct-q5_K_M", + "5.4\u202fGB" + ], + [ + "7b-instruct-q6_K", + "6.3\u202fGB" + ], + [ + "7b-instruct-q8_0", + "8.1\u202fGB" + ], + [ + "72b-instruct-fp16", + "145\u202fGB" + ], + [ + "72b-instruct-q2_K", + "30\u202fGB" + ], + [ + "72b-instruct-q3_K_S", + "34\u202fGB" + ], + [ + "72b-instruct-q3_K_M", + "38\u202fGB" + ], + [ + "72b-instruct-q3_K_L", + "40\u202fGB" + ], + [ + "72b-instruct-q4_0", + "41\u202fGB" + ], + [ + "72b-instruct-q4_1", + "46\u202fGB" + ], + [ + "72b-instruct-q4_K_S", + "44\u202fGB" + ], + [ + "72b-instruct-q4_K_M", + "47\u202fGB" + ], + [ + "72b-instruct-q5_0", + "50\u202fGB" + ], + [ + "72b-instruct-q5_1", + "55\u202fGB" + ], + [ + "72b-instruct-q5_K_S", + "51\u202fGB" + ], + [ + "72b-instruct-q5_K_M", + "54\u202fGB" + ], + [ + "72b-instruct-q6_K", + "64\u202fGB" + ], + [ + "72b-instruct-q8_0", + "77\u202fGB" ] ], "image": false, - "author": "TheBloke AI" + "author": "Alibaba" }, - "snowflake-arctic-embed": { - "url": "https://ollama.com/library/snowflake-arctic-embed", - "description": "A suite of text embedding models by Snowflake, optimized for performance.", + "dolphin-phi": { + "url": "https://ollama.com/library/dolphin-phi", + "description": "2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language model by Microsoft Research.", "tags": [ [ "latest", - "669 MB" + "1.6\u202fGB" ], [ - "335m", - "669 MB" + "2.7b", + "1.6\u202fGB" ], [ - "137m", - "274 MB" + "2.7b-v2.6", + "1.6\u202fGB" ], [ - "110m", - "219 MB" + "2.7b-v2.6-q2_K", + "1.2\u202fGB" ], [ - "33m", - "67 MB" + "2.7b-v2.6-q3_K_S", + "1.3\u202fGB" ], [ - "22m", - "46 MB" + "2.7b-v2.6-q3_K_M", + "1.5\u202fGB" ], [ - "l", - "669 MB" + "2.7b-v2.6-q3_K_L", + "1.6\u202fGB" ], [ - "m", - "219 MB" + "2.7b-v2.6-q4_0", + "1.6\u202fGB" ], [ - "s", - "67 MB" + "2.7b-v2.6-q4_K_S", + "1.6\u202fGB" ], [ - "xs", - "46 MB" + "2.7b-v2.6-q4_K_M", + "1.8\u202fGB" ], [ - "335m-l-fp16", - "669 MB" + "2.7b-v2.6-q5_0", + "1.9\u202fGB" ], [ - "137m-m-long-fp16", - "274 MB" + "2.7b-v2.6-q5_K_S", + "1.9\u202fGB" ], [ - "110m-m-fp16", - "219 MB" + "2.7b-v2.6-q5_K_M", + "2.1\u202fGB" ], [ - "33m-s-fp16", - "67 MB" + "2.7b-v2.6-q6_K", + "2.3\u202fGB" ], [ - "22m-xs-fp16", - "46 MB" - ], - [ - "m-long", - "274 MB" + "2.7b-v2.6-q8_0", + "3.0\u202fGB" ] ], "image": false, - "author": "Snowflake" + "author": "Eric Hartford" }, "deepseek-v2": { "url": "https://ollama.com/library/deepseek-v2", @@ -17609,1083 +18857,227 @@ "tags": [ [ "latest", - "8.9 GB" - ], - [ - "236b", - "133 GB" + "8.9\u202fGB" ], [ "16b", - "8.9 GB" + "8.9\u202fGB" + ], + [ + "236b", + "133\u202fGB" ], [ "lite", - "8.9 GB" - ], - [ - "236b-chat-f16", - "472 GB" - ], - [ - "236b-chat-fp16", - "472 GB" - ], - [ - "236b-chat-q2_K", - "86 GB" - ], - [ - "236b-chat-q3_K_L", - "122 GB" - ], - [ - "236b-chat-q3_K_M", - "113 GB" - ], - [ - "236b-chat-q3_K_S", - "102 GB" - ], - [ - "236b-chat-q4_0", - "133 GB" - ], - [ - "236b-chat-q4_1", - "148 GB" - ], - [ - "236b-chat-q4_K_M", - "142 GB" - ], - [ - "236b-chat-q4_K_S", - "134 GB" - ], - [ - "236b-chat-q5_0", - "162 GB" - ], - [ - "236b-chat-q5_1", - "177 GB" - ], - [ - "236b-chat-q5_K_M", - "167 GB" - ], - [ - "236b-chat-q5_K_S", - "162 GB" - ], - [ - "236b-chat-q6_K", - "194 GB" - ], - [ - "236b-chat-q8_0", - "251 GB" + 
"8.9\u202fGB" ], [ "16b-lite-chat-f16", - "31 GB" + "31\u202fGB" + ], + [ + "236b-chat-f16", + "472\u202fGB" ], [ "16b-lite-chat-fp16", - "31 GB" + "31\u202fGB" ], [ "16b-lite-chat-q2_K", - "6.4 GB" - ], - [ - "16b-lite-chat-q3_K_L", - "8.5 GB" - ], - [ - "16b-lite-chat-q3_K_M", - "8.1 GB" + "6.4\u202fGB" ], [ "16b-lite-chat-q3_K_S", - "7.5 GB" + "7.5\u202fGB" + ], + [ + "16b-lite-chat-q3_K_M", + "8.1\u202fGB" + ], + [ + "16b-lite-chat-q3_K_L", + "8.5\u202fGB" ], [ "16b-lite-chat-q4_0", - "8.9 GB" + "8.9\u202fGB" ], [ "16b-lite-chat-q4_1", - "9.9 GB" - ], - [ - "16b-lite-chat-q4_K_M", - "10 GB" + "9.9\u202fGB" ], [ "16b-lite-chat-q4_K_S", - "9.5 GB" + "9.5\u202fGB" + ], + [ + "16b-lite-chat-q4_K_M", + "10\u202fGB" ], [ "16b-lite-chat-q5_0", - "11 GB" + "11\u202fGB" ], [ "16b-lite-chat-q5_1", - "12 GB" - ], - [ - "16b-lite-chat-q5_K_M", - "12 GB" + "12\u202fGB" ], [ "16b-lite-chat-q5_K_S", - "11 GB" + "11\u202fGB" + ], + [ + "16b-lite-chat-q5_K_M", + "12\u202fGB" ], [ "16b-lite-chat-q6_K", - "14 GB" + "14\u202fGB" ], [ "16b-lite-chat-q8_0", - "17 GB" + "17\u202fGB" + ], + [ + "236b-chat-fp16", + "472\u202fGB" + ], + [ + "236b-chat-q2_K", + "86\u202fGB" + ], + [ + "236b-chat-q3_K_S", + "102\u202fGB" + ], + [ + "236b-chat-q3_K_M", + "113\u202fGB" + ], + [ + "236b-chat-q3_K_L", + "122\u202fGB" + ], + [ + "236b-chat-q4_0", + "133\u202fGB" + ], + [ + "236b-chat-q4_1", + "148\u202fGB" + ], + [ + "236b-chat-q4_K_S", + "134\u202fGB" + ], + [ + "236b-chat-q4_K_M", + "142\u202fGB" + ], + [ + "236b-chat-q5_0", + "162\u202fGB" + ], + [ + "236b-chat-q5_1", + "177\u202fGB" + ], + [ + "236b-chat-q5_K_S", + "162\u202fGB" + ], + [ + "236b-chat-q5_K_M", + "167\u202fGB" + ], + [ + "236b-chat-q6_K", + "194\u202fGB" + ], + [ + "236b-chat-q8_0", + "251\u202fGB" ] ], "image": false, "author": "DeepSeek Team" }, - "medllama2": { - "url": "https://ollama.com/library/medllama2", - "description": "Fine-tuned Llama 2 model to answer medical questions based on an open source medical dataset.", + "bakllava": { + "url": "https://ollama.com/library/bakllava", + "description": "BakLLaVA is a multimodal model consisting of the Mistral 7B base model augmented with the LLaVA architecture.", "tags": [ [ "latest", - "3.8 GB" + "4.7\u202fGB" ], [ "7b", - "3.8 GB" + "4.7\u202fGB" ], [ - "7b-fp16", - "13 GB" + "7b-v1-fp16", + "15\u202fGB" ], [ - "7b-q2_K", - "2.8 GB" + "7b-v1-q2_K", + "3.7\u202fGB" ], [ - "7b-q3_K_L", - "3.6 GB" + "7b-v1-q3_K_S", + "3.8\u202fGB" ], [ - "7b-q3_K_M", - "3.3 GB" + "7b-v1-q3_K_M", + "4.1\u202fGB" ], [ - "7b-q3_K_S", - "2.9 GB" + "7b-v1-q3_K_L", + "4.4\u202fGB" ], [ - "7b-q4_0", - "3.8 GB" + "7b-v1-q4_0", + "4.7\u202fGB" ], [ - "7b-q4_1", - "4.2 GB" + "7b-v1-q4_1", + "5.2\u202fGB" ], [ - "7b-q4_K_M", - "4.1 GB" + "7b-v1-q4_K_S", + "4.8\u202fGB" ], [ - "7b-q4_K_S", - "3.9 GB" + "7b-v1-q4_K_M", + "5.0\u202fGB" ], [ - "7b-q5_0", - "4.7 GB" + "7b-v1-q5_0", + "5.6\u202fGB" ], [ - "7b-q5_1", - "5.1 GB" + "7b-v1-q5_1", + "6.1\u202fGB" ], [ - "7b-q5_K_M", - "4.8 GB" + "7b-v1-q5_K_S", + "5.6\u202fGB" ], [ - "7b-q5_K_S", - "4.7 GB" + "7b-v1-q5_K_M", + "5.8\u202fGB" ], [ - "7b-q6_K", - "5.5 GB" + "7b-v1-q6_K", + "6.6\u202fGB" ], [ - "7b-q8_0", - "7.2 GB" + "7b-v1-q8_0", + "8.3\u202fGB" ] ], - "image": false, - "author": "Siraj Raval" - }, - "yarn-mistral": { - "url": "https://ollama.com/library/yarn-mistral", - "description": "An extension of Mistral to support context windows of 64K or 128K.", - "tags": [ - [ - "latest", - "4.1 GB" - ], - [ - "7b", - "4.1 GB" - ], - [ - "7b-128k", - "4.1 GB" - ], - [ - 
"7b-128k-fp16", - "14 GB" - ], - [ - "7b-128k-q2_K", - "3.1 GB" - ], - [ - "7b-128k-q3_K_L", - "3.8 GB" - ], - [ - "7b-128k-q3_K_M", - "3.5 GB" - ], - [ - "7b-128k-q3_K_S", - "3.2 GB" - ], - [ - "7b-128k-q4_0", - "4.1 GB" - ], - [ - "7b-128k-q4_1", - "4.6 GB" - ], - [ - "7b-128k-q4_K_M", - "4.4 GB" - ], - [ - "7b-128k-q4_K_S", - "4.1 GB" - ], - [ - "7b-128k-q5_0", - "5.0 GB" - ], - [ - "7b-128k-q5_1", - "5.4 GB" - ], - [ - "7b-128k-q5_K_M", - "5.1 GB" - ], - [ - "7b-128k-q5_K_S", - "5.0 GB" - ], - [ - "7b-128k-q6_K", - "5.9 GB" - ], - [ - "7b-128k-q8_0", - "7.7 GB" - ], - [ - "7b-64k", - "4.1 GB" - ], - [ - "7b-64k-q2_K", - "3.1 GB" - ], - [ - "7b-64k-q3_K_L", - "3.8 GB" - ], - [ - "7b-64k-q3_K_M", - "3.5 GB" - ], - [ - "7b-64k-q3_K_S", - "3.2 GB" - ], - [ - "7b-64k-q4_0", - "4.1 GB" - ], - [ - "7b-64k-q4_1", - "4.6 GB" - ], - [ - "7b-64k-q4_K_M", - "4.4 GB" - ], - [ - "7b-64k-q4_K_S", - "4.1 GB" - ], - [ - "7b-64k-q5_0", - "5.0 GB" - ], - [ - "7b-64k-q5_1", - "5.4 GB" - ], - [ - "7b-64k-q5_K_M", - "5.1 GB" - ], - [ - "7b-64k-q5_K_S", - "5.0 GB" - ], - [ - "7b-64k-q6_K", - "5.9 GB" - ], - [ - "7b-64k-q8_0", - "7.7 GB" - ] - ], - "image": false, - "author": "Nous Research" - }, - "llama-pro": { - "url": "https://ollama.com/library/llama-pro", - "description": "An expansion of Llama 2 that specializes in integrating both general language understanding and domain-specific knowledge, particularly in programming and mathematics.", - "tags": [ - [ - "latest", - "4.7 GB" - ], - [ - "instruct", - "4.7 GB" - ], - [ - "text", - "4.7 GB" - ], - [ - "8b-instruct-fp16", - "17 GB" - ], - [ - "8b-instruct-q2_K", - "3.5 GB" - ], - [ - "8b-instruct-q3_K_L", - "4.5 GB" - ], - [ - "8b-instruct-q3_K_M", - "4.1 GB" - ], - [ - "8b-instruct-q3_K_S", - "3.6 GB" - ], - [ - "8b-instruct-q4_0", - "4.7 GB" - ], - [ - "8b-instruct-q4_1", - "5.3 GB" - ], - [ - "8b-instruct-q4_K_M", - "5.1 GB" - ], - [ - "8b-instruct-q4_K_S", - "4.8 GB" - ], - [ - "8b-instruct-q5_0", - "5.8 GB" - ], - [ - "8b-instruct-q5_1", - "6.3 GB" - ], - [ - "8b-instruct-q5_K_M", - "5.9 GB" - ], - [ - "8b-instruct-q5_K_S", - "5.8 GB" - ], - [ - "8b-instruct-q6_K", - "6.9 GB" - ], - [ - "8b-instruct-q8_0", - "8.9 GB" - ], - [ - "8b-text-fp16", - "17 GB" - ], - [ - "8b-text-q2_K", - "3.5 GB" - ], - [ - "8b-text-q3_K_L", - "4.5 GB" - ], - [ - "8b-text-q3_K_M", - "4.1 GB" - ], - [ - "8b-text-q3_K_S", - "3.6 GB" - ], - [ - "8b-text-q4_0", - "4.7 GB" - ], - [ - "8b-text-q4_1", - "5.3 GB" - ], - [ - "8b-text-q4_K_M", - "5.1 GB" - ], - [ - "8b-text-q4_K_S", - "4.8 GB" - ], - [ - "8b-text-q5_0", - "5.8 GB" - ], - [ - "8b-text-q5_1", - "6.3 GB" - ], - [ - "8b-text-q5_K_M", - "5.9 GB" - ], - [ - "8b-text-q5_K_S", - "5.8 GB" - ], - [ - "8b-text-q6_K", - "6.9 GB" - ], - [ - "8b-text-q8_0", - "8.9 GB" - ] - ], - "image": false, - "author": "Tencent" - }, - "nous-hermes2-mixtral": { - "url": "https://ollama.com/library/nous-hermes2-mixtral", - "description": "The Nous Hermes 2 model from Nous Research, now trained over Mixtral.", - "tags": [ - [ - "latest", - "26 GB" - ], - [ - "8x7b", - "26 GB" - ], - [ - "dpo", - "26 GB" - ], - [ - "8x7b-dpo-fp16", - "93 GB" - ], - [ - "8x7b-dpo-q2_K", - "16 GB" - ], - [ - "8x7b-dpo-q3_K_L", - "20 GB" - ], - [ - "8x7b-dpo-q3_K_M", - "20 GB" - ], - [ - "8x7b-dpo-q3_K_S", - "20 GB" - ], - [ - "8x7b-dpo-q4_0", - "26 GB" - ], - [ - "8x7b-dpo-q4_1", - "29 GB" - ], - [ - "8x7b-dpo-q4_K_M", - "26 GB" - ], - [ - "8x7b-dpo-q4_K_S", - "26 GB" - ], - [ - "8x7b-dpo-q5_0", - "32 GB" - ], - [ - "8x7b-dpo-q5_1", - "35 GB" - ], - [ - 
"8x7b-dpo-q5_K_M", - "32 GB" - ], - [ - "8x7b-dpo-q5_K_S", - "32 GB" - ], - [ - "8x7b-dpo-q6_K", - "38 GB" - ], - [ - "8x7b-dpo-q8_0", - "50 GB" - ] - ], - "image": false, - "author": "Nous Research" - }, - "meditron": { - "url": "https://ollama.com/library/meditron", - "description": "Open-source medical large language model adapted from Llama 2 to the medical domain.", - "tags": [ - [ - "latest", - "3.8 GB" - ], - [ - "70b", - "39 GB" - ], - [ - "7b", - "3.8 GB" - ], - [ - "70b-q4_0", - "39 GB" - ], - [ - "70b-q4_1", - "43 GB" - ], - [ - "70b-q4_K_S", - "39 GB" - ], - [ - "70b-q5_1", - "52 GB" - ], - [ - "7b-fp16", - "13 GB" - ], - [ - "7b-q2_K", - "2.8 GB" - ], - [ - "7b-q3_K_L", - "3.6 GB" - ], - [ - "7b-q3_K_M", - "3.3 GB" - ], - [ - "7b-q3_K_S", - "2.9 GB" - ], - [ - "7b-q4_0", - "3.8 GB" - ], - [ - "7b-q4_1", - "4.2 GB" - ], - [ - "7b-q4_K_M", - "4.1 GB" - ], - [ - "7b-q4_K_S", - "3.9 GB" - ], - [ - "7b-q5_0", - "4.7 GB" - ], - [ - "7b-q5_1", - "5.1 GB" - ], - [ - "7b-q5_K_M", - "4.8 GB" - ], - [ - "7b-q5_K_S", - "4.7 GB" - ], - [ - "7b-q6_K", - "5.5 GB" - ], - [ - "7b-q8_0", - "7.2 GB" - ] - ], - "image": false, - "author": "EPFL LLM Team" - }, - "codeup": { - "url": "https://ollama.com/library/codeup", - "description": "Great code generation model based on Llama2.", - "tags": [ - [ - "latest", - "7.4 GB" - ], - [ - "13b", - "7.4 GB" - ], - [ - "13b-llama2-chat", - "7.4 GB" - ], - [ - "13b-llama2", - "7.4 GB" - ], - [ - "13b-llama2-chat-fp16", - "26 GB" - ], - [ - "13b-llama2-chat-q2_K", - "5.4 GB" - ], - [ - "13b-llama2-chat-q3_K_L", - "6.9 GB" - ], - [ - "13b-llama2-chat-q3_K_M", - "6.3 GB" - ], - [ - "13b-llama2-chat-q3_K_S", - "5.7 GB" - ], - [ - "13b-llama2-chat-q4_0", - "7.4 GB" - ], - [ - "13b-llama2-chat-q4_1", - "8.2 GB" - ], - [ - "13b-llama2-chat-q4_K_M", - "7.9 GB" - ], - [ - "13b-llama2-chat-q4_K_S", - "7.4 GB" - ], - [ - "13b-llama2-chat-q5_0", - "9.0 GB" - ], - [ - "13b-llama2-chat-q5_1", - "9.8 GB" - ], - [ - "13b-llama2-chat-q5_K_M", - "9.2 GB" - ], - [ - "13b-llama2-chat-q5_K_S", - "9.0 GB" - ], - [ - "13b-llama2-chat-q6_K", - "11 GB" - ], - [ - "13b-llama2-chat-q8_0", - "14 GB" - ] - ], - "image": false, - "author": "DeepSE" - }, - "nexusraven": { - "url": "https://ollama.com/library/nexusraven", - "description": "Nexus Raven is a 13B instruction tuned model for function calling tasks.", - "tags": [ - [ - "latest", - "7.4 GB" - ], - [ - "13b", - "7.4 GB" - ], - [ - "13b-v2-fp16", - "26 GB" - ], - [ - "13b-v2-q2_K", - "5.4 GB" - ], - [ - "13b-v2-q3_K_L", - "6.9 GB" - ], - [ - "13b-v2-q3_K_M", - "6.3 GB" - ], - [ - "13b-v2-q3_K_S", - "5.7 GB" - ], - [ - "13b-v2-q4_0", - "7.4 GB" - ], - [ - "13b-v2-q4_1", - "8.2 GB" - ], - [ - "13b-v2-q4_K_M", - "7.9 GB" - ], - [ - "13b-v2-q4_K_S", - "7.4 GB" - ], - [ - "13b-v2-q5_0", - "9.0 GB" - ], - [ - "13b-v2-q5_1", - "9.8 GB" - ], - [ - "13b-v2-q5_K_M", - "9.2 GB" - ], - [ - "13b-v2-q5_K_S", - "9.0 GB" - ], - [ - "13b-v2-q6_K", - "11 GB" - ], - [ - "13b-v2-q8_0", - "14 GB" - ], - [ - "13b-fp16", - "26 GB" - ], - [ - "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" - ], - [ - "13b-q3_K_S", - "5.7 GB" - ], - [ - "13b-q4_0", - "7.4 GB" - ], - [ - "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" - ], - [ - "13b-q4_K_S", - "7.4 GB" - ], - [ - "13b-q5_0", - "9.0 GB" - ], - [ - "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" - ], - [ - "13b-q5_K_S", - "9.0 GB" - ], - [ - "13b-q6_K", - "11 GB" - ], - [ - "13b-q8_0", - "14 GB" - ] - ], - "image": false, - "author": 
"NexusFlow AI" - }, - "everythinglm": { - "url": "https://ollama.com/library/everythinglm", - "description": "Uncensored Llama2 based model with support for a 16K context window.", - "tags": [ - [ - "latest", - "7.4 GB" - ], - [ - "13b", - "7.4 GB" - ], - [ - "13b-16k", - "7.4 GB" - ], - [ - "13b-16k-fp16", - "26 GB" - ], - [ - "13b-16k-q2_K", - "5.4 GB" - ], - [ - "13b-16k-q3_K_L", - "6.9 GB" - ], - [ - "13b-16k-q3_K_M", - "6.3 GB" - ], - [ - "13b-16k-q3_K_S", - "5.7 GB" - ], - [ - "13b-16k-q4_0", - "7.4 GB" - ], - [ - "13b-16k-q4_1", - "8.2 GB" - ], - [ - "13b-16k-q4_K_M", - "7.9 GB" - ], - [ - "13b-16k-q4_K_S", - "7.4 GB" - ], - [ - "13b-16k-q5_0", - "9.0 GB" - ], - [ - "13b-16k-q5_1", - "9.8 GB" - ], - [ - "13b-16k-q5_K_M", - "9.2 GB" - ], - [ - "13b-16k-q5_K_S", - "9.0 GB" - ], - [ - "13b-16k-q6_K", - "11 GB" - ], - [ - "13b-16k-q8_0", - "14 GB" - ] - ], - "image": false, - "author": "Totally Not An LLM" - }, - "llava-phi3": { - "url": "https://ollama.com/library/llava-phi3", - "description": "A new small LLaVA model fine-tuned from Phi 3 Mini.", - "tags": [ - [ - "latest", - "2.9 GB" - ], - [ - "3.8b", - "2.9 GB" - ], - [ - "3.8b-mini-fp16", - "8.3 GB" - ], - [ - "3.8b-mini-q4_0", - "2.9 GB" - ] - ], - "image": false, - "author": "Xtuner" - }, - "codegeex4": { - "url": "https://ollama.com/library/codegeex4", - "description": "A versatile model for AI software development scenarios, including code completion.", - "tags": [ - [ - "latest", - "5.5 GB" - ], - [ - "9b", - "5.5 GB" - ], - [ - "9b-all-fp16", - "19 GB" - ], - [ - "9b-all-q2_K", - "4.0 GB" - ], - [ - "9b-all-q3_K_L", - "5.3 GB" - ], - [ - "9b-all-q3_K_M", - "5.1 GB" - ], - [ - "9b-all-q3_K_S", - "4.6 GB" - ], - [ - "9b-all-q4_0", - "5.5 GB" - ], - [ - "9b-all-q4_1", - "6.0 GB" - ], - [ - "9b-all-q4_K_M", - "6.3 GB" - ], - [ - "9b-all-q4_K_S", - "5.8 GB" - ], - [ - "9b-all-q5_0", - "6.6 GB" - ], - [ - "9b-all-q5_1", - "7.1 GB" - ], - [ - "9b-all-q5_K_M", - "7.1 GB" - ], - [ - "9b-all-q5_K_S", - "6.7 GB" - ], - [ - "9b-all-q6_K", - "8.3 GB" - ], - [ - "9b-all-q8_0", - "10.0 GB" - ] - ], - "image": false, - "author": "THUDM" + "image": true, + "author": "Skunkworks AI" }, "glm4": { "url": "https://ollama.com/library/glm4", @@ -18693,211 +19085,1695 @@ "tags": [ [ "latest", - "5.5 GB" + "5.5\u202fGB" ], [ "9b", - "5.5 GB" + "5.5\u202fGB" ], [ "9b-chat-fp16", - "19 GB" + "19\u202fGB" ], [ "9b-chat-q2_K", - "4.0 GB" - ], - [ - "9b-chat-q3_K_L", - "5.3 GB" - ], - [ - "9b-chat-q3_K_M", - "5.1 GB" + "4.0\u202fGB" ], [ "9b-chat-q3_K_S", - "4.6 GB" + "4.6\u202fGB" + ], + [ + "9b-chat-q3_K_M", + "5.1\u202fGB" + ], + [ + "9b-chat-q3_K_L", + "5.3\u202fGB" ], [ "9b-chat-q4_0", - "5.5 GB" + "5.5\u202fGB" ], [ "9b-chat-q4_1", - "6.0 GB" - ], - [ - "9b-chat-q4_K_M", - "6.3 GB" + "6.0\u202fGB" ], [ "9b-chat-q4_K_S", - "5.8 GB" + "5.8\u202fGB" + ], + [ + "9b-chat-q4_K_M", + "6.3\u202fGB" ], [ "9b-chat-q5_0", - "6.6 GB" + "6.6\u202fGB" ], [ "9b-chat-q5_1", - "7.1 GB" - ], - [ - "9b-chat-q5_K_M", - "7.1 GB" + "7.1\u202fGB" ], [ "9b-chat-q5_K_S", - "6.7 GB" + "6.7\u202fGB" + ], + [ + "9b-chat-q5_K_M", + "7.1\u202fGB" ], [ "9b-chat-q6_K", - "8.3 GB" + "8.3\u202fGB" ], [ "9b-chat-q8_0", - "10.0 GB" + "10.0\u202fGB" ], [ "9b-text-fp16", - "19 GB" + "19\u202fGB" ], [ "9b-text-q2_K", - "4.0 GB" - ], - [ - "9b-text-q3_K_L", - "5.3 GB" - ], - [ - "9b-text-q3_K_M", - "5.1 GB" + "4.0\u202fGB" ], [ "9b-text-q3_K_S", - "4.6 GB" + "4.6\u202fGB" + ], + [ + "9b-text-q3_K_M", + "5.1\u202fGB" + ], + [ + "9b-text-q3_K_L", + "5.3\u202fGB" ], [ "9b-text-q4_0", - "5.5 
GB" + "5.5\u202fGB" ], [ "9b-text-q4_1", - "6.0 GB" - ], - [ - "9b-text-q4_K_M", - "6.3 GB" + "6.0\u202fGB" ], [ "9b-text-q4_K_S", - "5.8 GB" + "5.8\u202fGB" + ], + [ + "9b-text-q4_K_M", + "6.3\u202fGB" ], [ "9b-text-q5_0", - "6.6 GB" + "6.6\u202fGB" ], [ "9b-text-q5_1", - "7.1 GB" - ], - [ - "9b-text-q5_K_M", - "7.1 GB" + "7.1\u202fGB" ], [ "9b-text-q5_K_S", - "6.7 GB" + "6.7\u202fGB" + ], + [ + "9b-text-q5_K_M", + "7.1\u202fGB" ], [ "9b-text-q6_K", - "8.3 GB" + "8.3\u202fGB" ], [ "9b-text-q8_0", - "10.0 GB" + "10.0\u202fGB" ] ], "image": false, "author": "THUDM" }, + "wizardlm-uncensored": { + "url": "https://ollama.com/library/wizardlm-uncensored", + "description": "Uncensored version of Wizard LM model", + "tags": [ + [ + "latest", + "7.4\u202fGB" + ], + [ + "13b", + "7.4\u202fGB" + ], + [ + "13b-llama2", + "7.4\u202fGB" + ], + [ + "13b-llama2-fp16", + "26\u202fGB" + ], + [ + "13b-llama2-q2_K", + "5.4\u202fGB" + ], + [ + "13b-llama2-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-llama2-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-llama2-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-llama2-q4_0", + "7.4\u202fGB" + ], + [ + "13b-llama2-q4_1", + "8.2\u202fGB" + ], + [ + "13b-llama2-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-llama2-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-llama2-q5_0", + "9.0\u202fGB" + ], + [ + "13b-llama2-q5_1", + "9.8\u202fGB" + ], + [ + "13b-llama2-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-llama2-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-llama2-q6_K", + "11\u202fGB" + ], + [ + "13b-llama2-q8_0", + "14\u202fGB" + ] + ], + "image": false, + "author": "TheBloke AI" + }, + "yarn-mistral": { + "url": "https://ollama.com/library/yarn-mistral", + "description": "An extension of Mistral to support context windows of 64K or 128K.", + "tags": [ + [ + "latest", + "4.1\u202fGB" + ], + [ + "7b", + "4.1\u202fGB" + ], + [ + "7b-128k", + "4.1\u202fGB" + ], + [ + "7b-64k", + "4.1\u202fGB" + ], + [ + "7b-128k-fp16", + "14\u202fGB" + ], + [ + "7b-128k-q2_K", + "3.1\u202fGB" + ], + [ + "7b-128k-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-128k-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-128k-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-128k-q4_0", + "4.1\u202fGB" + ], + [ + "7b-128k-q4_1", + "4.6\u202fGB" + ], + [ + "7b-128k-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-128k-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-128k-q5_0", + "5.0\u202fGB" + ], + [ + "7b-128k-q5_1", + "5.4\u202fGB" + ], + [ + "7b-128k-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-128k-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-128k-q6_K", + "5.9\u202fGB" + ], + [ + "7b-128k-q8_0", + "7.7\u202fGB" + ], + [ + "7b-64k-q2_K", + "3.1\u202fGB" + ], + [ + "7b-64k-q3_K_S", + "3.2\u202fGB" + ], + [ + "7b-64k-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-64k-q3_K_L", + "3.8\u202fGB" + ], + [ + "7b-64k-q4_0", + "4.1\u202fGB" + ], + [ + "7b-64k-q4_1", + "4.6\u202fGB" + ], + [ + "7b-64k-q4_K_S", + "4.1\u202fGB" + ], + [ + "7b-64k-q4_K_M", + "4.4\u202fGB" + ], + [ + "7b-64k-q5_0", + "5.0\u202fGB" + ], + [ + "7b-64k-q5_1", + "5.4\u202fGB" + ], + [ + "7b-64k-q5_K_S", + "5.0\u202fGB" + ], + [ + "7b-64k-q5_K_M", + "5.1\u202fGB" + ], + [ + "7b-64k-q6_K", + "5.9\u202fGB" + ], + [ + "7b-64k-q8_0", + "7.7\u202fGB" + ] + ], + "image": false, + "author": "Nous Research" + }, + "phi3.5": { + "url": "https://ollama.com/library/phi3.5", + "description": "A lightweight AI model with 3.8 billion parameters with performance overtaking similarly and larger sized models.", + "tags": [ + [ + "latest", + "2.2\u202fGB" + ], + [ + "3.8b", + "2.2\u202fGB" + ], + [ + "3.8b-mini-instruct-fp16", + "7.6\u202fGB" 
+ ], + [ + "3.8b-mini-instruct-q2_K", + "1.4\u202fGB" + ], + [ + "3.8b-mini-instruct-q3_K_S", + "1.7\u202fGB" + ], + [ + "3.8b-mini-instruct-q3_K_M", + "2.0\u202fGB" + ], + [ + "3.8b-mini-instruct-q3_K_L", + "2.1\u202fGB" + ], + [ + "3.8b-mini-instruct-q4_0", + "2.2\u202fGB" + ], + [ + "3.8b-mini-instruct-q4_1", + "2.4\u202fGB" + ], + [ + "3.8b-mini-instruct-q4_K_S", + "2.2\u202fGB" + ], + [ + "3.8b-mini-instruct-q4_K_M", + "2.4\u202fGB" + ], + [ + "3.8b-mini-instruct-q5_0", + "2.6\u202fGB" + ], + [ + "3.8b-mini-instruct-q5_1", + "2.9\u202fGB" + ], + [ + "3.8b-mini-instruct-q5_K_S", + "2.6\u202fGB" + ], + [ + "3.8b-mini-instruct-q5_K_M", + "2.8\u202fGB" + ], + [ + "3.8b-mini-instruct-q6_K", + "3.1\u202fGB" + ], + [ + "3.8b-mini-instruct-q8_0", + "4.1\u202fGB" + ] + ], + "image": false, + "author": "Microsoft" + }, + "medllama2": { + "url": "https://ollama.com/library/medllama2", + "description": "Fine-tuned Llama 2 model to answer medical questions based on an open source medical dataset.", + "tags": [ + [ + "latest", + "3.8\u202fGB" + ], + [ + "7b", + "3.8\u202fGB" + ], + [ + "7b-fp16", + "13\u202fGB" + ], + [ + "7b-q2_K", + "2.8\u202fGB" + ], + [ + "7b-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-q4_0", + "3.8\u202fGB" + ], + [ + "7b-q4_1", + "4.2\u202fGB" + ], + [ + "7b-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-q5_0", + "4.7\u202fGB" + ], + [ + "7b-q5_1", + "5.1\u202fGB" + ], + [ + "7b-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-q6_K", + "5.5\u202fGB" + ], + [ + "7b-q8_0", + "7.2\u202fGB" + ] + ], + "image": false, + "author": "Siraj Raval" + }, + "llama-pro": { + "url": "https://ollama.com/library/llama-pro", + "description": "An expansion of Llama 2 that specializes in integrating both general language understanding and domain-specific knowledge, particularly in programming and mathematics.", + "tags": [ + [ + "latest", + "4.7\u202fGB" + ], + [ + "instruct", + "4.7\u202fGB" + ], + [ + "text", + "4.7\u202fGB" + ], + [ + "8b-instruct-fp16", + "17\u202fGB" + ], + [ + "8b-instruct-q2_K", + "3.5\u202fGB" + ], + [ + "8b-instruct-q3_K_S", + "3.6\u202fGB" + ], + [ + "8b-instruct-q3_K_M", + "4.1\u202fGB" + ], + [ + "8b-instruct-q3_K_L", + "4.5\u202fGB" + ], + [ + "8b-instruct-q4_0", + "4.7\u202fGB" + ], + [ + "8b-instruct-q4_1", + "5.3\u202fGB" + ], + [ + "8b-instruct-q4_K_S", + "4.8\u202fGB" + ], + [ + "8b-instruct-q4_K_M", + "5.1\u202fGB" + ], + [ + "8b-instruct-q5_0", + "5.8\u202fGB" + ], + [ + "8b-instruct-q5_1", + "6.3\u202fGB" + ], + [ + "8b-instruct-q5_K_S", + "5.8\u202fGB" + ], + [ + "8b-instruct-q5_K_M", + "5.9\u202fGB" + ], + [ + "8b-instruct-q6_K", + "6.9\u202fGB" + ], + [ + "8b-instruct-q8_0", + "8.9\u202fGB" + ], + [ + "8b-text-fp16", + "17\u202fGB" + ], + [ + "8b-text-q2_K", + "3.5\u202fGB" + ], + [ + "8b-text-q3_K_S", + "3.6\u202fGB" + ], + [ + "8b-text-q3_K_M", + "4.1\u202fGB" + ], + [ + "8b-text-q3_K_L", + "4.5\u202fGB" + ], + [ + "8b-text-q4_0", + "4.7\u202fGB" + ], + [ + "8b-text-q4_1", + "5.3\u202fGB" + ], + [ + "8b-text-q4_K_S", + "4.8\u202fGB" + ], + [ + "8b-text-q4_K_M", + "5.1\u202fGB" + ], + [ + "8b-text-q5_0", + "5.8\u202fGB" + ], + [ + "8b-text-q5_1", + "6.3\u202fGB" + ], + [ + "8b-text-q5_K_S", + "5.8\u202fGB" + ], + [ + "8b-text-q5_K_M", + "5.9\u202fGB" + ], + [ + "8b-text-q6_K", + "6.9\u202fGB" + ], + [ + "8b-text-q8_0", + "8.9\u202fGB" + ] + ], + "image": false, + "author": "Tencent" + }, + "llava-phi3": { + "url": 
"https://ollama.com/library/llava-phi3", + "description": "A new small LLaVA model fine-tuned from Phi 3 Mini.", + "tags": [ + [ + "latest", + "2.9\u202fGB" + ], + [ + "3.8b", + "2.9\u202fGB" + ], + [ + "3.8b-mini-fp16", + "8.3\u202fGB" + ], + [ + "3.8b-mini-q4_0", + "2.9\u202fGB" + ] + ], + "image": false, + "author": "Xtuner" + }, + "meditron": { + "url": "https://ollama.com/library/meditron", + "description": "Open-source medical large language model adapted from Llama 2 to the medical domain.", + "tags": [ + [ + "latest", + "3.8\u202fGB" + ], + [ + "7b", + "3.8\u202fGB" + ], + [ + "70b", + "39\u202fGB" + ], + [ + "7b-fp16", + "13\u202fGB" + ], + [ + "7b-q2_K", + "2.8\u202fGB" + ], + [ + "7b-q3_K_S", + "2.9\u202fGB" + ], + [ + "7b-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-q3_K_L", + "3.6\u202fGB" + ], + [ + "7b-q4_0", + "3.8\u202fGB" + ], + [ + "7b-q4_1", + "4.2\u202fGB" + ], + [ + "7b-q4_K_S", + "3.9\u202fGB" + ], + [ + "7b-q4_K_M", + "4.1\u202fGB" + ], + [ + "7b-q5_0", + "4.7\u202fGB" + ], + [ + "7b-q5_1", + "5.1\u202fGB" + ], + [ + "7b-q5_K_S", + "4.7\u202fGB" + ], + [ + "7b-q5_K_M", + "4.8\u202fGB" + ], + [ + "7b-q6_K", + "5.5\u202fGB" + ], + [ + "7b-q8_0", + "7.2\u202fGB" + ], + [ + "70b-q4_0", + "39\u202fGB" + ], + [ + "70b-q4_1", + "43\u202fGB" + ], + [ + "70b-q4_K_S", + "39\u202fGB" + ], + [ + "70b-q5_1", + "52\u202fGB" + ] + ], + "image": false, + "author": "EPFL LLM Team" + }, + "nous-hermes2-mixtral": { + "url": "https://ollama.com/library/nous-hermes2-mixtral", + "description": "The Nous Hermes 2 model from Nous Research, now trained over Mixtral.", + "tags": [ + [ + "latest", + "26\u202fGB" + ], + [ + "8x7b", + "26\u202fGB" + ], + [ + "dpo", + "26\u202fGB" + ], + [ + "8x7b-dpo-fp16", + "93\u202fGB" + ], + [ + "8x7b-dpo-q2_K", + "16\u202fGB" + ], + [ + "8x7b-dpo-q3_K_S", + "20\u202fGB" + ], + [ + "8x7b-dpo-q3_K_M", + "20\u202fGB" + ], + [ + "8x7b-dpo-q3_K_L", + "20\u202fGB" + ], + [ + "8x7b-dpo-q4_0", + "26\u202fGB" + ], + [ + "8x7b-dpo-q4_1", + "29\u202fGB" + ], + [ + "8x7b-dpo-q4_K_S", + "26\u202fGB" + ], + [ + "8x7b-dpo-q4_K_M", + "26\u202fGB" + ], + [ + "8x7b-dpo-q5_0", + "32\u202fGB" + ], + [ + "8x7b-dpo-q5_1", + "35\u202fGB" + ], + [ + "8x7b-dpo-q5_K_S", + "32\u202fGB" + ], + [ + "8x7b-dpo-q5_K_M", + "32\u202fGB" + ], + [ + "8x7b-dpo-q6_K", + "38\u202fGB" + ], + [ + "8x7b-dpo-q8_0", + "50\u202fGB" + ] + ], + "image": false, + "author": "Nous Research" + }, + "nexusraven": { + "url": "https://ollama.com/library/nexusraven", + "description": "Nexus Raven is a 13B instruction tuned model for function calling tasks.", + "tags": [ + [ + "latest", + "7.4\u202fGB" + ], + [ + "13b", + "7.4\u202fGB" + ], + [ + "13b-fp16", + "26\u202fGB" + ], + [ + "13b-q2_K", + "5.4\u202fGB" + ], + [ + "13b-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-q4_0", + "7.4\u202fGB" + ], + [ + "13b-q4_1", + "8.2\u202fGB" + ], + [ + "13b-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-q5_0", + "9.0\u202fGB" + ], + [ + "13b-q5_1", + "9.8\u202fGB" + ], + [ + "13b-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-q6_K", + "11\u202fGB" + ], + [ + "13b-q8_0", + "14\u202fGB" + ], + [ + "13b-v2-fp16", + "26\u202fGB" + ], + [ + "13b-v2-q2_K", + "5.4\u202fGB" + ], + [ + "13b-v2-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-v2-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-v2-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-v2-q4_0", + "7.4\u202fGB" + ], + [ + "13b-v2-q4_1", + "8.2\u202fGB" + 
], + [ + "13b-v2-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-v2-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-v2-q5_0", + "9.0\u202fGB" + ], + [ + "13b-v2-q5_1", + "9.8\u202fGB" + ], + [ + "13b-v2-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-v2-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-v2-q6_K", + "11\u202fGB" + ], + [ + "13b-v2-q8_0", + "14\u202fGB" + ] + ], + "image": false, + "author": "NexusFlow AI" + }, + "codeup": { + "url": "https://ollama.com/library/codeup", + "description": "Great code generation model based on Llama2.", + "tags": [ + [ + "latest", + "7.4\u202fGB" + ], + [ + "13b", + "7.4\u202fGB" + ], + [ + "13b-llama2", + "7.4\u202fGB" + ], + [ + "13b-llama2-chat", + "7.4\u202fGB" + ], + [ + "13b-llama2-chat-fp16", + "26\u202fGB" + ], + [ + "13b-llama2-chat-q2_K", + "5.4\u202fGB" + ], + [ + "13b-llama2-chat-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-llama2-chat-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-llama2-chat-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-llama2-chat-q4_0", + "7.4\u202fGB" + ], + [ + "13b-llama2-chat-q4_1", + "8.2\u202fGB" + ], + [ + "13b-llama2-chat-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-llama2-chat-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-llama2-chat-q5_0", + "9.0\u202fGB" + ], + [ + "13b-llama2-chat-q5_1", + "9.8\u202fGB" + ], + [ + "13b-llama2-chat-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-llama2-chat-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-llama2-chat-q6_K", + "11\u202fGB" + ], + [ + "13b-llama2-chat-q8_0", + "14\u202fGB" + ] + ], + "image": false, + "author": "DeepSE" + }, + "everythinglm": { + "url": "https://ollama.com/library/everythinglm", + "description": "Uncensored Llama2 based model with support for a 16K context window.", + "tags": [ + [ + "latest", + "7.4\u202fGB" + ], + [ + "13b", + "7.4\u202fGB" + ], + [ + "13b-16k", + "7.4\u202fGB" + ], + [ + "13b-16k-fp16", + "26\u202fGB" + ], + [ + "13b-16k-q2_K", + "5.4\u202fGB" + ], + [ + "13b-16k-q3_K_S", + "5.7\u202fGB" + ], + [ + "13b-16k-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-16k-q3_K_L", + "6.9\u202fGB" + ], + [ + "13b-16k-q4_0", + "7.4\u202fGB" + ], + [ + "13b-16k-q4_1", + "8.2\u202fGB" + ], + [ + "13b-16k-q4_K_S", + "7.4\u202fGB" + ], + [ + "13b-16k-q4_K_M", + "7.9\u202fGB" + ], + [ + "13b-16k-q5_0", + "9.0\u202fGB" + ], + [ + "13b-16k-q5_1", + "9.8\u202fGB" + ], + [ + "13b-16k-q5_K_S", + "9.0\u202fGB" + ], + [ + "13b-16k-q5_K_M", + "9.2\u202fGB" + ], + [ + "13b-16k-q6_K", + "11\u202fGB" + ], + [ + "13b-16k-q8_0", + "14\u202fGB" + ] + ], + "image": false, + "author": "Totally Not An LLM" + }, + "hermes3": { + "url": "https://ollama.com/library/hermes3", + "description": "Hermes 3 is the latest version of the flagship Hermes series of LLMs by Nous Research", + "tags": [ + [ + "latest", + "4.7\u202fGB" + ], + [ + "8b", + "4.7\u202fGB" + ], + [ + "70b", + "40\u202fGB" + ], + [ + "405b", + "229\u202fGB" + ], + [ + "8b-llama3.1-fp16", + "16\u202fGB" + ], + [ + "8b-llama3.1-q2_K", + "3.2\u202fGB" + ], + [ + "8b-llama3.1-q3_K_S", + "3.7\u202fGB" + ], + [ + "8b-llama3.1-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-llama3.1-q3_K_L", + "4.3\u202fGB" + ], + [ + "8b-llama3.1-q4_0", + "4.7\u202fGB" + ], + [ + "8b-llama3.1-q4_1", + "5.1\u202fGB" + ], + [ + "8b-llama3.1-q4_K_S", + "4.7\u202fGB" + ], + [ + "8b-llama3.1-q4_K_M", + "4.9\u202fGB" + ], + [ + "8b-llama3.1-q5_0", + "5.6\u202fGB" + ], + [ + "8b-llama3.1-q5_1", + "6.1\u202fGB" + ], + [ + "8b-llama3.1-q5_K_S", + "5.6\u202fGB" + ], + [ + "8b-llama3.1-q5_K_M", + "5.7\u202fGB" + ], + [ + "8b-llama3.1-q6_K", + "6.6\u202fGB" + ], + [ + "8b-llama3.1-q8_0", + "8.5\u202fGB" + ], + 
[ + "70b-llama3.1-fp16", + "141\u202fGB" + ], + [ + "70b-llama3.1-q2_K", + "26\u202fGB" + ], + [ + "70b-llama3.1-q3_K_S", + "31\u202fGB" + ], + [ + "70b-llama3.1-q3_K_M", + "34\u202fGB" + ], + [ + "70b-llama3.1-q3_K_L", + "37\u202fGB" + ], + [ + "70b-llama3.1-q4_0", + "40\u202fGB" + ], + [ + "70b-llama3.1-q4_1", + "44\u202fGB" + ], + [ + "70b-llama3.1-q4_K_S", + "40\u202fGB" + ], + [ + "70b-llama3.1-q4_K_M", + "43\u202fGB" + ], + [ + "70b-llama3.1-q5_0", + "49\u202fGB" + ], + [ + "70b-llama3.1-q5_1", + "53\u202fGB" + ], + [ + "70b-llama3.1-q5_K_S", + "49\u202fGB" + ], + [ + "70b-llama3.1-q5_K_M", + "50\u202fGB" + ], + [ + "70b-llama3.1-q6_K", + "58\u202fGB" + ], + [ + "70b-llama3.1-q8_0", + "75\u202fGB" + ], + [ + "405b-llama3.1-fp16", + "812\u202fGB" + ], + [ + "405b-llama3.1-q2_K", + "149\u202fGB" + ], + [ + "405b-llama3.1-q3_K_S", + "175\u202fGB" + ], + [ + "405b-llama3.1-q3_K_M", + "195\u202fGB" + ], + [ + "405b-llama3.1-q3_K_L", + "213\u202fGB" + ], + [ + "405b-llama3.1-q4_0", + "229\u202fGB" + ], + [ + "405b-llama3.1-q4_1", + "254\u202fGB" + ], + [ + "405b-llama3.1-q4_K_S", + "231\u202fGB" + ], + [ + "405b-llama3.1-q4_K_M", + "243\u202fGB" + ], + [ + "405b-llama3.1-q5_0", + "279\u202fGB" + ], + [ + "405b-llama3.1-q5_1", + "305\u202fGB" + ], + [ + "405b-llama3.1-q5_K_S", + "279\u202fGB" + ], + [ + "405b-llama3.1-q5_K_M", + "287\u202fGB" + ], + [ + "405b-llama3.1-q6_K", + "333\u202fGB" + ], + [ + "405b-llama3.1-q8_0", + "431\u202fGB" + ] + ], + "image": false, + "author": "Nous Research" + }, + "internlm2": { + "url": "https://ollama.com/library/internlm2", + "description": "InternLM2.5 is a 7B parameter model tailored for practical scenarios with outstanding reasoning capability.", + "tags": [ + [ + "latest", + "4.5\u202fGB" + ], + [ + "1m", + "4.5\u202fGB" + ], + [ + "1.8b", + "1.1\u202fGB" + ], + [ + "7b", + "4.5\u202fGB" + ], + [ + "20b", + "11\u202fGB" + ], + [ + "1.8b-chat-v2.5-fp16", + "3.8\u202fGB" + ], + [ + "1.8b-chat-v2.5-q2_K", + "772\u202fMB" + ], + [ + "1.8b-chat-v2.5-q3_K_S", + "888\u202fMB" + ], + [ + "1.8b-chat-v2.5-q3_K_M", + "964\u202fMB" + ], + [ + "1.8b-chat-v2.5-q3_K_L", + "1.0\u202fGB" + ], + [ + "1.8b-chat-v2.5-q4_0", + "1.1\u202fGB" + ], + [ + "1.8b-chat-v2.5-q4_1", + "1.2\u202fGB" + ], + [ + "1.8b-chat-v2.5-q4_K_S", + "1.1\u202fGB" + ], + [ + "1.8b-chat-v2.5-q4_K_M", + "1.2\u202fGB" + ], + [ + "1.8b-chat-v2.5-q5_0", + "1.3\u202fGB" + ], + [ + "1.8b-chat-v2.5-q5_1", + "1.4\u202fGB" + ], + [ + "1.8b-chat-v2.5-q5_K_S", + "1.3\u202fGB" + ], + [ + "1.8b-chat-v2.5-q5_K_M", + "1.4\u202fGB" + ], + [ + "1.8b-chat-v2.5-q6_K", + "1.6\u202fGB" + ], + [ + "1.8b-chat-v2.5-q8_0", + "2.0\u202fGB" + ], + [ + "7b-chat-1m-v2.5-fp16", + "15\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q2_K", + "3.0\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q3_K_S", + "3.5\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q3_K_M", + "3.8\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q3_K_L", + "4.1\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q4_0", + "4.5\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q4_1", + "4.9\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q4_K_S", + "4.5\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q4_K_M", + "4.7\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q5_0", + "5.4\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q5_1", + "5.8\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q5_K_S", + "5.4\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q5_K_M", + "5.5\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q6_K", + "6.4\u202fGB" + ], + [ + "7b-chat-1m-v2.5-q8_0", + "8.2\u202fGB" + ], + [ + "7b-chat-v2.5-fp16", + "15\u202fGB" + ], + [ + "7b-chat-v2.5-q2_K", + "3.0\u202fGB" + ], + [ + 
"7b-chat-v2.5-q3_K_S", + "3.5\u202fGB" + ], + [ + "7b-chat-v2.5-q3_K_M", + "3.8\u202fGB" + ], + [ + "7b-chat-v2.5-q3_K_L", + "4.1\u202fGB" + ], + [ + "7b-chat-v2.5-q4_0", + "4.5\u202fGB" + ], + [ + "7b-chat-v2.5-q4_1", + "4.9\u202fGB" + ], + [ + "7b-chat-v2.5-q4_K_S", + "4.5\u202fGB" + ], + [ + "7b-chat-v2.5-q4_K_M", + "4.7\u202fGB" + ], + [ + "7b-chat-v2.5-q5_0", + "5.4\u202fGB" + ], + [ + "7b-chat-v2.5-q5_1", + "5.8\u202fGB" + ], + [ + "7b-chat-v2.5-q5_K_S", + "5.4\u202fGB" + ], + [ + "7b-chat-v2.5-q5_K_M", + "5.5\u202fGB" + ], + [ + "7b-chat-v2.5-q6_K", + "6.4\u202fGB" + ], + [ + "7b-chat-v2.5-q8_0", + "8.2\u202fGB" + ], + [ + "20b-chat-v2.5-fp16", + "40\u202fGB" + ], + [ + "20b-chat-v2.5-q2_K", + "7.5\u202fGB" + ], + [ + "20b-chat-v2.5-q3_K_S", + "8.8\u202fGB" + ], + [ + "20b-chat-v2.5-q3_K_M", + "9.7\u202fGB" + ], + [ + "20b-chat-v2.5-q3_K_L", + "11\u202fGB" + ], + [ + "20b-chat-v2.5-q4_0", + "11\u202fGB" + ], + [ + "20b-chat-v2.5-q4_1", + "13\u202fGB" + ], + [ + "20b-chat-v2.5-q4_K_S", + "11\u202fGB" + ], + [ + "20b-chat-v2.5-q4_K_M", + "12\u202fGB" + ], + [ + "20b-chat-v2.5-q5_0", + "14\u202fGB" + ], + [ + "20b-chat-v2.5-q5_1", + "15\u202fGB" + ], + [ + "20b-chat-v2.5-q5_K_S", + "14\u202fGB" + ], + [ + "20b-chat-v2.5-q5_K_M", + "14\u202fGB" + ], + [ + "20b-chat-v2.5-q6_K", + "16\u202fGB" + ], + [ + "20b-chat-v2.5-q8_0", + "21\u202fGB" + ] + ], + "image": false, + "author": "Intern LM" + }, "magicoder": { "url": "https://ollama.com/library/magicoder", "description": "\ud83c\udfa9 Magicoder is a family of 7B parameter models trained on 75K synthetic instruction data using OSS-Instruct, a novel approach to enlightening LLMs with open-source code snippets.", "tags": [ [ "latest", - "3.8 GB" + "3.8\u202fGB" ], [ "7b", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-s-cl", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-s-cl-fp16", - "13 GB" + "13\u202fGB" ], [ "7b-s-cl-q2_K", - "2.8 GB" - ], - [ - "7b-s-cl-q3_K_L", - "3.6 GB" - ], - [ - "7b-s-cl-q3_K_M", - "3.3 GB" + "2.8\u202fGB" ], [ "7b-s-cl-q3_K_S", - "2.9 GB" + "2.9\u202fGB" + ], + [ + "7b-s-cl-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-s-cl-q3_K_L", + "3.6\u202fGB" ], [ "7b-s-cl-q4_0", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-s-cl-q4_1", - "4.2 GB" - ], - [ - "7b-s-cl-q4_K_M", - "4.1 GB" + "4.2\u202fGB" ], [ "7b-s-cl-q4_K_S", - "3.9 GB" + "3.9\u202fGB" + ], + [ + "7b-s-cl-q4_K_M", + "4.1\u202fGB" ], [ "7b-s-cl-q5_0", - "4.7 GB" + "4.7\u202fGB" ], [ "7b-s-cl-q5_1", - "5.1 GB" - ], - [ - "7b-s-cl-q5_K_M", - "4.8 GB" + "5.1\u202fGB" ], [ "7b-s-cl-q5_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "7b-s-cl-q5_K_M", + "4.8\u202fGB" ], [ "7b-s-cl-q6_K", - "5.5 GB" + "5.5\u202fGB" ], [ "7b-s-cl-q8_0", - "7.2 GB" + "7.2\u202fGB" ] ], "image": false, @@ -18909,71 +20785,71 @@ "tags": [ [ "latest", - "1.6 GB" + "1.6\u202fGB" ], [ "3b", - "1.6 GB" + "1.6\u202fGB" ], [ "3b-fp16", - "5.6 GB" + "5.6\u202fGB" ], [ "3b-q2_K", - "1.2 GB" - ], - [ - "3b-q3_K_L", - "1.5 GB" - ], - [ - "3b-q3_K_M", - "1.4 GB" + "1.2\u202fGB" ], [ "3b-q3_K_S", - "1.3 GB" + "1.3\u202fGB" + ], + [ + "3b-q3_K_M", + "1.4\u202fGB" + ], + [ + "3b-q3_K_L", + "1.5\u202fGB" ], [ "3b-q4_0", - "1.6 GB" + "1.6\u202fGB" ], [ "3b-q4_1", - "1.8 GB" - ], - [ - "3b-q4_K_M", - "1.7 GB" + "1.8\u202fGB" ], [ "3b-q4_K_S", - "1.6 GB" + "1.6\u202fGB" + ], + [ + "3b-q4_K_M", + "1.7\u202fGB" ], [ "3b-q5_0", - "1.9 GB" + "1.9\u202fGB" ], [ "3b-q5_1", - "2.1 GB" - ], - [ - "3b-q5_K_M", - "2.0 GB" + "2.1\u202fGB" ], [ "3b-q5_K_S", - "1.9 GB" + "1.9\u202fGB" + ], + [ + "3b-q5_K_M", + "2.0\u202fGB" ], [ "3b-q6_K", - "2.3 GB" + 
"2.3\u202fGB" ], [ "3b-q8_0", - "3.0 GB" + "3.0\u202fGB" ] ], "image": false, @@ -18985,67 +20861,67 @@ "tags": [ [ "latest", - "19 GB" + "19\u202fGB" ], [ "34b", - "19 GB" + "19\u202fGB" ], [ "34b-v0.1-fp16", - "67 GB" + "67\u202fGB" ], [ "34b-v0.1-q2_K", - "14 GB" - ], - [ - "34b-v0.1-q3_K_L", - "18 GB" - ], - [ - "34b-v0.1-q3_K_M", - "16 GB" + "14\u202fGB" ], [ "34b-v0.1-q3_K_S", - "15 GB" + "15\u202fGB" + ], + [ + "34b-v0.1-q3_K_M", + "16\u202fGB" + ], + [ + "34b-v0.1-q3_K_L", + "18\u202fGB" ], [ "34b-v0.1-q4_0", - "19 GB" + "19\u202fGB" ], [ "34b-v0.1-q4_1", - "21 GB" + "21\u202fGB" ], [ "34b-v0.1-q4_K_M", - "20 GB" + "20\u202fGB" ], [ "34b-v0.1-q5_0", - "23 GB" + "23\u202fGB" ], [ "34b-v0.1-q5_1", - "25 GB" - ], - [ - "34b-v0.1-q5_K_M", - "24 GB" + "25\u202fGB" ], [ "34b-v0.1-q5_K_S", - "23 GB" + "23\u202fGB" + ], + [ + "34b-v0.1-q5_K_M", + "24\u202fGB" ], [ "34b-v0.1-q6_K", - "28 GB" + "28\u202fGB" ], [ "34b-v0.1-q8_0", - "36 GB" + "36\u202fGB" ] ], "image": false, @@ -19057,147 +20933,363 @@ "tags": [ [ "latest", - "4.1 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v0.1-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-v0.1-q2_K", - "3.1 GB" - ], - [ - "7b-v0.1-q3_K_L", - "3.8 GB" - ], - [ - "7b-v0.1-q3_K_M", - "3.5 GB" + "3.1\u202fGB" ], [ "7b-v0.1-q3_K_S", - "3.2 GB" + "3.2\u202fGB" + ], + [ + "7b-v0.1-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v0.1-q3_K_L", + "3.8\u202fGB" ], [ "7b-v0.1-q4_0", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v0.1-q4_1", - "4.6 GB" - ], - [ - "7b-v0.1-q4_K_M", - "4.4 GB" + "4.6\u202fGB" ], [ "7b-v0.1-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v0.1-q4_K_M", + "4.4\u202fGB" ], [ "7b-v0.1-q5_0", - "5.0 GB" + "5.0\u202fGB" ], [ "7b-v0.1-q5_1", - "5.4 GB" - ], - [ - "7b-v0.1-q5_K_M", - "5.1 GB" + "5.4\u202fGB" ], [ "7b-v0.1-q5_K_S", - "5.0 GB" + "5.0\u202fGB" + ], + [ + "7b-v0.1-q5_K_M", + "5.1\u202fGB" ], [ "7b-v0.1-q6_K", - "5.9 GB" + "5.9\u202fGB" ], [ "7b-v0.1-q8_0", - "7.7 GB" + "7.7\u202fGB" ] ], "image": false, "author": "Amazon Web Services" }, + "llama3-groq-tool-use": { + "url": "https://ollama.com/library/llama3-groq-tool-use", + "description": "A series of models from Groq that represent a significant advancement in open-source AI capabilities for tool use/function calling.", + "tags": [ + [ + "latest", + "4.7\u202fGB" + ], + [ + "8b", + "4.7\u202fGB" + ], + [ + "70b", + "40\u202fGB" + ], + [ + "8b-fp16", + "16\u202fGB" + ], + [ + "8b-q2_K", + "3.2\u202fGB" + ], + [ + "8b-q3_K_S", + "3.7\u202fGB" + ], + [ + "8b-q3_K_M", + "4.0\u202fGB" + ], + [ + "8b-q3_K_L", + "4.3\u202fGB" + ], + [ + "8b-q4_0", + "4.7\u202fGB" + ], + [ + "8b-q4_1", + "5.1\u202fGB" + ], + [ + "8b-q4_K_S", + "4.7\u202fGB" + ], + [ + "8b-q4_K_M", + "4.9\u202fGB" + ], + [ + "8b-q5_0", + "5.6\u202fGB" + ], + [ + "8b-q5_1", + "6.1\u202fGB" + ], + [ + "8b-q5_K_S", + "5.6\u202fGB" + ], + [ + "8b-q5_K_M", + "5.7\u202fGB" + ], + [ + "8b-q6_K", + "6.6\u202fGB" + ], + [ + "8b-q8_0", + "8.5\u202fGB" + ], + [ + "70b-fp16", + "141\u202fGB" + ], + [ + "70b-q2_K", + "26\u202fGB" + ], + [ + "70b-q3_K_S", + "31\u202fGB" + ], + [ + "70b-q3_K_M", + "34\u202fGB" + ], + [ + "70b-q3_K_L", + "37\u202fGB" + ], + [ + "70b-q4_0", + "40\u202fGB" + ], + [ + "70b-q4_1", + "44\u202fGB" + ], + [ + "70b-q4_K_S", + "40\u202fGB" + ], + [ + "70b-q4_K_M", + "43\u202fGB" + ], + [ + "70b-q5_0", + "49\u202fGB" + ], + [ + "70b-q5_1", + "53\u202fGB" + ], + [ + "70b-q5_K_S", + "49\u202fGB" + ], + [ + "70b-q5_K_M", + "50\u202fGB" + ], + [ + "70b-q6_K", + "58\u202fGB" + ], + [ + "70b-q8_0", + "75\u202fGB" + ] + 
], + "image": false, + "author": "Groq" + }, + "falcon2": { + "url": "https://ollama.com/library/falcon2", + "description": "Falcon2 is an 11B parameters causal decoder-only model built by TII and trained over 5T tokens.", + "tags": [ + [ + "latest", + "6.4\u202fGB" + ], + [ + "11b", + "6.4\u202fGB" + ], + [ + "11b-fp16", + "22\u202fGB" + ], + [ + "11b-q2_K", + "4.3\u202fGB" + ], + [ + "11b-q3_K_S", + "4.9\u202fGB" + ], + [ + "11b-q3_K_M", + "5.4\u202fGB" + ], + [ + "11b-q3_K_L", + "5.8\u202fGB" + ], + [ + "11b-q4_0", + "6.4\u202fGB" + ], + [ + "11b-q4_1", + "7.1\u202fGB" + ], + [ + "11b-q4_K_S", + "6.4\u202fGB" + ], + [ + "11b-q4_K_M", + "6.8\u202fGB" + ], + [ + "11b-q5_0", + "7.7\u202fGB" + ], + [ + "11b-q5_1", + "8.4\u202fGB" + ], + [ + "11b-q5_K_S", + "7.7\u202fGB" + ], + [ + "11b-q5_K_M", + "8.2\u202fGB" + ], + [ + "11b-q6_K", + "9.2\u202fGB" + ], + [ + "11b-q8_0", + "12\u202fGB" + ] + ], + "image": false, + "author": "Technology Innovation Institute" + }, "wizard-vicuna": { "url": "https://ollama.com/library/wizard-vicuna", "description": "Wizard Vicuna is a 13B parameter model based on Llama 2 trained by MelodysDreamj.", "tags": [ [ "latest", - "7.4 GB" + "7.4\u202fGB" ], [ "13b", - "7.4 GB" + "7.4\u202fGB" ], [ "13b-fp16", - "26 GB" + "26\u202fGB" ], [ "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" + "5.4\u202fGB" ], [ "13b-q3_K_S", - "5.7 GB" + "5.7\u202fGB" + ], + [ + "13b-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-q3_K_L", + "6.9\u202fGB" ], [ "13b-q4_0", - "7.4 GB" + "7.4\u202fGB" ], [ "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" + "8.2\u202fGB" ], [ "13b-q4_K_S", - "7.4 GB" + "7.4\u202fGB" + ], + [ + "13b-q4_K_M", + "7.9\u202fGB" ], [ "13b-q5_0", - "9.0 GB" + "9.0\u202fGB" ], [ "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" + "9.8\u202fGB" ], [ "13b-q5_K_S", - "9.0 GB" + "9.0\u202fGB" + ], + [ + "13b-q5_K_M", + "9.2\u202fGB" ], [ "13b-q6_K", - "11 GB" + "11\u202fGB" ], [ "13b-q8_0", - "14 GB" + "14\u202fGB" ] ], "image": false, @@ -19209,71 +21301,71 @@ "tags": [ [ "latest", - "3.8 GB" + "3.8\u202fGB" ], [ "7b", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-fp16", - "13 GB" + "13\u202fGB" ], [ "7b-q2_K", - "2.5 GB" - ], - [ - "7b-q3_K_L", - "3.6 GB" - ], - [ - "7b-q3_K_M", - "3.3 GB" + "2.5\u202fGB" ], [ "7b-q3_K_S", - "2.9 GB" + "2.9\u202fGB" + ], + [ + "7b-q3_K_M", + "3.3\u202fGB" + ], + [ + "7b-q3_K_L", + "3.6\u202fGB" ], [ "7b-q4_0", - "3.8 GB" + "3.8\u202fGB" ], [ "7b-q4_1", - "4.2 GB" - ], - [ - "7b-q4_K_M", - "4.1 GB" + "4.2\u202fGB" ], [ "7b-q4_K_S", - "3.9 GB" + "3.9\u202fGB" + ], + [ + "7b-q4_K_M", + "4.1\u202fGB" ], [ "7b-q5_0", - "4.7 GB" + "4.7\u202fGB" ], [ "7b-q5_1", - "5.1 GB" - ], - [ - "7b-q5_K_M", - "4.8 GB" + "5.1\u202fGB" ], [ "7b-q5_K_S", - "4.7 GB" + "4.7\u202fGB" + ], + [ + "7b-q5_K_M", + "4.8\u202fGB" ], [ "7b-q6_K", - "5.5 GB" + "5.5\u202fGB" ], [ "7b-q8_0", - "7.2 GB" + "7.2\u202fGB" ] ], "image": false, @@ -19285,463 +21377,387 @@ "tags": [ [ "latest", - "68 GB" + "68\u202fGB" ], [ "120b", - "68 GB" + "68\u202fGB" ], [ "v2.2", - "68 GB" + "68\u202fGB" ], [ "120b-v2.2", - "68 GB" + "68\u202fGB" ], [ "120b-v2.2-fp16", - "241 GB" + "241\u202fGB" ], [ "120b-v2.2-q2_K", - "51 GB" - ], - [ - "120b-v2.2-q3_K_L", - "63 GB" - ], - [ - "120b-v2.2-q3_K_M", - "58 GB" + "51\u202fGB" ], [ "120b-v2.2-q3_K_S", - "52 GB" + "52\u202fGB" + ], + [ + "120b-v2.2-q3_K_M", + "58\u202fGB" + ], + [ + "120b-v2.2-q3_K_L", + "63\u202fGB" ], [ "120b-v2.2-q4_0", - "68 GB" + "68\u202fGB" ], [ "120b-v2.2-q4_1", - "75 GB" 
- ], - [ - "120b-v2.2-q4_K_M", - "72 GB" + "75\u202fGB" ], [ "120b-v2.2-q4_K_S", - "68 GB" + "68\u202fGB" + ], + [ + "120b-v2.2-q4_K_M", + "72\u202fGB" ], [ "120b-v2.2-q5_0", - "83 GB" + "83\u202fGB" ], [ "120b-v2.2-q5_1", - "90 GB" - ], - [ - "120b-v2.2-q5_K_M", - "85 GB" + "90\u202fGB" ], [ "120b-v2.2-q5_K_S", - "83 GB" + "83\u202fGB" + ], + [ + "120b-v2.2-q5_K_M", + "85\u202fGB" ], [ "120b-v2.2-q6_K", - "99 GB" + "99\u202fGB" ], [ "120b-v2.2-q8_0", - "128 GB" + "128\u202fGB" ] ], "image": false, "author": "Cognitive Computations" }, - "goliath": { - "url": "https://ollama.com/library/goliath", - "description": "A language model created by combining two fine-tuned Llama 2 70B models into one.", - "tags": [ - [ - "latest", - "66 GB" - ], - [ - "120b-q4_0", - "66 GB" - ], - [ - "120b-fp16", - "236 GB" - ], - [ - "120b-q2_K", - "50 GB" - ], - [ - "120b-q3_K_L", - "62 GB" - ], - [ - "120b-q3_K_M", - "56 GB" - ], - [ - "120b-q3_K_S", - "51 GB" - ], - [ - "120b-q4_1", - "74 GB" - ], - [ - "120b-q4_K_M", - "71 GB" - ], - [ - "120b-q4_K_S", - "66 GB" - ], - [ - "120b-q5_0", - "81 GB" - ], - [ - "120b-q5_1", - "88 GB" - ], - [ - "120b-q5_K_M", - "83 GB" - ], - [ - "120b-q5_K_S", - "81 GB" - ], - [ - "120b-q6_K", - "97 GB" - ], - [ - "120b-q8_0", - "125 GB" - ] - ], - "image": false, - "author": "Alpindale" - }, "notux": { "url": "https://ollama.com/library/notux", "description": "A top-performing mixture of experts model, fine-tuned with high-quality data.", "tags": [ [ "latest", - "26 GB" + "26\u202fGB" ], [ "8x7b", - "26 GB" + "26\u202fGB" ], [ "8x7b-v1", - "26 GB" + "26\u202fGB" ], [ "8x7b-v1-fp16", - "93 GB" + "93\u202fGB" ], [ "8x7b-v1-q2_K", - "16 GB" - ], - [ - "8x7b-v1-q3_K_L", - "20 GB" - ], - [ - "8x7b-v1-q3_K_M", - "20 GB" + "16\u202fGB" ], [ "8x7b-v1-q3_K_S", - "20 GB" + "20\u202fGB" + ], + [ + "8x7b-v1-q3_K_M", + "20\u202fGB" + ], + [ + "8x7b-v1-q3_K_L", + "20\u202fGB" ], [ "8x7b-v1-q4_0", - "26 GB" + "26\u202fGB" ], [ "8x7b-v1-q4_1", - "29 GB" - ], - [ - "8x7b-v1-q4_K_M", - "26 GB" + "29\u202fGB" ], [ "8x7b-v1-q4_K_S", - "26 GB" + "26\u202fGB" + ], + [ + "8x7b-v1-q4_K_M", + "26\u202fGB" ], [ "8x7b-v1-q5_0", - "32 GB" + "32\u202fGB" ], [ "8x7b-v1-q5_1", - "35 GB" - ], - [ - "8x7b-v1-q5_K_M", - "32 GB" + "35\u202fGB" ], [ "8x7b-v1-q5_K_S", - "32 GB" + "32\u202fGB" + ], + [ + "8x7b-v1-q5_K_M", + "32\u202fGB" ], [ "8x7b-v1-q6_K", - "38 GB" + "38\u202fGB" ], [ "8x7b-v1-q8_0", - "50 GB" + "50\u202fGB" ] ], "image": false, "author": "Argilla" }, + "goliath": { + "url": "https://ollama.com/library/goliath", + "description": "A language model created by combining two fine-tuned Llama 2 70B models into one.", + "tags": [ + [ + "latest", + "66\u202fGB" + ], + [ + "120b-q4_0", + "66\u202fGB" + ], + [ + "120b-fp16", + "236\u202fGB" + ], + [ + "120b-q2_K", + "50\u202fGB" + ], + [ + "120b-q3_K_S", + "51\u202fGB" + ], + [ + "120b-q3_K_M", + "56\u202fGB" + ], + [ + "120b-q3_K_L", + "62\u202fGB" + ], + [ + "120b-q4_1", + "74\u202fGB" + ], + [ + "120b-q4_K_S", + "66\u202fGB" + ], + [ + "120b-q4_K_M", + "71\u202fGB" + ], + [ + "120b-q5_0", + "81\u202fGB" + ], + [ + "120b-q5_1", + "88\u202fGB" + ], + [ + "120b-q5_K_S", + "81\u202fGB" + ], + [ + "120b-q5_K_M", + "83\u202fGB" + ], + [ + "120b-q6_K", + "97\u202fGB" + ], + [ + "120b-q8_0", + "125\u202fGB" + ] + ], + "image": false, + "author": "Alpindale" + }, "open-orca-platypus2": { "url": "https://ollama.com/library/open-orca-platypus2", "description": "Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. 
Designed for chat and code generation.", "tags": [ [ "latest", - "7.4 GB" + "7.4\u202fGB" ], [ "13b", - "7.4 GB" + "7.4\u202fGB" ], [ "13b-fp16", - "26 GB" + "26\u202fGB" ], [ "13b-q2_K", - "5.4 GB" - ], - [ - "13b-q3_K_L", - "6.9 GB" - ], - [ - "13b-q3_K_M", - "6.3 GB" + "5.4\u202fGB" ], [ "13b-q3_K_S", - "5.7 GB" + "5.7\u202fGB" + ], + [ + "13b-q3_K_M", + "6.3\u202fGB" + ], + [ + "13b-q3_K_L", + "6.9\u202fGB" ], [ "13b-q4_0", - "7.4 GB" + "7.4\u202fGB" ], [ "13b-q4_1", - "8.2 GB" - ], - [ - "13b-q4_K_M", - "7.9 GB" + "8.2\u202fGB" ], [ "13b-q4_K_S", - "7.4 GB" + "7.4\u202fGB" + ], + [ + "13b-q4_K_M", + "7.9\u202fGB" ], [ "13b-q5_0", - "9.0 GB" + "9.0\u202fGB" ], [ "13b-q5_1", - "9.8 GB" - ], - [ - "13b-q5_K_M", - "9.2 GB" + "9.8\u202fGB" ], [ "13b-q5_K_S", - "9.0 GB" + "9.0\u202fGB" + ], + [ + "13b-q5_K_M", + "9.2\u202fGB" ], [ "13b-q6_K", - "11 GB" + "11\u202fGB" ], [ "13b-q8_0", - "14 GB" + "14\u202fGB" ] ], "image": false, "author": "Open Orca" }, - "falcon2": { - "url": "https://ollama.com/library/falcon2", - "description": "Falcon2 is an 11B parameters causal decoder-only model built by TII and trained over 5T tokens.", - "tags": [ - [ - "latest", - "6.4 GB" - ], - [ - "11b", - "6.4 GB" - ], - [ - "11b-fp16", - "22 GB" - ], - [ - "11b-q2_K", - "4.3 GB" - ], - [ - "11b-q3_K_L", - "5.8 GB" - ], - [ - "11b-q3_K_M", - "5.4 GB" - ], - [ - "11b-q3_K_S", - "4.9 GB" - ], - [ - "11b-q4_0", - "6.4 GB" - ], - [ - "11b-q4_1", - "7.1 GB" - ], - [ - "11b-q4_K_M", - "6.8 GB" - ], - [ - "11b-q4_K_S", - "6.4 GB" - ], - [ - "11b-q5_0", - "7.7 GB" - ], - [ - "11b-q5_1", - "8.4 GB" - ], - [ - "11b-q5_K_M", - "8.2 GB" - ], - [ - "11b-q5_K_S", - "7.7 GB" - ], - [ - "11b-q6_K", - "9.2 GB" - ], - [ - "11b-q8_0", - "12 GB" - ] - ], - "image": false, - "author": "Technology Innovation Institute" - }, "notus": { "url": "https://ollama.com/library/notus", "description": "A 7B chat model fine-tuned with high-quality data and based on Zephyr.", "tags": [ [ "latest", - "4.1 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v1", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v1-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-v1-q2_K", - "3.1 GB" - ], - [ - "7b-v1-q3_K_L", - "3.8 GB" - ], - [ - "7b-v1-q3_K_M", - "3.5 GB" + "3.1\u202fGB" ], [ "7b-v1-q3_K_S", - "3.2 GB" + "3.2\u202fGB" + ], + [ + "7b-v1-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v1-q3_K_L", + "3.8\u202fGB" ], [ "7b-v1-q4_0", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v1-q4_1", - "4.6 GB" - ], - [ - "7b-v1-q4_K_M", - "4.4 GB" + "4.6\u202fGB" ], [ "7b-v1-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v1-q4_K_M", + "4.4\u202fGB" ], [ "7b-v1-q5_0", - "5.0 GB" + "5.0\u202fGB" ], [ "7b-v1-q5_1", - "5.4 GB" - ], - [ - "7b-v1-q5_K_M", - "5.1 GB" + "5.4\u202fGB" ], [ "7b-v1-q5_K_S", - "5.0 GB" + "5.0\u202fGB" + ], + [ + "7b-v1-q5_K_M", + "5.1\u202fGB" ], [ "7b-v1-q6_K", - "5.9 GB" + "5.9\u202fGB" ], [ "7b-v1-q8_0", - "7.7 GB" + "7.7\u202fGB" ] ], "image": false, @@ -19753,435 +21769,239 @@ "tags": [ [ "latest", - "74 GB" + "74\u202fGB" ], [ "132b", - "74 GB" + "74\u202fGB" ], [ "instruct", - "74 GB" + "74\u202fGB" ], [ "132b-instruct-fp16", - "263 GB" + "263\u202fGB" ], [ "132b-instruct-q2_K", - "48 GB" + "48\u202fGB" ], [ "132b-instruct-q4_0", - "74 GB" + "74\u202fGB" ], [ "132b-instruct-q8_0", - "140 GB" + "140\u202fGB" ] ], "image": false, "author": "Databricks" }, - "internlm2": { - "url": "https://ollama.com/library/internlm2", - "description": "InternLM2.5 is a 7B parameter model tailored for practical scenarios with outstanding reasoning capability.", - 
"tags": [ - [ - "latest", - "4.5 GB" - ], - [ - "7b", - "4.5 GB" - ], - [ - "7b-chat-v2.5-fp16", - "15 GB" - ], - [ - "7b-chat-v2.5-q2_K", - "3.0 GB" - ], - [ - "7b-chat-v2.5-q3_K_L", - "4.1 GB" - ], - [ - "7b-chat-v2.5-q3_K_M", - "3.8 GB" - ], - [ - "7b-chat-v2.5-q3_K_S", - "3.5 GB" - ], - [ - "7b-chat-v2.5-q4_0", - "4.5 GB" - ], - [ - "7b-chat-v2.5-q4_1", - "4.9 GB" - ], - [ - "7b-chat-v2.5-q4_K_M", - "4.7 GB" - ], - [ - "7b-chat-v2.5-q4_K_S", - "4.5 GB" - ], - [ - "7b-chat-v2.5-q5_0", - "5.4 GB" - ], - [ - "7b-chat-v2.5-q5_1", - "5.8 GB" - ], - [ - "7b-chat-v2.5-q5_K_M", - "5.5 GB" - ], - [ - "7b-chat-v2.5-q5_K_S", - "5.4 GB" - ], - [ - "7b-chat-v2.5-q6_K", - "6.4 GB" - ], - [ - "7b-chat-v2.5-q8_0", - "8.2 GB" - ] - ], - "image": false, - "author": "Intern LM" - }, - "alfred": { - "url": "https://ollama.com/library/alfred", - "description": "A robust conversational model designed to be used for both chat and instruct use cases.", - "tags": [ - [ - "latest", - "24 GB" - ], - [ - "40b", - "24 GB" - ], - [ - "40b-1023-q4_0", - "24 GB" - ], - [ - "40b-1023-q4_1", - "26 GB" - ], - [ - "40b-1023-q5_0", - "29 GB" - ], - [ - "40b-1023-q5_1", - "32 GB" - ], - [ - "40b-1023-q8_0", - "44 GB" - ] - ], - "image": false, - "author": "LightOn AI" - }, - "llama3-groq-tool-use": { - "url": "https://ollama.com/library/llama3-groq-tool-use", - "description": "A series of models from Groq that represent a significant advancement in open-source AI capabilities for tool use/function calling.", - "tags": [ - [ - "latest", - "4.7 GB" - ], - [ - "70b", - "40 GB" - ], - [ - "8b", - "4.7 GB" - ], - [ - "70b-fp16", - "141 GB" - ], - [ - "70b-q2_K", - "26 GB" - ], - [ - "70b-q3_K_L", - "37 GB" - ], - [ - "70b-q3_K_M", - "34 GB" - ], - [ - "70b-q3_K_S", - "31 GB" - ], - [ - "70b-q4_0", - "40 GB" - ], - [ - "70b-q4_1", - "44 GB" - ], - [ - "70b-q4_K_M", - "43 GB" - ], - [ - "70b-q4_K_S", - "40 GB" - ], - [ - "70b-q5_0", - "49 GB" - ], - [ - "70b-q5_1", - "53 GB" - ], - [ - "70b-q5_K_M", - "50 GB" - ], - [ - "70b-q5_K_S", - "49 GB" - ], - [ - "70b-q6_K", - "58 GB" - ], - [ - "70b-q8_0", - "75 GB" - ], - [ - "8b-fp16", - "16 GB" - ], - [ - "8b-q2_K", - "3.2 GB" - ], - [ - "8b-q3_K_L", - "4.3 GB" - ], - [ - "8b-q3_K_M", - "4.0 GB" - ], - [ - "8b-q3_K_S", - "3.7 GB" - ], - [ - "8b-q4_0", - "4.7 GB" - ], - [ - "8b-q4_1", - "5.1 GB" - ], - [ - "8b-q4_K_M", - "4.9 GB" - ], - [ - "8b-q4_K_S", - "4.7 GB" - ], - [ - "8b-q5_0", - "5.6 GB" - ], - [ - "8b-q5_1", - "6.1 GB" - ], - [ - "8b-q5_K_M", - "5.7 GB" - ], - [ - "8b-q5_K_S", - "5.6 GB" - ], - [ - "8b-q6_K", - "6.6 GB" - ], - [ - "8b-q8_0", - "8.5 GB" - ] - ], - "image": false, - "author": "Groq" - }, "mathstral": { "url": "https://ollama.com/library/mathstral", "description": "Math\u03a3tral: a 7B model designed for math reasoning and scientific discovery by Mistral AI.", "tags": [ [ "latest", - "4.1 GB" + "4.1\u202fGB" ], [ "7b", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v0.1-fp16", - "14 GB" + "14\u202fGB" ], [ "7b-v0.1-q2_K", - "2.7 GB" - ], - [ - "7b-v0.1-q3_K_L", - "3.8 GB" - ], - [ - "7b-v0.1-q3_K_M", - "3.5 GB" + "2.7\u202fGB" ], [ "7b-v0.1-q3_K_S", - "3.2 GB" + "3.2\u202fGB" + ], + [ + "7b-v0.1-q3_K_M", + "3.5\u202fGB" + ], + [ + "7b-v0.1-q3_K_L", + "3.8\u202fGB" ], [ "7b-v0.1-q4_0", - "4.1 GB" + "4.1\u202fGB" ], [ "7b-v0.1-q4_1", - "4.6 GB" - ], - [ - "7b-v0.1-q4_K_M", - "4.4 GB" + "4.6\u202fGB" ], [ "7b-v0.1-q4_K_S", - "4.1 GB" + "4.1\u202fGB" + ], + [ + "7b-v0.1-q4_K_M", + "4.4\u202fGB" ], [ "7b-v0.1-q5_0", - "5.0 GB" + "5.0\u202fGB" ], [ "7b-v0.1-q5_1", - "5.4 GB" - 
], - [ - "7b-v0.1-q5_K_M", - "5.1 GB" + "5.4\u202fGB" ], [ "7b-v0.1-q5_K_S", - "5.0 GB" + "5.0\u202fGB" + ], + [ + "7b-v0.1-q5_K_M", + "5.1\u202fGB" ], [ "7b-v0.1-q6_K", - "5.9 GB" + "5.9\u202fGB" ], [ "7b-v0.1-q8_0", - "7.7 GB" + "7.7\u202fGB" ] ], "image": false, "author": "Mistral AI" }, + "bge-m3": { + "url": "https://ollama.com/library/bge-m3", + "description": "BGE-M3 is a new model from BAAI distinguished for its versatility in Multi-Functionality, Multi-Linguality, and Multi-Granularity.", + "tags": [ + [ + "latest", + "1.2\u202fGB" + ], + [ + "567m", + "1.2\u202fGB" + ], + [ + "567m-fp16", + "1.2\u202fGB" + ] + ], + "image": false, + "author": "BGE Team" + }, + "alfred": { + "url": "https://ollama.com/library/alfred", + "description": "A robust conversational model designed to be used for both chat and instruct use cases.", + "tags": [ + [ + "latest", + "24\u202fGB" + ], + [ + "40b", + "24\u202fGB" + ], + [ + "40b-1023-q4_0", + "24\u202fGB" + ], + [ + "40b-1023-q4_1", + "26\u202fGB" + ], + [ + "40b-1023-q5_0", + "29\u202fGB" + ], + [ + "40b-1023-q5_1", + "32\u202fGB" + ], + [ + "40b-1023-q8_0", + "44\u202fGB" + ] + ], + "image": false, + "author": "LightOn AI" + }, "firefunction-v2": { "url": "https://ollama.com/library/firefunction-v2", "description": "An open weights function calling model based on Llama 3, competitive with GPT-4o function calling capabilities.", "tags": [ [ "latest", - "40 GB" + "40\u202fGB" ], [ "70b", - "40 GB" + "40\u202fGB" ], [ "70b-fp16", - "141 GB" + "141\u202fGB" ], [ "70b-q2_K", - "26 GB" - ], - [ - "70b-q3_K_L", - "37 GB" - ], - [ - "70b-q3_K_M", - "34 GB" + "26\u202fGB" ], [ "70b-q3_K_S", - "31 GB" + "31\u202fGB" + ], + [ + "70b-q3_K_M", + "34\u202fGB" + ], + [ + "70b-q3_K_L", + "37\u202fGB" ], [ "70b-q4_0", - "40 GB" + "40\u202fGB" ], [ "70b-q4_1", - "44 GB" - ], - [ - "70b-q4_K_M", - "43 GB" + "44\u202fGB" ], [ "70b-q4_K_S", - "40 GB" + "40\u202fGB" + ], + [ + "70b-q4_K_M", + "43\u202fGB" ], [ "70b-q5_0", - "49 GB" + "49\u202fGB" ], [ "70b-q5_1", - "53 GB" - ], - [ - "70b-q5_K_M", - "50 GB" + "53\u202fGB" ], [ "70b-q5_K_S", - "49 GB" + "49\u202fGB" + ], + [ + "70b-q5_K_M", + "50\u202fGB" ], [ "70b-q6_K", - "58 GB" + "58\u202fGB" ], [ "70b-q8_0", - "75 GB" + "75\u202fGB" ] ], "image": false, @@ -20193,74 +22013,114 @@ "tags": [ [ "latest", - "2.2 GB" + "2.2\u202fGB" ], [ "3.8b", - "2.2 GB" + "2.2\u202fGB" ], [ "3.8b-fp16", - "7.6 GB" + "7.6\u202fGB" ], [ "3.8b-q2_K", - "1.4 GB" - ], - [ - "3.8b-q3_K_L", - "2.1 GB" - ], - [ - "3.8b-q3_K_M", - "2.0 GB" + "1.4\u202fGB" ], [ "3.8b-q3_K_S", - "1.7 GB" + "1.7\u202fGB" + ], + [ + "3.8b-q3_K_M", + "2.0\u202fGB" + ], + [ + "3.8b-q3_K_L", + "2.1\u202fGB" ], [ "3.8b-q4_0", - "2.2 GB" + "2.2\u202fGB" ], [ "3.8b-q4_1", - "2.4 GB" - ], - [ - "3.8b-q4_K_M", - "2.4 GB" + "2.4\u202fGB" ], [ "3.8b-q4_K_S", - "2.2 GB" + "2.2\u202fGB" + ], + [ + "3.8b-q4_K_M", + "2.4\u202fGB" ], [ "3.8b-q5_0", - "2.6 GB" + "2.6\u202fGB" ], [ "3.8b-q5_1", - "2.9 GB" - ], - [ - "3.8b-q5_K_M", - "2.8 GB" + "2.9\u202fGB" ], [ "3.8b-q5_K_S", - "2.6 GB" + "2.6\u202fGB" + ], + [ + "3.8b-q5_K_M", + "2.8\u202fGB" ], [ "3.8b-q6_K", - "3.1 GB" + "3.1\u202fGB" ], [ "3.8b-q8_0", - "4.1 GB" + "4.1\u202fGB" ] ], "image": false, "author": "Numind" + }, + "bge-large": { + "url": "https://ollama.com/library/bge-large", + "description": "Embedding model from BAAI mapping texts to vectors.", + "tags": [ + [ + "latest", + "671\u202fMB" + ], + [ + "335m", + "671\u202fMB" + ], + [ + "335m-en-v1.5-fp16", + "671\u202fMB" + ] + ], + "image": false, + "author": 
"BGE Team" + }, + "paraphrase-multilingual": { + "url": "https://ollama.com/library/paraphrase-multilingual", + "description": "Sentence-transformers model that can be used for tasks like clustering or semantic search.", + "tags": [ + [ + "latest", + "563\u202fMB" + ], + [ + "278m", + "563\u202fMB" + ], + [ + "278m-mpnet-base-v2-fp16", + "563\u202fMB" + ] + ], + "image": false, + "author": "Paraphrase Team" } -} +} \ No newline at end of file diff --git a/src/available_models_descriptions.py b/src/available_models_descriptions.py index 353c011..504f754 100644 --- a/src/available_models_descriptions.py +++ b/src/available_models_descriptions.py @@ -1,6 +1,6 @@ descriptions = { 'llama3.1': _("Llama 3.1 is a new state-of-the-art model from Meta available in 8B, 70B and 405B parameter sizes."), - 'gemma2': _("Google Gemma 2 is now available in 2 sizes, 9B and 27B."), + 'gemma2': _("Google Gemma 2 is a high-performing and efficient model by now available in three sizes: 2B, 9B, and 27B."), 'mistral-nemo': _("A state-of-the-art 12B model with 128k context length, built by Mistral AI in collaboration with NVIDIA."), 'mistral-large': _("Mistral Large 2 is Mistral's new flagship model that is significantly more capable in code generation, mathematics, and reasoning with 128k context window and support for dozens of languages."), 'qwen2': _("Qwen2 is a new series of large language models from Alibaba group"), @@ -17,89 +17,96 @@ descriptions = { 'qwen': _("Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from 0.5B to 110B parameters"), 'llama2': _("Llama 2 is a collection of foundation language models ranging from 7B to 70B parameters."), 'codellama': _("A large language model that can use text prompts to generate and discuss code."), - 'dolphin-mixtral': _("Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of experts models that excels at coding tasks. Created by Eric Hartford."), 'nomic-embed-text': _("A high-performing open embedding model with a large token context window."), - 'llama2-uncensored': _("Uncensored Llama 2 model by George Sung and Jarrad Hope."), + 'dolphin-mixtral': _("Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of experts models that excels at coding tasks. Created by Eric Hartford."), 'phi': _("Phi-2: a 2.7B language model by Microsoft Research that demonstrates outstanding reasoning and language understanding capabilities."), + 'llama2-uncensored': _("Uncensored Llama 2 model by George Sung and Jarrad Hope."), 'deepseek-coder': _("DeepSeek Coder is a capable coding model trained on two trillion code and natural language tokens."), + 'mxbai-embed-large': _("State-of-the-art large embedding model from mixedbread.ai"), + 'zephyr': _("Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models that are trained to act as helpful assistants."), 'dolphin-mistral': _("The uncensored Dolphin model based on Mistral that excels at coding tasks. 
Updated to version 2.8."), + 'starcoder2': _("StarCoder2 is the next generation of transparently trained open code LLMs that comes in three sizes: 3B, 7B and 15B parameters."), 'orca-mini': _("A general-purpose model ranging from 3 billion parameters to 70 billion, suitable for entry-level hardware."), 'dolphin-llama3': _("Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on Llama 3 that has a variety of instruction, conversational, and coding skills."), - 'mxbai-embed-large': _("State-of-the-art large embedding model from mixedbread.ai"), - 'starcoder2': _("StarCoder2 is the next generation of transparently trained open code LLMs that comes in three sizes: 3B, 7B and 15B parameters."), - 'mistral-openorca': _("Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the Mistral 7B model using the OpenOrca dataset."), 'yi': _("Yi 1.5 is a high-performing, bilingual language model."), - 'zephyr': _("Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models that are trained to act as helpful assistants."), - 'llama2-chinese': _("Llama 2 based model fine tuned to improve Chinese dialogue ability."), + 'mistral-openorca': _("Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the Mistral 7B model using the OpenOrca dataset."), 'llava-llama3': _("A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several benchmarks."), - 'vicuna': _("General use chat model based on Llama and Llama 2 with 2K to 16K context sizes."), - 'nous-hermes2': _("The powerful family of models by Nous Research that excels at scientific discussion and coding tasks."), - 'tinyllama': _("The TinyLlama project is an open endeavor to train a compact 1.1B Llama model on 3 trillion tokens."), - 'wizard-vicuna-uncensored': _("Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on Llama 2 uncensored by Eric Hartford."), - 'codestral': _("Codestral is Mistral AI’s first-ever code model designed for code generation tasks."), 'starcoder': _("StarCoder is a code generation model trained on 80+ programming languages."), - 'wizardlm2': _("State of the art large language model from Microsoft AI with improved performance on complex chat, multilingual, reasoning and agent use cases."), + 'llama2-chinese': _("Llama 2 based model fine tuned to improve Chinese dialogue ability."), + 'vicuna': _("General use chat model based on Llama and Llama 2 with 2K to 16K context sizes."), + 'tinyllama': _("The TinyLlama project is an open endeavor to train a compact 1.1B Llama model on 3 trillion tokens."), + 'codestral': _("Codestral is Mistral AI’s first-ever code model designed for code generation tasks."), + 'wizard-vicuna-uncensored': _("Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on Llama 2 uncensored by Eric Hartford."), + 'nous-hermes2': _("The powerful family of models by Nous Research that excels at scientific discussion and coding tasks."), 'openchat': _("A family of open-source models trained on a wide variety of data, surpassing ChatGPT on various benchmarks. 
Updated to version 3.5-0106."), 'aya': _("Aya 23, released by Cohere, is a new family of state-of-the-art, multilingual models that support 23 languages."), + 'wizardlm2': _("State of the art large language model from Microsoft AI with improved performance on complex chat, multilingual, reasoning and agent use cases."), 'tinydolphin': _("An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset by Eric Hartford and based on TinyLlama."), - 'openhermes': _("OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully open datasets."), + 'granite-code': _("A family of open foundation models by IBM for Code Intelligence"), 'wizardcoder': _("State-of-the-art code generation model"), 'stable-code': _("Stable Code 3B is a coding model with instruct and code completion variants on par with models such as Code Llama 7B that are 2.5x larger."), + 'openhermes': _("OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully open datasets."), + 'all-minilm': _("Embedding models on very large sentence level datasets."), 'codeqwen': _("CodeQwen1.5 is a large language model pretrained on a large amount of code data."), + 'stablelm2': _("Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model trained on multilingual data in English, Spanish, German, Italian, French, Portuguese, and Dutch."), 'wizard-math': _("Model focused on math and logic problems"), 'neural-chat': _("A fine-tuned model based on Mistral with good coverage of domain and language."), - 'stablelm2': _("Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model trained on multilingual data in English, Spanish, German, Italian, French, Portuguese, and Dutch."), - 'granite-code': _("A family of open foundation models by IBM for Code Intelligence"), - 'all-minilm': _("Embedding models on very large sentence level datasets."), - 'phind-codellama': _("Code generation model based on Code Llama."), - 'dolphincoder': _("A 7B and 15B uncensored variant of the Dolphin model family that excels at coding, based on StarCoder2."), - 'nous-hermes': _("General use models based on Llama and Llama 2 from Nous Research."), - 'sqlcoder': _("SQLCoder is a code completion model fined-tuned on StarCoder for SQL generation tasks"), 'llama3-gradient': _("This model extends LLama-3 8B's context length from 8k to over 1m tokens."), - 'starling-lm': _("Starling is a large language model trained by reinforcement learning from AI feedback focused on improving chatbot helpfulness."), - 'yarn-llama2': _("An extension of Llama 2 that supports a context of up to 128k tokens."), + 'phind-codellama': _("Code generation model based on Code Llama."), + 'nous-hermes': _("General use models based on Llama and Llama 2 from Nous Research."), + 'dolphincoder': _("A 7B and 15B uncensored variant of the Dolphin model family that excels at coding, based on StarCoder2."), + 'sqlcoder': _("SQLCoder is a code completion model fined-tuned on StarCoder for SQL generation tasks"), 'xwinlm': _("Conversational model based on Llama 2 that performs competitively on various benchmarks."), 'deepseek-llm': _("An advanced language model crafted with 2 trillion bilingual tokens."), + 'yarn-llama2': _("An extension of Llama 2 that supports a context of up to 128k tokens."), 'llama3-chatqa': _("A model from NVIDIA based on Llama 3 that excels at conversational question answering (QA) and retrieval-augmented generation (RAG)."), - 'orca2': _("Orca 2 is built by Microsoft research, and are a fine-tuned version of Meta's Llama 2 
models. The model is designed to excel particularly in reasoning."), 'wizardlm': _("General use model based on Llama 2."), + 'starling-lm': _("Starling is a large language model trained by reinforcement learning from AI feedback focused on improving chatbot helpfulness."), + 'codegeex4': _("A versatile model for AI software development scenarios, including code completion."), + 'snowflake-arctic-embed': _("A suite of text embedding models by Snowflake, optimized for performance."), + 'orca2': _("Orca 2 is built by Microsoft research, and are a fine-tuned version of Meta's Llama 2 models. The model is designed to excel particularly in reasoning."), 'solar': _("A compact, yet powerful 10.7B large language model designed for single-turn conversation."), 'samantha-mistral': _("A companion assistant trained in philosophy, psychology, and personal relationships. Based on Mistral."), - 'dolphin-phi': _("2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language model by Microsoft Research."), - 'stable-beluga': _("Llama 2 based model fine tuned on an Orca-style dataset. Originally called Free Willy."), 'moondream': _("moondream2 is a small vision language model designed to run efficiently on edge devices."), - 'bakllava': _("BakLLaVA is a multimodal model consisting of the Mistral 7B base model augmented with the LLaVA architecture."), - 'wizardlm-uncensored': _("Uncensored version of Wizard LM model"), - 'snowflake-arctic-embed': _("A suite of text embedding models by Snowflake, optimized for performance."), + 'smollm': _("🪐 A family of small models with 135M, 360M, and 1.7B parameters, trained on a new high-quality dataset."), + 'stable-beluga': _("Llama 2 based model fine tuned on an Orca-style dataset. Originally called Free Willy."), + 'qwen2-math': _("Qwen2 Math is a series of specialized math language models built upon the Qwen2 LLMs, which significantly outperforms the mathematical capabilities of open-source models and even closed-source models (e.g., GPT4o)."), + 'dolphin-phi': _("2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language model by Microsoft Research."), 'deepseek-v2': _("A strong, economical, and efficient Mixture-of-Experts language model."), - 'medllama2': _("Fine-tuned Llama 2 model to answer medical questions based on an open source medical dataset."), - 'yarn-mistral': _("An extension of Mistral to support context windows of 64K or 128K."), - 'llama-pro': _("An expansion of Llama 2 that specializes in integrating both general language understanding and domain-specific knowledge, particularly in programming and mathematics."), - 'nous-hermes2-mixtral': _("The Nous Hermes 2 model from Nous Research, now trained over Mixtral."), - 'meditron': _("Open-source medical large language model adapted from Llama 2 to the medical domain."), - 'codeup': _("Great code generation model based on Llama2."), - 'nexusraven': _("Nexus Raven is a 13B instruction tuned model for function calling tasks."), - 'everythinglm': _("Uncensored Llama2 based model with support for a 16K context window."), - 'llava-phi3': _("A new small LLaVA model fine-tuned from Phi 3 Mini."), - 'codegeex4': _("A versatile model for AI software development scenarios, including code completion."), + 'bakllava': _("BakLLaVA is a multimodal model consisting of the Mistral 7B base model augmented with the LLaVA architecture."), 'glm4': _("A strong multi-lingual general language model with competitive performance to Llama 3."), + 'wizardlm-uncensored': _("Uncensored 
version of Wizard LM model"), + 'yarn-mistral': _("An extension of Mistral to support context windows of 64K or 128K."), + 'phi3.5': _("A lightweight AI model with 3.8 billion parameters with performance overtaking similarly and larger sized models."), + 'medllama2': _("Fine-tuned Llama 2 model to answer medical questions based on an open source medical dataset."), + 'llama-pro': _("An expansion of Llama 2 that specializes in integrating both general language understanding and domain-specific knowledge, particularly in programming and mathematics."), + 'llava-phi3': _("A new small LLaVA model fine-tuned from Phi 3 Mini."), + 'meditron': _("Open-source medical large language model adapted from Llama 2 to the medical domain."), + 'nous-hermes2-mixtral': _("The Nous Hermes 2 model from Nous Research, now trained over Mixtral."), + 'nexusraven': _("Nexus Raven is a 13B instruction tuned model for function calling tasks."), + 'codeup': _("Great code generation model based on Llama2."), + 'everythinglm': _("Uncensored Llama2 based model with support for a 16K context window."), + 'hermes3': _("Hermes 3 is the latest version of the flagship Hermes series of LLMs by Nous Research"), + 'internlm2': _("InternLM2.5 is a 7B parameter model tailored for practical scenarios with outstanding reasoning capability."), 'magicoder': _("🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic instruction data using OSS-Instruct, a novel approach to enlightening LLMs with open-source code snippets."), 'stablelm-zephyr': _("A lightweight chat model allowing accurate, and responsive output without requiring high-end hardware."), 'codebooga': _("A high-performing code instruct model created by merging two existing code models."), 'mistrallite': _("MistralLite is a fine-tuned model based on Mistral with enhanced capabilities of processing long contexts."), + 'llama3-groq-tool-use': _("A series of models from Groq that represent a significant advancement in open-source AI capabilities for tool use/function calling."), + 'falcon2': _("Falcon2 is an 11B parameters causal decoder-only model built by TII and trained over 5T tokens."), 'wizard-vicuna': _("Wizard Vicuna is a 13B parameter model based on Llama 2 trained by MelodysDreamj."), 'duckdb-nsql': _("7B parameter text-to-SQL model made by MotherDuck and Numbers Station."), 'megadolphin': _("MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by interleaving the model with itself."), - 'goliath': _("A language model created by combining two fine-tuned Llama 2 70B models into one."), 'notux': _("A top-performing mixture of experts model, fine-tuned with high-quality data."), + 'goliath': _("A language model created by combining two fine-tuned Llama 2 70B models into one."), 'open-orca-platypus2': _("Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. 
Designed for chat and code generation."), - 'falcon2': _("Falcon2 is an 11B parameters causal decoder-only model built by TII and trained over 5T tokens."), 'notus': _("A 7B chat model fine-tuned with high-quality data and based on Zephyr."), 'dbrx': _("DBRX is an open, general-purpose LLM created by Databricks."), - 'internlm2': _("InternLM2.5 is a 7B parameter model tailored for practical scenarios with outstanding reasoning capability."), - 'alfred': _("A robust conversational model designed to be used for both chat and instruct use cases."), - 'llama3-groq-tool-use': _("A series of models from Groq that represent a significant advancement in open-source AI capabilities for tool use/function calling."), 'mathstral': _("MathΣtral: a 7B model designed for math reasoning and scientific discovery by Mistral AI."), + 'bge-m3': _("BGE-M3 is a new model from BAAI distinguished for its versatility in Multi-Functionality, Multi-Linguality, and Multi-Granularity."), + 'alfred': _("A robust conversational model designed to be used for both chat and instruct use cases."), 'firefunction-v2': _("An open weights function calling model based on Llama 3, competitive with GPT-4o function calling capabilities."), 'nuextract': _("A 3.8B model fine-tuned on a private high-quality synthetic dataset for information extraction, based on Phi-3."), + 'bge-large': _("Embedding model from BAAI mapping texts to vectors."), + 'paraphrase-multilingual': _("Sentence-transformers model that can be used for tasks like clustering or semantic search."), } \ No newline at end of file