Added model descriptions to the localization process

jeffser 2024-07-07 17:41:01 -06:00
parent 04c47554b7
commit 6c851784e2
18 changed files with 5239 additions and 444 deletions


@ -4,3 +4,4 @@ data/com.jeffser.Alpaca.gschema.xml
src/main.py
src/window.py
src/window.ui
src/available_models_descriptions.py
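This hunk registers src/available_models_descriptions.py as a translation source, which is why the description strings below now appear as msgids in the template and in every language catalog. As a rough illustration only — the dictionary keys and module layout are assumptions, and just the description strings come from this commit — such a module could mark its strings for extraction like this:

import gettext

_ = gettext.gettext  # marks strings so the gettext extractor can collect them

# Hypothetical structure: illustrative model keys mapped to translatable
# descriptions; the extractor (xgettext) records each _() call with its file
# and line number, producing the "src/available_models_descriptions.py:N"
# references seen in the catalogs below.
descriptions = {
    "gemma2": _("Google Gemma 2 is now available in 2 sizes, 9B and 27B."),
    "llama3": _("Meta Llama 3: The most capable openly available LLM to date"),
    "qwen2": _("Qwen2 is a new series of large language models from Alibaba group"),
}

Because the strings are marked at their definition site, the same set of msgids lands in the template and in each .po file touched by this commit.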


@ -8,13 +8,13 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:55-0600\n"
"POT-Creation-Date: 2024-07-07 17:40-0600\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
"Language: \n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=CHARSET\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
#: data/com.jeffser.Alpaca.desktop.in:3
@ -1066,3 +1066,508 @@ msgstr ""
#: src/window.ui:1121
msgid "Insert new line"
msgstr ""
#: src/available_models_descriptions.py:2
msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B."
msgstr ""
#: src/available_models_descriptions.py:3
msgid "Meta Llama 3: The most capable openly available LLM to date"
msgstr ""
#: src/available_models_descriptions.py:4
msgid "Qwen2 is a new series of large language models from Alibaba group"
msgstr ""
#: src/available_models_descriptions.py:5
msgid ""
"An open-source Mixture-of-Experts code language model that achieves "
"performance comparable to GPT4-Turbo in code-specific tasks."
msgstr ""
#: src/available_models_descriptions.py:6
msgid ""
"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art "
"open models by Microsoft."
msgstr ""
#: src/available_models_descriptions.py:7
msgid ""
"Aya 23, released by Cohere, is a new family of state-of-the-art, "
"multilingual models that support 23 languages."
msgstr ""
#: src/available_models_descriptions.py:8
msgid "The 7B model released by Mistral AI, updated to version 0.3."
msgstr ""
#: src/available_models_descriptions.py:9
msgid ""
"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in "
"8x7b and 8x22b parameter sizes."
msgstr ""
#: src/available_models_descriptions.py:10
msgid ""
"CodeGemma is a collection of powerful, lightweight models that can perform a "
"variety of coding tasks like fill-in-the-middle code completion, code "
"generation, natural language understanding, mathematical reasoning, and "
"instruction following."
msgstr ""
#: src/available_models_descriptions.py:11
msgid ""
"Command R is a Large Language Model optimized for conversational interaction "
"and long context tasks."
msgstr ""
#: src/available_models_descriptions.py:12
msgid ""
"Command R+ is a powerful, scalable large language model purpose-built to "
"excel at real-world enterprise use cases."
msgstr ""
#: src/available_models_descriptions.py:13
msgid ""
"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines "
"a vision encoder and Vicuna for general-purpose visual and language "
"understanding. Updated to version 1.6."
msgstr ""
#: src/available_models_descriptions.py:14
msgid ""
"Gemma is a family of lightweight, state-of-the-art open models built by "
"Google DeepMind. Updated to version 1.1"
msgstr ""
#: src/available_models_descriptions.py:15
msgid ""
"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from "
"0.5B to 110B parameters"
msgstr ""
#: src/available_models_descriptions.py:16
msgid ""
"Llama 2 is a collection of foundation language models ranging from 7B to 70B "
"parameters."
msgstr ""
#: src/available_models_descriptions.py:17
msgid ""
"A large language model that can use text prompts to generate and discuss "
"code."
msgstr ""
#: src/available_models_descriptions.py:18
msgid ""
"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of "
"experts models that excels at coding tasks. Created by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:19
msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope."
msgstr ""
#: src/available_models_descriptions.py:20
msgid ""
"DeepSeek Coder is a capable coding model trained on two trillion code and "
"natural language tokens."
msgstr ""
#: src/available_models_descriptions.py:21
msgid ""
"A high-performing open embedding model with a large token context window."
msgstr ""
#: src/available_models_descriptions.py:22
msgid ""
"Phi-2: a 2.7B language model by Microsoft Research that demonstrates "
"outstanding reasoning and language understanding capabilities."
msgstr ""
#: src/available_models_descriptions.py:23
msgid ""
"The uncensored Dolphin model based on Mistral that excels at coding tasks. "
"Updated to version 2.8."
msgstr ""
#: src/available_models_descriptions.py:24
msgid ""
"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the "
"Mistral 7B model using the OpenOrca dataset."
msgstr ""
#: src/available_models_descriptions.py:25
msgid ""
"A general-purpose model ranging from 3 billion parameters to 70 billion, "
"suitable for entry-level hardware."
msgstr ""
#: src/available_models_descriptions.py:26
msgid "State-of-the-art large embedding model from mixedbread.ai"
msgstr ""
#: src/available_models_descriptions.py:27
msgid ""
"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on "
"Llama 3 that has a variety of instruction, conversational, and coding skills."
msgstr ""
#: src/available_models_descriptions.py:28
msgid ""
"StarCoder2 is the next generation of transparently trained open code LLMs "
"that comes in three sizes: 3B, 7B and 15B parameters."
msgstr ""
#: src/available_models_descriptions.py:29
msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability."
msgstr ""
#: src/available_models_descriptions.py:30
msgid ""
"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models "
"that are trained to act as helpful assistants."
msgstr ""
#: src/available_models_descriptions.py:31
msgid "Yi 1.5 is a high-performing, bilingual language model."
msgstr ""
#: src/available_models_descriptions.py:32
msgid ""
"The powerful family of models by Nous Research that excels at scientific "
"discussion and coding tasks."
msgstr ""
#: src/available_models_descriptions.py:33
msgid ""
"General use chat model based on Llama and Llama 2 with 2K to 16K context "
"sizes."
msgstr ""
#: src/available_models_descriptions.py:34
msgid ""
"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on "
"Llama 2 uncensored by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:35
msgid ""
"The TinyLlama project is an open endeavor to train a compact 1.1B Llama "
"model on 3 trillion tokens."
msgstr ""
#: src/available_models_descriptions.py:36
msgid ""
"State of the art large language model from Microsoft AI with improved "
"performance on complex chat, multilingual, reasoning and agent use cases."
msgstr ""
#: src/available_models_descriptions.py:37
msgid ""
"StarCoder is a code generation model trained on 80+ programming languages."
msgstr ""
#: src/available_models_descriptions.py:38
msgid ""
"Codestral is Mistral AIs first-ever code model designed for code generation "
"tasks."
msgstr ""
#: src/available_models_descriptions.py:39
msgid ""
"A family of open-source models trained on a wide variety of data, surpassing "
"ChatGPT on various benchmarks. Updated to version 3.5-0106."
msgstr ""
#: src/available_models_descriptions.py:40
msgid ""
"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset "
"by Eric Hartford and based on TinyLlama."
msgstr ""
#: src/available_models_descriptions.py:41
msgid ""
"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully "
"open datasets."
msgstr ""
#: src/available_models_descriptions.py:42
msgid "State-of-the-art code generation model"
msgstr ""
#: src/available_models_descriptions.py:43
msgid ""
"Stable Code 3B is a coding model with instruct and code completion variants "
"on par with models such as Code Llama 7B that are 2.5x larger."
msgstr ""
#: src/available_models_descriptions.py:44
msgid ""
"A fine-tuned model based on Mistral with good coverage of domain and "
"language."
msgstr ""
#: src/available_models_descriptions.py:45
msgid "Model focused on math and logic problems"
msgstr ""
#: src/available_models_descriptions.py:46
msgid ""
"CodeQwen1.5 is a large language model pretrained on a large amount of code "
"data."
msgstr ""
#: src/available_models_descriptions.py:47
msgid "Code generation model based on Code Llama."
msgstr ""
#: src/available_models_descriptions.py:48
msgid ""
"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model "
"trained on multilingual data in English, Spanish, German, Italian, French, "
"Portuguese, and Dutch."
msgstr ""
#: src/available_models_descriptions.py:49
msgid ""
"A 7B and 15B uncensored variant of the Dolphin model family that excels at "
"coding, based on StarCoder2."
msgstr ""
#: src/available_models_descriptions.py:50
msgid "Embedding models on very large sentence level datasets."
msgstr ""
#: src/available_models_descriptions.py:51
msgid "General use models based on Llama and Llama 2 from Nous Research."
msgstr ""
#: src/available_models_descriptions.py:52
msgid ""
"Starling is a large language model trained by reinforcement learning from AI "
"feedback focused on improving chatbot helpfulness."
msgstr ""
#: src/available_models_descriptions.py:53
msgid ""
"SQLCoder is a code completion model fined-tuned on StarCoder for SQL "
"generation tasks"
msgstr ""
#: src/available_models_descriptions.py:54
msgid ""
"Orca 2 is built by Microsoft research, and are a fine-tuned version of "
"Meta's Llama 2 models. The model is designed to excel particularly in "
"reasoning."
msgstr ""
#: src/available_models_descriptions.py:55
msgid ""
"This model extends LLama-3 8B's context length from 8k to over 1m tokens."
msgstr ""
#: src/available_models_descriptions.py:56
msgid "An advanced language model crafted with 2 trillion bilingual tokens."
msgstr ""
#: src/available_models_descriptions.py:57
msgid "An extension of Llama 2 that supports a context of up to 128k tokens."
msgstr ""
#: src/available_models_descriptions.py:58
msgid ""
"A model from NVIDIA based on Llama 3 that excels at conversational question "
"answering (QA) and retrieval-augmented generation (RAG)."
msgstr ""
#: src/available_models_descriptions.py:59
msgid ""
"A compact, yet powerful 10.7B large language model designed for single-turn "
"conversation."
msgstr ""
#: src/available_models_descriptions.py:60
msgid ""
"Conversational model based on Llama 2 that performs competitively on various "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:61
msgid "A family of open foundation models by IBM for Code Intelligence"
msgstr ""
#: src/available_models_descriptions.py:62
msgid ""
"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language "
"model by Microsoft Research."
msgstr ""
#: src/available_models_descriptions.py:63
msgid "General use model based on Llama 2."
msgstr ""
#: src/available_models_descriptions.py:64
msgid ""
"A companion assistant trained in philosophy, psychology, and personal "
"relationships. Based on Mistral."
msgstr ""
#: src/available_models_descriptions.py:65
msgid ""
"Llama 2 based model fine tuned on an Orca-style dataset. Originally called "
"Free Willy."
msgstr ""
#: src/available_models_descriptions.py:66
msgid ""
"BakLLaVA is a multimodal model consisting of the Mistral 7B base model "
"augmented with the LLaVA architecture."
msgstr ""
#: src/available_models_descriptions.py:67
msgid ""
"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:68
msgid "Uncensored version of Wizard LM model"
msgstr ""
#: src/available_models_descriptions.py:69
msgid ""
"Fine-tuned Llama 2 model to answer medical questions based on an open source "
"medical dataset."
msgstr ""
#: src/available_models_descriptions.py:70
msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral."
msgstr ""
#: src/available_models_descriptions.py:71
msgid "An extension of Mistral to support context windows of 64K or 128K."
msgstr ""
#: src/available_models_descriptions.py:72
msgid ""
"A suite of text embedding models by Snowflake, optimized for performance."
msgstr ""
#: src/available_models_descriptions.py:73
msgid ""
"An expansion of Llama 2 that specializes in integrating both general "
"language understanding and domain-specific knowledge, particularly in "
"programming and mathematics."
msgstr ""
#: src/available_models_descriptions.py:74
msgid "Great code generation model based on Llama2."
msgstr ""
#: src/available_models_descriptions.py:75
msgid ""
"Open-source medical large language model adapted from Llama 2 to the medical "
"domain."
msgstr ""
#: src/available_models_descriptions.py:76
msgid ""
"moondream2 is a small vision language model designed to run efficiently on "
"edge devices."
msgstr ""
#: src/available_models_descriptions.py:77
msgid "Uncensored Llama2 based model with support for a 16K context window."
msgstr ""
#: src/available_models_descriptions.py:78
msgid ""
"Nexus Raven is a 13B instruction tuned model for function calling tasks."
msgstr ""
#: src/available_models_descriptions.py:79
msgid ""
"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic "
"instruction data using OSS-Instruct, a novel approach to enlightening LLMs "
"with open-source code snippets."
msgstr ""
#: src/available_models_descriptions.py:80
msgid "A strong, economical, and efficient Mixture-of-Experts language model."
msgstr ""
#: src/available_models_descriptions.py:81
msgid ""
"A lightweight chat model allowing accurate, and responsive output without "
"requiring high-end hardware."
msgstr ""
#: src/available_models_descriptions.py:82
msgid ""
"A high-performing code instruct model created by merging two existing code "
"models."
msgstr ""
#: src/available_models_descriptions.py:83
msgid "A new small LLaVA model fine-tuned from Phi 3 Mini."
msgstr ""
#: src/available_models_descriptions.py:84
msgid ""
"MistralLite is a fine-tuned model based on Mistral with enhanced "
"capabilities of processing long contexts."
msgstr ""
#: src/available_models_descriptions.py:85
msgid ""
"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by "
"MelodysDreamj."
msgstr ""
#: src/available_models_descriptions.py:86
msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station."
msgstr ""
#: src/available_models_descriptions.py:87
msgid ""
"A language model created by combining two fine-tuned Llama 2 70B models into "
"one."
msgstr ""
#: src/available_models_descriptions.py:88
msgid ""
"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by "
"interleaving the model with itself."
msgstr ""
#: src/available_models_descriptions.py:89
msgid ""
"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. "
"Designed for chat and code generation."
msgstr ""
#: src/available_models_descriptions.py:90
msgid ""
"A top-performing mixture of experts model, fine-tuned with high-quality data."
msgstr ""
#: src/available_models_descriptions.py:91
msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr."
msgstr ""
#: src/available_models_descriptions.py:92
msgid "DBRX is an open, general-purpose LLM created by Databricks."
msgstr ""
#: src/available_models_descriptions.py:93
msgid ""
"Falcon2 is an 11B parameters causal decoder-only model built by TII and "
"trained over 5T tokens."
msgstr ""
#: src/available_models_descriptions.py:94
msgid ""
"A robust conversational model designed to be used for both chat and instruct "
"use cases."
msgstr ""

po/bn.po

@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:07-0600\n"
"POT-Creation-Date: 2024-07-07 17:40-0600\n"
"PO-Revision-Date: 2024-07-07 15:12-0600\n"
"Last-Translator: (YOUR NAME) <(YOUR EMAIL OPTIONAL)>\n"
"Language-Team: Bengali\n"
@ -1108,6 +1108,511 @@ msgstr "Paste"
msgid "Insert new line"
msgstr "Insert new line"
#: src/available_models_descriptions.py:2
msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B."
msgstr ""
#: src/available_models_descriptions.py:3
msgid "Meta Llama 3: The most capable openly available LLM to date"
msgstr ""
#: src/available_models_descriptions.py:4
msgid "Qwen2 is a new series of large language models from Alibaba group"
msgstr ""
#: src/available_models_descriptions.py:5
msgid ""
"An open-source Mixture-of-Experts code language model that achieves "
"performance comparable to GPT4-Turbo in code-specific tasks."
msgstr ""
#: src/available_models_descriptions.py:6
msgid ""
"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art "
"open models by Microsoft."
msgstr ""
#: src/available_models_descriptions.py:7
msgid ""
"Aya 23, released by Cohere, is a new family of state-of-the-art, "
"multilingual models that support 23 languages."
msgstr ""
#: src/available_models_descriptions.py:8
msgid "The 7B model released by Mistral AI, updated to version 0.3."
msgstr ""
#: src/available_models_descriptions.py:9
msgid ""
"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in "
"8x7b and 8x22b parameter sizes."
msgstr ""
#: src/available_models_descriptions.py:10
msgid ""
"CodeGemma is a collection of powerful, lightweight models that can perform a "
"variety of coding tasks like fill-in-the-middle code completion, code "
"generation, natural language understanding, mathematical reasoning, and "
"instruction following."
msgstr ""
#: src/available_models_descriptions.py:11
msgid ""
"Command R is a Large Language Model optimized for conversational interaction "
"and long context tasks."
msgstr ""
#: src/available_models_descriptions.py:12
msgid ""
"Command R+ is a powerful, scalable large language model purpose-built to "
"excel at real-world enterprise use cases."
msgstr ""
#: src/available_models_descriptions.py:13
msgid ""
"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines "
"a vision encoder and Vicuna for general-purpose visual and language "
"understanding. Updated to version 1.6."
msgstr ""
#: src/available_models_descriptions.py:14
msgid ""
"Gemma is a family of lightweight, state-of-the-art open models built by "
"Google DeepMind. Updated to version 1.1"
msgstr ""
#: src/available_models_descriptions.py:15
msgid ""
"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from "
"0.5B to 110B parameters"
msgstr ""
#: src/available_models_descriptions.py:16
msgid ""
"Llama 2 is a collection of foundation language models ranging from 7B to 70B "
"parameters."
msgstr ""
#: src/available_models_descriptions.py:17
msgid ""
"A large language model that can use text prompts to generate and discuss "
"code."
msgstr ""
#: src/available_models_descriptions.py:18
msgid ""
"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of "
"experts models that excels at coding tasks. Created by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:19
msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope."
msgstr ""
#: src/available_models_descriptions.py:20
msgid ""
"DeepSeek Coder is a capable coding model trained on two trillion code and "
"natural language tokens."
msgstr ""
#: src/available_models_descriptions.py:21
msgid ""
"A high-performing open embedding model with a large token context window."
msgstr ""
#: src/available_models_descriptions.py:22
msgid ""
"Phi-2: a 2.7B language model by Microsoft Research that demonstrates "
"outstanding reasoning and language understanding capabilities."
msgstr ""
#: src/available_models_descriptions.py:23
msgid ""
"The uncensored Dolphin model based on Mistral that excels at coding tasks. "
"Updated to version 2.8."
msgstr ""
#: src/available_models_descriptions.py:24
msgid ""
"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the "
"Mistral 7B model using the OpenOrca dataset."
msgstr ""
#: src/available_models_descriptions.py:25
msgid ""
"A general-purpose model ranging from 3 billion parameters to 70 billion, "
"suitable for entry-level hardware."
msgstr ""
#: src/available_models_descriptions.py:26
msgid "State-of-the-art large embedding model from mixedbread.ai"
msgstr ""
#: src/available_models_descriptions.py:27
msgid ""
"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on "
"Llama 3 that has a variety of instruction, conversational, and coding skills."
msgstr ""
#: src/available_models_descriptions.py:28
msgid ""
"StarCoder2 is the next generation of transparently trained open code LLMs "
"that comes in three sizes: 3B, 7B and 15B parameters."
msgstr ""
#: src/available_models_descriptions.py:29
msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability."
msgstr ""
#: src/available_models_descriptions.py:30
msgid ""
"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models "
"that are trained to act as helpful assistants."
msgstr ""
#: src/available_models_descriptions.py:31
msgid "Yi 1.5 is a high-performing, bilingual language model."
msgstr ""
#: src/available_models_descriptions.py:32
msgid ""
"The powerful family of models by Nous Research that excels at scientific "
"discussion and coding tasks."
msgstr ""
#: src/available_models_descriptions.py:33
msgid ""
"General use chat model based on Llama and Llama 2 with 2K to 16K context "
"sizes."
msgstr ""
#: src/available_models_descriptions.py:34
msgid ""
"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on "
"Llama 2 uncensored by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:35
msgid ""
"The TinyLlama project is an open endeavor to train a compact 1.1B Llama "
"model on 3 trillion tokens."
msgstr ""
#: src/available_models_descriptions.py:36
msgid ""
"State of the art large language model from Microsoft AI with improved "
"performance on complex chat, multilingual, reasoning and agent use cases."
msgstr ""
#: src/available_models_descriptions.py:37
msgid ""
"StarCoder is a code generation model trained on 80+ programming languages."
msgstr ""
#: src/available_models_descriptions.py:38
msgid ""
"Codestral is Mistral AIs first-ever code model designed for code generation "
"tasks."
msgstr ""
#: src/available_models_descriptions.py:39
msgid ""
"A family of open-source models trained on a wide variety of data, surpassing "
"ChatGPT on various benchmarks. Updated to version 3.5-0106."
msgstr ""
#: src/available_models_descriptions.py:40
msgid ""
"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset "
"by Eric Hartford and based on TinyLlama."
msgstr ""
#: src/available_models_descriptions.py:41
msgid ""
"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully "
"open datasets."
msgstr ""
#: src/available_models_descriptions.py:42
msgid "State-of-the-art code generation model"
msgstr ""
#: src/available_models_descriptions.py:43
msgid ""
"Stable Code 3B is a coding model with instruct and code completion variants "
"on par with models such as Code Llama 7B that are 2.5x larger."
msgstr ""
#: src/available_models_descriptions.py:44
msgid ""
"A fine-tuned model based on Mistral with good coverage of domain and "
"language."
msgstr ""
#: src/available_models_descriptions.py:45
msgid "Model focused on math and logic problems"
msgstr ""
#: src/available_models_descriptions.py:46
msgid ""
"CodeQwen1.5 is a large language model pretrained on a large amount of code "
"data."
msgstr ""
#: src/available_models_descriptions.py:47
msgid "Code generation model based on Code Llama."
msgstr ""
#: src/available_models_descriptions.py:48
msgid ""
"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model "
"trained on multilingual data in English, Spanish, German, Italian, French, "
"Portuguese, and Dutch."
msgstr ""
#: src/available_models_descriptions.py:49
msgid ""
"A 7B and 15B uncensored variant of the Dolphin model family that excels at "
"coding, based on StarCoder2."
msgstr ""
#: src/available_models_descriptions.py:50
msgid "Embedding models on very large sentence level datasets."
msgstr ""
#: src/available_models_descriptions.py:51
msgid "General use models based on Llama and Llama 2 from Nous Research."
msgstr ""
#: src/available_models_descriptions.py:52
msgid ""
"Starling is a large language model trained by reinforcement learning from AI "
"feedback focused on improving chatbot helpfulness."
msgstr ""
#: src/available_models_descriptions.py:53
msgid ""
"SQLCoder is a code completion model fined-tuned on StarCoder for SQL "
"generation tasks"
msgstr ""
#: src/available_models_descriptions.py:54
msgid ""
"Orca 2 is built by Microsoft research, and are a fine-tuned version of "
"Meta's Llama 2 models. The model is designed to excel particularly in "
"reasoning."
msgstr ""
#: src/available_models_descriptions.py:55
msgid ""
"This model extends LLama-3 8B's context length from 8k to over 1m tokens."
msgstr ""
#: src/available_models_descriptions.py:56
msgid "An advanced language model crafted with 2 trillion bilingual tokens."
msgstr ""
#: src/available_models_descriptions.py:57
msgid "An extension of Llama 2 that supports a context of up to 128k tokens."
msgstr ""
#: src/available_models_descriptions.py:58
msgid ""
"A model from NVIDIA based on Llama 3 that excels at conversational question "
"answering (QA) and retrieval-augmented generation (RAG)."
msgstr ""
#: src/available_models_descriptions.py:59
msgid ""
"A compact, yet powerful 10.7B large language model designed for single-turn "
"conversation."
msgstr ""
#: src/available_models_descriptions.py:60
msgid ""
"Conversational model based on Llama 2 that performs competitively on various "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:61
msgid "A family of open foundation models by IBM for Code Intelligence"
msgstr ""
#: src/available_models_descriptions.py:62
msgid ""
"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language "
"model by Microsoft Research."
msgstr ""
#: src/available_models_descriptions.py:63
msgid "General use model based on Llama 2."
msgstr ""
#: src/available_models_descriptions.py:64
msgid ""
"A companion assistant trained in philosophy, psychology, and personal "
"relationships. Based on Mistral."
msgstr ""
#: src/available_models_descriptions.py:65
msgid ""
"Llama 2 based model fine tuned on an Orca-style dataset. Originally called "
"Free Willy."
msgstr ""
#: src/available_models_descriptions.py:66
msgid ""
"BakLLaVA is a multimodal model consisting of the Mistral 7B base model "
"augmented with the LLaVA architecture."
msgstr ""
#: src/available_models_descriptions.py:67
msgid ""
"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:68
msgid "Uncensored version of Wizard LM model"
msgstr ""
#: src/available_models_descriptions.py:69
msgid ""
"Fine-tuned Llama 2 model to answer medical questions based on an open source "
"medical dataset."
msgstr ""
#: src/available_models_descriptions.py:70
msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral."
msgstr ""
#: src/available_models_descriptions.py:71
msgid "An extension of Mistral to support context windows of 64K or 128K."
msgstr ""
#: src/available_models_descriptions.py:72
msgid ""
"A suite of text embedding models by Snowflake, optimized for performance."
msgstr ""
#: src/available_models_descriptions.py:73
msgid ""
"An expansion of Llama 2 that specializes in integrating both general "
"language understanding and domain-specific knowledge, particularly in "
"programming and mathematics."
msgstr ""
#: src/available_models_descriptions.py:74
msgid "Great code generation model based on Llama2."
msgstr ""
#: src/available_models_descriptions.py:75
msgid ""
"Open-source medical large language model adapted from Llama 2 to the medical "
"domain."
msgstr ""
#: src/available_models_descriptions.py:76
msgid ""
"moondream2 is a small vision language model designed to run efficiently on "
"edge devices."
msgstr ""
#: src/available_models_descriptions.py:77
msgid "Uncensored Llama2 based model with support for a 16K context window."
msgstr ""
#: src/available_models_descriptions.py:78
msgid ""
"Nexus Raven is a 13B instruction tuned model for function calling tasks."
msgstr ""
#: src/available_models_descriptions.py:79
msgid ""
"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic "
"instruction data using OSS-Instruct, a novel approach to enlightening LLMs "
"with open-source code snippets."
msgstr ""
#: src/available_models_descriptions.py:80
msgid "A strong, economical, and efficient Mixture-of-Experts language model."
msgstr ""
#: src/available_models_descriptions.py:81
msgid ""
"A lightweight chat model allowing accurate, and responsive output without "
"requiring high-end hardware."
msgstr ""
#: src/available_models_descriptions.py:82
msgid ""
"A high-performing code instruct model created by merging two existing code "
"models."
msgstr ""
#: src/available_models_descriptions.py:83
msgid "A new small LLaVA model fine-tuned from Phi 3 Mini."
msgstr ""
#: src/available_models_descriptions.py:84
msgid ""
"MistralLite is a fine-tuned model based on Mistral with enhanced "
"capabilities of processing long contexts."
msgstr ""
#: src/available_models_descriptions.py:85
msgid ""
"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by "
"MelodysDreamj."
msgstr ""
#: src/available_models_descriptions.py:86
msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station."
msgstr ""
#: src/available_models_descriptions.py:87
msgid ""
"A language model created by combining two fine-tuned Llama 2 70B models into "
"one."
msgstr ""
#: src/available_models_descriptions.py:88
msgid ""
"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by "
"interleaving the model with itself."
msgstr ""
#: src/available_models_descriptions.py:89
msgid ""
"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. "
"Designed for chat and code generation."
msgstr ""
#: src/available_models_descriptions.py:90
msgid ""
"A top-performing mixture of experts model, fine-tuned with high-quality data."
msgstr ""
#: src/available_models_descriptions.py:91
msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr."
msgstr ""
#: src/available_models_descriptions.py:92
msgid "DBRX is an open, general-purpose LLM created by Databricks."
msgstr ""
#: src/available_models_descriptions.py:93
msgid ""
"Falcon2 is an 11B parameters causal decoder-only model built by TII and "
"trained over 5T tokens."
msgstr ""
#: src/available_models_descriptions.py:94
msgid ""
"A robust conversational model designed to be used for both chat and instruct "
"use cases."
msgstr ""
#~ msgid "Message Received"
#~ msgstr "Message Received"

po/bn.po~

@ -6,8 +6,8 @@
msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: https://github.com/Jeffser/Alpaca\n"
"POT-Creation-Date: 2024-07-02 18:22-0600\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:07-0600\n"
"PO-Revision-Date: 2024-07-07 15:12-0600\n"
"Last-Translator: (YOUR NAME) <(YOUR EMAIL OPTIONAL)>\n"
"Language-Team: Bengali\n"
@ -65,7 +65,7 @@ msgstr "Plain text documents recognition"
msgid "Import and export chats"
msgstr "Import and export chats"
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860
msgid "Disclaimer"
msgstr "Disclaimer"
@ -667,7 +667,7 @@ msgstr "0.1.1 Stable Release"
msgid "This is the first public version of Alpaca"
msgstr "This is the first public version of Alpaca"
#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41
#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41
msgid "New Chat"
msgstr "New Chat"
@ -760,67 +760,67 @@ msgstr "Chat exported successfully"
msgid "Chat imported successfully"
msgstr "Chat imported successfully"
#: src/window.py:278
#: src/window.py:280
msgid "Close"
msgstr "Close"
#: src/window.py:279 src/window.ui:806
#: src/window.py:281 src/window.ui:813
msgid "Next"
msgstr "Next"
#: src/window.py:329
#: src/window.py:332
msgid "Pulling in the background..."
msgstr "Pulling in the background..."
#: src/window.py:381
#: src/window.py:384
msgid "Stop Creating '{}'"
msgstr "Stop Creating '{}'"
#: src/window.py:418
#: src/window.py:421
msgid "image"
msgstr "image"
#: src/window.py:588
#: src/window.py:593
msgid "Remove Message"
msgstr "Remove Message"
#: src/window.py:593 src/window.py:841
#: src/window.py:598 src/window.py:869
msgid "Copy Message"
msgstr "Copy Message"
#: src/window.py:598
#: src/window.py:603
msgid "Edit Message"
msgstr "Edit Message"
#: src/window.py:729
#: src/window.py:661
msgid "Missing Image"
msgstr ""
#: src/window.py:677
msgid "Missing image"
msgstr ""
#: src/window.py:757
msgid "Remove '{} ({})'"
msgstr "Remove '{} ({})'"
#: src/window.py:882
msgid "Message Received"
msgstr "Message Received"
#: src/window.py:882
msgid "New message from '{}'"
msgstr "New message from '{}'"
#: src/window.py:939
#: src/window.py:969
msgid "Task Complete"
msgstr "Task Complete"
#: src/window.py:939
#: src/window.py:969
msgid "Model '{}' pulled successfully."
msgstr "Model '{}' pulled successfully."
#: src/window.py:944
#: src/window.py:974
msgid "Pull Model Error"
msgstr "Pull Model Error"
#: src/window.py:944
#: src/window.py:974
msgid "Failed to pull model '{}' due to network error."
msgstr "Failed to pull model '{}' due to network error."
#: src/window.py:978
#: src/window.py:1008
msgid "Stop Pulling '{} ({})'"
msgstr "Stop Pulling '{} ({})'"
@ -832,7 +832,7 @@ msgstr "Menu"
msgid "Toggle Sidebar"
msgstr "Toggle Sidebar"
#: src/window.ui:107 src/window.ui:595
#: src/window.ui:107 src/window.ui:587
msgid "Manage Models"
msgstr "Manage Models"
@ -844,27 +844,27 @@ msgstr "Chat Menu"
msgid "Attach File"
msgstr "Attach File"
#: src/window.ui:238 src/window.ui:1120
#: src/window.ui:242 src/window.ui:1127
msgid "Send Message"
msgstr "Send Message"
#: src/window.ui:286 src/window.ui:965 src/window.ui:1079
#: src/window.ui:290 src/window.ui:972 src/window.ui:1086
msgid "Preferences"
msgstr "Preferences"
#: src/window.ui:289 src/window.ui:1057
#: src/window.ui:293 src/window.ui:1064
msgid "General"
msgstr "General"
#: src/window.ui:297
#: src/window.ui:299
msgid "Use Remote Connection to Ollama"
msgstr "Use Remote Connection to Ollama"
#: src/window.ui:303
#: src/window.ui:305
msgid "URL of Remote Instance"
msgstr "URL of Remote Instance"
#: src/window.ui:310
#: src/window.ui:312
msgid "Bearer Token (Optional)"
msgstr "Bearer Token (Optional)"
@ -872,15 +872,11 @@ msgstr "Bearer Token (Optional)"
msgid "Run Alpaca In Background"
msgstr "Run Alpaca In Background"
#: src/window.ui:331
msgid "Model"
msgstr "Model"
#: src/window.ui:341
#: src/window.ui:333
msgid "Temperature"
msgstr "Temperature"
#: src/window.ui:342
#: src/window.ui:334
msgid ""
"The temperature of the model. Increasing the temperature will make the model "
"answer more creatively. (Default: 0.8)"
@ -888,11 +884,11 @@ msgstr ""
"The temperature of the model. Increasing the temperature will make the model "
"answer more creatively. (Default: 0.8)"
#: src/window.ui:357
#: src/window.ui:349
msgid "Seed"
msgstr "Seed"
#: src/window.ui:358
#: src/window.ui:350
msgid ""
"Sets the random number seed to use for generation. Setting this to a "
"specific number will make the model generate the same text for the same "
@ -902,11 +898,11 @@ msgstr ""
"specific number will make the model generate the same text for the same "
"prompt. (Default: 0 (random))"
#: src/window.ui:372
#: src/window.ui:364
msgid "Keep Alive Time"
msgstr "Keep Alive Time"
#: src/window.ui:373
#: src/window.ui:365
msgid ""
"Controls how long the model will stay loaded into memory following the "
"request in minutes (Default: 5)"
@ -914,15 +910,15 @@ msgstr ""
"Controls how long the model will stay loaded into memory following the "
"request in minutes (Default: 5)"
#: src/window.ui:389
#: src/window.ui:381
msgid "Ollama Instance"
msgstr "Ollama Instance"
#: src/window.ui:393
#: src/window.ui:385
msgid "Ollama Overrides"
msgstr "Ollama Overrides"
#: src/window.ui:394
#: src/window.ui:386
msgid ""
"Manage the arguments used on Ollama, any changes on this page only applies "
"to the integrated instance, the instance will restart if you make changes."
@ -930,31 +926,31 @@ msgstr ""
"Manage the arguments used on Ollama, any changes on this page only applies "
"to the integrated instance, the instance will restart if you make changes."
#: src/window.ui:477
#: src/window.ui:469
msgid "Create"
msgstr "Create"
#: src/window.ui:490 src/window.ui:605
#: src/window.ui:482 src/window.ui:597
msgid "Create Model"
msgstr "Create Model"
#: src/window.ui:516
#: src/window.ui:508
msgid "Base"
msgstr "Base"
#: src/window.ui:534
#: src/window.ui:526
msgid "Name"
msgstr "Name"
#: src/window.ui:540
#: src/window.ui:532
msgid "Context"
msgstr "Context"
#: src/window.ui:555
#: src/window.ui:547
msgid "Template"
msgstr "Template"
#: src/window.ui:561
#: src/window.ui:553
msgid ""
"Some models require a specific template. Please visit the model's website "
"for more information if you're unsure."
@ -962,39 +958,45 @@ msgstr ""
"Some models require a specific template. Please visit the model's website "
"for more information if you're unsure."
#: src/window.ui:612
#: src/window.ui:604
msgid "Search Model"
msgstr "Search Model"
#: src/window.ui:672
#: src/window.ui:664
msgid "No Models Found"
msgstr "No Models Found"
#: src/window.ui:673
#: src/window.ui:665
msgid "Try a different search"
msgstr "Try a different search"
#: src/window.ui:738
#: src/window.ui:708
msgid ""
"By downloading this model you accept the license agreement available on the "
"model's website."
msgstr ""
#: src/window.ui:745
msgid "Open with Default App"
msgstr "Open with Default App"
#: src/window.ui:790
#: src/window.ui:797
msgid "Previous"
msgstr "Previous"
#: src/window.ui:833
#: src/window.ui:840
msgid "Welcome to Alpaca"
msgstr "Welcome to Alpaca"
#: src/window.ui:834
#: src/window.ui:841
msgid "Powered by Ollama"
msgstr "Powered by Ollama"
#: src/window.ui:837
#: src/window.ui:844
msgid "Ollama Website"
msgstr "Ollama Website"
#: src/window.ui:854
#: src/window.ui:861
msgid ""
"Alpaca and its developers are not liable for any damages to devices or "
"software resulting from the execution of code generated by an AI model. "
@ -1004,11 +1006,11 @@ msgstr ""
"software resulting from the execution of code generated by an AI model. "
"Please exercise caution and review the code carefully before running it."
#: src/window.ui:865
#: src/window.ui:872
msgid "Featured Models"
msgstr "Featured Models"
#: src/window.ui:866
#: src/window.ui:873
msgid ""
"Alpaca works locally on your device, to start chatting you'll need an AI "
"model, you can either pull models from this list or the 'Manage Models' menu "
@ -1018,90 +1020,99 @@ msgstr ""
"model, you can either pull models from this list or the 'Manage Models' menu "
"later."
#: src/window.ui:876
#: src/window.ui:883
msgid "Built by Meta"
msgstr "Built by Meta"
#: src/window.ui:894
#: src/window.ui:901
msgid "Built by Google DeepMind"
msgstr "Built by Google DeepMind"
#: src/window.ui:912
#: src/window.ui:919
msgid "Built by Microsoft"
msgstr "Built by Microsoft"
#: src/window.ui:930
#: src/window.ui:937
msgid "Multimodal AI with image recognition"
msgstr "Multimodal AI with image recognition"
#: src/window.ui:959
#: src/window.ui:966
msgid "Import Chat"
msgstr "Import Chat"
#: src/window.ui:969
#: src/window.ui:976
msgid "Keyboard Shortcuts"
msgstr "Keyboard Shortcuts"
#: src/window.ui:973
#: src/window.ui:980
msgid "About Alpaca"
msgstr "About Alpaca"
#: src/window.ui:980 src/window.ui:999
#: src/window.ui:987 src/window.ui:1006
msgid "Rename Chat"
msgstr "Rename Chat"
#: src/window.ui:984 src/window.ui:1003
#: src/window.ui:991 src/window.ui:1010
msgid "Export Chat"
msgstr "Export Chat"
#: src/window.ui:988
#: src/window.ui:995
msgid "Clear Chat"
msgstr "Clear Chat"
#: src/window.ui:995
#: src/window.ui:1002
msgid "Delete Chat"
msgstr "Delete Chat"
#: src/window.ui:1011
#: src/window.ui:1018
msgid "From Existing Model"
msgstr "From Existing Model"
#: src/window.ui:1015
#: src/window.ui:1022
msgid "From GGUF File (Experimental)"
msgstr "From GGUF File (Experimental)"
#: src/window.ui:1061
#: src/window.ui:1068
msgid "Close application"
msgstr "Close application"
#: src/window.ui:1067
#: src/window.ui:1074
msgid "Import chat"
msgstr "Import chat"
#: src/window.ui:1073
#: src/window.ui:1080
msgid "Clear chat"
msgstr "Clear chat"
#: src/window.ui:1085
#: src/window.ui:1092
msgid "New chat"
msgstr "New chat"
#: src/window.ui:1091
#: src/window.ui:1098
msgid "Show shortcuts window"
msgstr "Show shortcuts window"
#: src/window.ui:1098
#: src/window.ui:1105
msgid "Editor"
msgstr "Editor"
#: src/window.ui:1102
#: src/window.ui:1109
msgid "Copy"
msgstr "Copy"
#: src/window.ui:1108
#: src/window.ui:1115
msgid "Paste"
msgstr "Paste"
#: src/window.ui:1114
#: src/window.ui:1121
msgid "Insert new line"
msgstr "Insert new line"
#~ msgid "Message Received"
#~ msgstr "Message Received"
#~ msgid "New message from '{}'"
#~ msgstr "New message from '{}'"
#~ msgid "Model"
#~ msgstr "Model"

po/es.po

@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:04-0600\n"
"POT-Creation-Date: 2024-07-07 17:40-0600\n"
"PO-Revision-Date: 2024-05-19 19:44-0600\n"
"Last-Translator: Jeffry Samuel Eduarte Rojas <jeffrysamuer@gmail.com>\n"
"Language-Team: Spanish\n"
@ -1193,6 +1193,511 @@ msgstr "Pegar"
msgid "Insert new line"
msgstr "Saltar línea"
#: src/available_models_descriptions.py:2
msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B."
msgstr ""
#: src/available_models_descriptions.py:3
msgid "Meta Llama 3: The most capable openly available LLM to date"
msgstr ""
#: src/available_models_descriptions.py:4
msgid "Qwen2 is a new series of large language models from Alibaba group"
msgstr ""
#: src/available_models_descriptions.py:5
msgid ""
"An open-source Mixture-of-Experts code language model that achieves "
"performance comparable to GPT4-Turbo in code-specific tasks."
msgstr ""
#: src/available_models_descriptions.py:6
msgid ""
"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art "
"open models by Microsoft."
msgstr ""
#: src/available_models_descriptions.py:7
msgid ""
"Aya 23, released by Cohere, is a new family of state-of-the-art, "
"multilingual models that support 23 languages."
msgstr ""
#: src/available_models_descriptions.py:8
msgid "The 7B model released by Mistral AI, updated to version 0.3."
msgstr ""
#: src/available_models_descriptions.py:9
msgid ""
"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in "
"8x7b and 8x22b parameter sizes."
msgstr ""
#: src/available_models_descriptions.py:10
msgid ""
"CodeGemma is a collection of powerful, lightweight models that can perform a "
"variety of coding tasks like fill-in-the-middle code completion, code "
"generation, natural language understanding, mathematical reasoning, and "
"instruction following."
msgstr ""
#: src/available_models_descriptions.py:11
msgid ""
"Command R is a Large Language Model optimized for conversational interaction "
"and long context tasks."
msgstr ""
#: src/available_models_descriptions.py:12
msgid ""
"Command R+ is a powerful, scalable large language model purpose-built to "
"excel at real-world enterprise use cases."
msgstr ""
#: src/available_models_descriptions.py:13
msgid ""
"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines "
"a vision encoder and Vicuna for general-purpose visual and language "
"understanding. Updated to version 1.6."
msgstr ""
#: src/available_models_descriptions.py:14
msgid ""
"Gemma is a family of lightweight, state-of-the-art open models built by "
"Google DeepMind. Updated to version 1.1"
msgstr ""
#: src/available_models_descriptions.py:15
msgid ""
"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from "
"0.5B to 110B parameters"
msgstr ""
#: src/available_models_descriptions.py:16
msgid ""
"Llama 2 is a collection of foundation language models ranging from 7B to 70B "
"parameters."
msgstr ""
#: src/available_models_descriptions.py:17
msgid ""
"A large language model that can use text prompts to generate and discuss "
"code."
msgstr ""
#: src/available_models_descriptions.py:18
msgid ""
"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of "
"experts models that excels at coding tasks. Created by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:19
msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope."
msgstr ""
#: src/available_models_descriptions.py:20
msgid ""
"DeepSeek Coder is a capable coding model trained on two trillion code and "
"natural language tokens."
msgstr ""
#: src/available_models_descriptions.py:21
msgid ""
"A high-performing open embedding model with a large token context window."
msgstr ""
#: src/available_models_descriptions.py:22
msgid ""
"Phi-2: a 2.7B language model by Microsoft Research that demonstrates "
"outstanding reasoning and language understanding capabilities."
msgstr ""
#: src/available_models_descriptions.py:23
msgid ""
"The uncensored Dolphin model based on Mistral that excels at coding tasks. "
"Updated to version 2.8."
msgstr ""
#: src/available_models_descriptions.py:24
msgid ""
"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the "
"Mistral 7B model using the OpenOrca dataset."
msgstr ""
#: src/available_models_descriptions.py:25
msgid ""
"A general-purpose model ranging from 3 billion parameters to 70 billion, "
"suitable for entry-level hardware."
msgstr ""
#: src/available_models_descriptions.py:26
msgid "State-of-the-art large embedding model from mixedbread.ai"
msgstr ""
#: src/available_models_descriptions.py:27
msgid ""
"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on "
"Llama 3 that has a variety of instruction, conversational, and coding skills."
msgstr ""
#: src/available_models_descriptions.py:28
msgid ""
"StarCoder2 is the next generation of transparently trained open code LLMs "
"that comes in three sizes: 3B, 7B and 15B parameters."
msgstr ""
#: src/available_models_descriptions.py:29
msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability."
msgstr ""
#: src/available_models_descriptions.py:30
msgid ""
"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models "
"that are trained to act as helpful assistants."
msgstr ""
#: src/available_models_descriptions.py:31
msgid "Yi 1.5 is a high-performing, bilingual language model."
msgstr ""
#: src/available_models_descriptions.py:32
msgid ""
"The powerful family of models by Nous Research that excels at scientific "
"discussion and coding tasks."
msgstr ""
#: src/available_models_descriptions.py:33
msgid ""
"General use chat model based on Llama and Llama 2 with 2K to 16K context "
"sizes."
msgstr ""
#: src/available_models_descriptions.py:34
msgid ""
"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on "
"Llama 2 uncensored by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:35
msgid ""
"The TinyLlama project is an open endeavor to train a compact 1.1B Llama "
"model on 3 trillion tokens."
msgstr ""
#: src/available_models_descriptions.py:36
msgid ""
"State of the art large language model from Microsoft AI with improved "
"performance on complex chat, multilingual, reasoning and agent use cases."
msgstr ""
#: src/available_models_descriptions.py:37
msgid ""
"StarCoder is a code generation model trained on 80+ programming languages."
msgstr ""
#: src/available_models_descriptions.py:38
msgid ""
"Codestral is Mistral AIs first-ever code model designed for code generation "
"tasks."
msgstr ""
#: src/available_models_descriptions.py:39
msgid ""
"A family of open-source models trained on a wide variety of data, surpassing "
"ChatGPT on various benchmarks. Updated to version 3.5-0106."
msgstr ""
#: src/available_models_descriptions.py:40
msgid ""
"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset "
"by Eric Hartford and based on TinyLlama."
msgstr ""
#: src/available_models_descriptions.py:41
msgid ""
"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully "
"open datasets."
msgstr ""
#: src/available_models_descriptions.py:42
msgid "State-of-the-art code generation model"
msgstr ""
#: src/available_models_descriptions.py:43
msgid ""
"Stable Code 3B is a coding model with instruct and code completion variants "
"on par with models such as Code Llama 7B that are 2.5x larger."
msgstr ""
#: src/available_models_descriptions.py:44
msgid ""
"A fine-tuned model based on Mistral with good coverage of domain and "
"language."
msgstr ""
#: src/available_models_descriptions.py:45
msgid "Model focused on math and logic problems"
msgstr ""
#: src/available_models_descriptions.py:46
msgid ""
"CodeQwen1.5 is a large language model pretrained on a large amount of code "
"data."
msgstr ""
#: src/available_models_descriptions.py:47
msgid "Code generation model based on Code Llama."
msgstr ""
#: src/available_models_descriptions.py:48
msgid ""
"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model "
"trained on multilingual data in English, Spanish, German, Italian, French, "
"Portuguese, and Dutch."
msgstr ""
#: src/available_models_descriptions.py:49
msgid ""
"A 7B and 15B uncensored variant of the Dolphin model family that excels at "
"coding, based on StarCoder2."
msgstr ""
#: src/available_models_descriptions.py:50
msgid "Embedding models on very large sentence level datasets."
msgstr ""
#: src/available_models_descriptions.py:51
msgid "General use models based on Llama and Llama 2 from Nous Research."
msgstr ""
#: src/available_models_descriptions.py:52
msgid ""
"Starling is a large language model trained by reinforcement learning from AI "
"feedback focused on improving chatbot helpfulness."
msgstr ""
#: src/available_models_descriptions.py:53
msgid ""
"SQLCoder is a code completion model fined-tuned on StarCoder for SQL "
"generation tasks"
msgstr ""
#: src/available_models_descriptions.py:54
msgid ""
"Orca 2 is built by Microsoft research, and are a fine-tuned version of "
"Meta's Llama 2 models. The model is designed to excel particularly in "
"reasoning."
msgstr ""
#: src/available_models_descriptions.py:55
msgid ""
"This model extends LLama-3 8B's context length from 8k to over 1m tokens."
msgstr ""
#: src/available_models_descriptions.py:56
msgid "An advanced language model crafted with 2 trillion bilingual tokens."
msgstr ""
#: src/available_models_descriptions.py:57
msgid "An extension of Llama 2 that supports a context of up to 128k tokens."
msgstr ""
#: src/available_models_descriptions.py:58
msgid ""
"A model from NVIDIA based on Llama 3 that excels at conversational question "
"answering (QA) and retrieval-augmented generation (RAG)."
msgstr ""
#: src/available_models_descriptions.py:59
msgid ""
"A compact, yet powerful 10.7B large language model designed for single-turn "
"conversation."
msgstr ""
#: src/available_models_descriptions.py:60
msgid ""
"Conversational model based on Llama 2 that performs competitively on various "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:61
msgid "A family of open foundation models by IBM for Code Intelligence"
msgstr ""
#: src/available_models_descriptions.py:62
msgid ""
"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language "
"model by Microsoft Research."
msgstr ""
#: src/available_models_descriptions.py:63
msgid "General use model based on Llama 2."
msgstr ""
#: src/available_models_descriptions.py:64
msgid ""
"A companion assistant trained in philosophy, psychology, and personal "
"relationships. Based on Mistral."
msgstr ""
#: src/available_models_descriptions.py:65
msgid ""
"Llama 2 based model fine tuned on an Orca-style dataset. Originally called "
"Free Willy."
msgstr ""
#: src/available_models_descriptions.py:66
msgid ""
"BakLLaVA is a multimodal model consisting of the Mistral 7B base model "
"augmented with the LLaVA architecture."
msgstr ""
#: src/available_models_descriptions.py:67
msgid ""
"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:68
msgid "Uncensored version of Wizard LM model"
msgstr ""
#: src/available_models_descriptions.py:69
msgid ""
"Fine-tuned Llama 2 model to answer medical questions based on an open source "
"medical dataset."
msgstr ""
#: src/available_models_descriptions.py:70
msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral."
msgstr ""
#: src/available_models_descriptions.py:71
msgid "An extension of Mistral to support context windows of 64K or 128K."
msgstr ""
#: src/available_models_descriptions.py:72
msgid ""
"A suite of text embedding models by Snowflake, optimized for performance."
msgstr ""
#: src/available_models_descriptions.py:73
msgid ""
"An expansion of Llama 2 that specializes in integrating both general "
"language understanding and domain-specific knowledge, particularly in "
"programming and mathematics."
msgstr ""
#: src/available_models_descriptions.py:74
msgid "Great code generation model based on Llama2."
msgstr ""
#: src/available_models_descriptions.py:75
msgid ""
"Open-source medical large language model adapted from Llama 2 to the medical "
"domain."
msgstr ""
#: src/available_models_descriptions.py:76
msgid ""
"moondream2 is a small vision language model designed to run efficiently on "
"edge devices."
msgstr ""
#: src/available_models_descriptions.py:77
msgid "Uncensored Llama2 based model with support for a 16K context window."
msgstr ""
#: src/available_models_descriptions.py:78
msgid ""
"Nexus Raven is a 13B instruction tuned model for function calling tasks."
msgstr ""
#: src/available_models_descriptions.py:79
msgid ""
"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic "
"instruction data using OSS-Instruct, a novel approach to enlightening LLMs "
"with open-source code snippets."
msgstr ""
#: src/available_models_descriptions.py:80
msgid "A strong, economical, and efficient Mixture-of-Experts language model."
msgstr ""
#: src/available_models_descriptions.py:81
msgid ""
"A lightweight chat model allowing accurate, and responsive output without "
"requiring high-end hardware."
msgstr ""
#: src/available_models_descriptions.py:82
msgid ""
"A high-performing code instruct model created by merging two existing code "
"models."
msgstr ""
#: src/available_models_descriptions.py:83
msgid "A new small LLaVA model fine-tuned from Phi 3 Mini."
msgstr ""
#: src/available_models_descriptions.py:84
msgid ""
"MistralLite is a fine-tuned model based on Mistral with enhanced "
"capabilities of processing long contexts."
msgstr ""
#: src/available_models_descriptions.py:85
msgid ""
"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by "
"MelodysDreamj."
msgstr ""
#: src/available_models_descriptions.py:86
msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station."
msgstr ""
#: src/available_models_descriptions.py:87
msgid ""
"A language model created by combining two fine-tuned Llama 2 70B models into "
"one."
msgstr ""
#: src/available_models_descriptions.py:88
msgid ""
"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by "
"interleaving the model with itself."
msgstr ""
#: src/available_models_descriptions.py:89
msgid ""
"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. "
"Designed for chat and code generation."
msgstr ""
#: src/available_models_descriptions.py:90
msgid ""
"A top-performing mixture of experts model, fine-tuned with high-quality data."
msgstr ""
#: src/available_models_descriptions.py:91
msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr."
msgstr ""
#: src/available_models_descriptions.py:92
msgid "DBRX is an open, general-purpose LLM created by Databricks."
msgstr ""
#: src/available_models_descriptions.py:93
msgid ""
"Falcon2 is an 11B parameters causal decoder-only model built by TII and "
"trained over 5T tokens."
msgstr ""
#: src/available_models_descriptions.py:94
msgid ""
"A robust conversational model designed to be used for both chat and instruct "
"use cases."
msgstr ""
#, fuzzy
#~ msgid "New message from '{}'"
#~ msgstr "Nuevo diseño para el entry de mensaje"

184
po/es.po~
View File

@@ -6,8 +6,8 @@
msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: https://github.com/Jeffser/Alpaca\n"
"POT-Creation-Date: 2024-07-02 18:21-0600\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:04-0600\n"
"PO-Revision-Date: 2024-05-19 19:44-0600\n"
"Last-Translator: Jeffry Samuel Eduarte Rojas <jeffrysamuer@gmail.com>\n"
"Language-Team: Spanish\n"
@@ -70,7 +70,7 @@ msgstr "Reconocimiento de documentos de texto plano"
msgid "Import and export chats"
msgstr "Añadida la opcion de importar y exportar chats"
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860
msgid "Disclaimer"
msgstr "Aviso Legal"
@@ -719,7 +719,7 @@ msgstr "0.1.1"
msgid "This is the first public version of Alpaca"
msgstr "Esta es la primera versión publica de Alpaca"
#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41
#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41
msgid "New Chat"
msgstr "Nuevo Chat"
@@ -819,73 +819,72 @@ msgstr "Chat exportado exitosamente"
msgid "Chat imported successfully"
msgstr "Chat importado exitosamente"
#: src/window.py:278
#: src/window.py:280
msgid "Close"
msgstr "Cerrar"
#: src/window.py:279 src/window.ui:806
#: src/window.py:281 src/window.ui:813
msgid "Next"
msgstr "Siguiente"
#: src/window.py:329
#: src/window.py:332
#, fuzzy
msgid "Pulling in the background..."
msgstr "Ejecutar en el fondo"
#: src/window.py:381
#: src/window.py:384
msgid "Stop Creating '{}'"
msgstr ""
#: src/window.py:418
#: src/window.py:421
#, fuzzy
msgid "image"
msgstr "Imagen"
#: src/window.py:588
#: src/window.py:593
#, fuzzy
msgid "Remove Message"
msgstr "Remover Imagen"
#: src/window.py:593 src/window.py:841
#: src/window.py:598 src/window.py:869
#, fuzzy
msgid "Copy Message"
msgstr "Enviar Mensaje"
#: src/window.py:598
#: src/window.py:603
#, fuzzy
msgid "Edit Message"
msgstr "Enviar Mensaje"
#: src/window.py:729
#: src/window.py:661
msgid "Missing Image"
msgstr ""
#: src/window.py:677
msgid "Missing image"
msgstr ""
#: src/window.py:757
msgid "Remove '{} ({})'"
msgstr ""
#: src/window.py:882
msgid "Message Received"
msgstr ""
#: src/window.py:882
#, fuzzy
msgid "New message from '{}'"
msgstr "Nuevo diseño para el entry de mensaje"
#: src/window.py:939
#: src/window.py:969
msgid "Task Complete"
msgstr "Tarea completada"
#: src/window.py:939
#: src/window.py:969
msgid "Model '{}' pulled successfully."
msgstr "El modelo '{}' fue descargado exitosamente"
#: src/window.py:944
#: src/window.py:974
msgid "Pull Model Error"
msgstr "Error Descargando Modelo"
#: src/window.py:944
#: src/window.py:974
msgid "Failed to pull model '{}' due to network error."
msgstr "No se pudo descargar el modelo '{}' debido a un error de red"
#: src/window.py:978
#: src/window.py:1008
msgid "Stop Pulling '{} ({})'"
msgstr ""
@@ -897,7 +896,7 @@ msgstr "Menu"
msgid "Toggle Sidebar"
msgstr "Alternar barra de lado"
#: src/window.ui:107 src/window.ui:595
#: src/window.ui:107 src/window.ui:587
#, fuzzy
msgid "Manage Models"
msgstr "Gestionar modelos"
@@ -912,29 +911,29 @@ msgstr "Menu"
msgid "Attach File"
msgstr "Adjuntar archivo"
#: src/window.ui:238 src/window.ui:1120
#: src/window.ui:242 src/window.ui:1127
msgid "Send Message"
msgstr "Enviar Mensaje"
#: src/window.ui:286 src/window.ui:965 src/window.ui:1079
#: src/window.ui:290 src/window.ui:972 src/window.ui:1086
msgid "Preferences"
msgstr "Preferencias"
#: src/window.ui:289 src/window.ui:1057
#: src/window.ui:293 src/window.ui:1064
msgid "General"
msgstr "General"
#: src/window.ui:297
#: src/window.ui:299
#, fuzzy
msgid "Use Remote Connection to Ollama"
msgstr "Gestiona una conección remota a Ollama"
#: src/window.ui:303
#: src/window.ui:305
#, fuzzy
msgid "URL of Remote Instance"
msgstr "URL de la instancia remota"
#: src/window.ui:310
#: src/window.ui:312
msgid "Bearer Token (Optional)"
msgstr ""
@@ -943,17 +942,12 @@ msgstr ""
msgid "Run Alpaca In Background"
msgstr "Ejecutar en el fondo"
#: src/window.ui:331
#, fuzzy
msgid "Model"
msgstr "Parar Modelo"
#: src/window.ui:341
#: src/window.ui:333
#, fuzzy
msgid "Temperature"
msgstr "Temperatura"
#: src/window.ui:342
#: src/window.ui:334
msgid ""
"The temperature of the model. Increasing the temperature will make the model "
"answer more creatively. (Default: 0.8)"
@@ -961,11 +955,11 @@ msgstr ""
"La temperatura del modelo. Incrementando la temparatura hará que el modelo "
"responda más creativamente (Por defecto: 0.8)"
#: src/window.ui:357
#: src/window.ui:349
msgid "Seed"
msgstr "Semilla"
#: src/window.ui:358
#: src/window.ui:350
msgid ""
"Sets the random number seed to use for generation. Setting this to a "
"specific number will make the model generate the same text for the same "
@@ -975,11 +969,11 @@ msgstr ""
"numero especifico hará que el modelo genere el mismo texto a la misma "
"pregunta del usuario (Por defecto: 0 (Al azar))"
#: src/window.ui:372
#: src/window.ui:364
msgid "Keep Alive Time"
msgstr "Tiempo Para Mantener Vivo"
#: src/window.ui:373
#: src/window.ui:365
#, fuzzy
msgid ""
"Controls how long the model will stay loaded into memory following the "
@@ -988,17 +982,17 @@ msgstr ""
"Controla por cuanto tiempo el modelo permanecera cargado en la memoria "
"despues de la ultima petición en minutos (Por defecto: 5)"
#: src/window.ui:389
#: src/window.ui:381
#, fuzzy
msgid "Ollama Instance"
msgstr "Instancia de Ollama"
#: src/window.ui:393
#: src/window.ui:385
#, fuzzy
msgid "Ollama Overrides"
msgstr "Overrides de Ollama"
#: src/window.ui:394
#: src/window.ui:386
#, fuzzy
msgid ""
"Manage the arguments used on Ollama, any changes on this page only applies "
@@ -1008,33 +1002,33 @@ msgstr ""
"solo aplica a la instancia integrada, la instancia se reiniciará si haces "
"algún cambio"
#: src/window.ui:477
#: src/window.ui:469
msgid "Create"
msgstr "Crear"
#: src/window.ui:490 src/window.ui:605
#: src/window.ui:482 src/window.ui:597
#, fuzzy
msgid "Create Model"
msgstr "Crear modelo"
#: src/window.ui:516
#: src/window.ui:508
msgid "Base"
msgstr "Base"
#: src/window.ui:534
#: src/window.ui:526
msgid "Name"
msgstr "Nombre"
#: src/window.ui:540
#: src/window.ui:532
msgid "Context"
msgstr "Contexto"
#: src/window.ui:555
#: src/window.ui:547
#, fuzzy
msgid "Template"
msgstr "Plantilla"
#: src/window.ui:561
#: src/window.ui:553
msgid ""
"Some models require a specific template. Please visit the model's website "
"for more information if you're unsure."
@@ -1042,40 +1036,46 @@ msgstr ""
"Algunos modelos requieren de una plantilla especifica. Por favor visita el "
"sitio web del modelo para más información en caso de que no estés seguro"
#: src/window.ui:612
#: src/window.ui:604
#, fuzzy
msgid "Search Model"
msgstr "Modelos Destacados"
#: src/window.ui:672
#: src/window.ui:664
msgid "No Models Found"
msgstr ""
#: src/window.ui:673
#: src/window.ui:665
msgid "Try a different search"
msgstr ""
#: src/window.ui:738
#: src/window.ui:708
msgid ""
"By downloading this model you accept the license agreement available on the "
"model's website."
msgstr ""
#: src/window.ui:745
msgid "Open with Default App"
msgstr ""
#: src/window.ui:790
#: src/window.ui:797
msgid "Previous"
msgstr "Anterior"
#: src/window.ui:833
#: src/window.ui:840
msgid "Welcome to Alpaca"
msgstr "Bienvenido a Alpaca"
#: src/window.ui:834
#: src/window.ui:841
msgid "Powered by Ollama"
msgstr "Impulsado por Ollama"
#: src/window.ui:837
#: src/window.ui:844
msgid "Ollama Website"
msgstr "Sitio Web de Ollama"
#: src/window.ui:854
#: src/window.ui:861
msgid ""
"Alpaca and its developers are not liable for any damages to devices or "
"software resulting from the execution of code generated by an AI model. "
@@ -1086,12 +1086,12 @@ msgstr ""
"un modelo de IA. Por favor sea precavido y revise el codigo cuidadosamente "
"antes de correrlo"
#: src/window.ui:865
#: src/window.ui:872
#, fuzzy
msgid "Featured Models"
msgstr "Modelos Destacados"
#: src/window.ui:866
#: src/window.ui:873
msgid ""
"Alpaca works locally on your device, to start chatting you'll need an AI "
"model, you can either pull models from this list or the 'Manage Models' menu "
@@ -1101,98 +1101,106 @@ msgstr ""
"necesitas un modelo IA, puedes descargar modelos de esta lista o usando el "
"menu 'Gestionar Modelos' despues"
#: src/window.ui:876
#: src/window.ui:883
msgid "Built by Meta"
msgstr "Construido por Meta"
#: src/window.ui:894
#: src/window.ui:901
msgid "Built by Google DeepMind"
msgstr "Construido por Google DeepMind"
#: src/window.ui:912
#: src/window.ui:919
msgid "Built by Microsoft"
msgstr "Construido por Microsoft"
#: src/window.ui:930
#: src/window.ui:937
msgid "Multimodal AI with image recognition"
msgstr "IA multimodal con reconocimiento de imagenes"
#: src/window.ui:959
#: src/window.ui:966
#, fuzzy
msgid "Import Chat"
msgstr "Importar chat"
#: src/window.ui:969
#: src/window.ui:976
msgid "Keyboard Shortcuts"
msgstr "Atajos de Teclado"
#: src/window.ui:973
#: src/window.ui:980
msgid "About Alpaca"
msgstr "Sobre Alpaca"
#: src/window.ui:980 src/window.ui:999
#: src/window.ui:987 src/window.ui:1006
msgid "Rename Chat"
msgstr "Renombrar Chat"
#: src/window.ui:984 src/window.ui:1003
#: src/window.ui:991 src/window.ui:1010
#, fuzzy
msgid "Export Chat"
msgstr "Importar chat"
#: src/window.ui:988
#: src/window.ui:995
msgid "Clear Chat"
msgstr "Limpiar Chat"
#: src/window.ui:995
#: src/window.ui:1002
msgid "Delete Chat"
msgstr "Eliminar Chat"
#: src/window.ui:1011
#: src/window.ui:1018
#, fuzzy
msgid "From Existing Model"
msgstr "Usar modelo existente"
#: src/window.ui:1015
#: src/window.ui:1022
#, fuzzy
msgid "From GGUF File (Experimental)"
msgstr "Usar archivo GGUF (Experimental)"
#: src/window.ui:1061
#: src/window.ui:1068
msgid "Close application"
msgstr "Cerrar aplicación"
#: src/window.ui:1067
#: src/window.ui:1074
msgid "Import chat"
msgstr "Importar chat"
#: src/window.ui:1073
#: src/window.ui:1080
msgid "Clear chat"
msgstr "Limpiar chat"
#: src/window.ui:1085
#: src/window.ui:1092
msgid "New chat"
msgstr "Nuevo chat"
#: src/window.ui:1091
#: src/window.ui:1098
msgid "Show shortcuts window"
msgstr "Mostrar ventana de atajos"
#: src/window.ui:1098
#: src/window.ui:1105
msgid "Editor"
msgstr "Editor"
#: src/window.ui:1102
#: src/window.ui:1109
msgid "Copy"
msgstr "Copiar"
#: src/window.ui:1108
#: src/window.ui:1115
msgid "Paste"
msgstr "Pegar"
#: src/window.ui:1114
#: src/window.ui:1121
msgid "Insert new line"
msgstr "Saltar línea"
#, fuzzy
#~ msgid "New message from '{}'"
#~ msgstr "Nuevo diseño para el entry de mensaje"
#, fuzzy
#~ msgid "Model"
#~ msgstr "Parar Modelo"
#, fuzzy
#~ msgid "Send message"
#~ msgstr "Enviar Mensaje"

507
po/fr.po
View File

@@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:04-0600\n"
"POT-Creation-Date: 2024-07-07 17:40-0600\n"
"PO-Revision-Date: 2024-07-06 15:45+0200\n"
"Last-Translator: Louis Chauvet-Villaret <louischauvet0@gmail.com>\n"
"Language-Team: French\n"
@@ -1152,6 +1152,511 @@ msgstr "Coller"
msgid "Insert new line"
msgstr "Ajouter une ligne"
#: src/available_models_descriptions.py:2
msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B."
msgstr ""
#: src/available_models_descriptions.py:3
msgid "Meta Llama 3: The most capable openly available LLM to date"
msgstr ""
#: src/available_models_descriptions.py:4
msgid "Qwen2 is a new series of large language models from Alibaba group"
msgstr ""
#: src/available_models_descriptions.py:5
msgid ""
"An open-source Mixture-of-Experts code language model that achieves "
"performance comparable to GPT4-Turbo in code-specific tasks."
msgstr ""
#: src/available_models_descriptions.py:6
msgid ""
"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art "
"open models by Microsoft."
msgstr ""
#: src/available_models_descriptions.py:7
msgid ""
"Aya 23, released by Cohere, is a new family of state-of-the-art, "
"multilingual models that support 23 languages."
msgstr ""
#: src/available_models_descriptions.py:8
msgid "The 7B model released by Mistral AI, updated to version 0.3."
msgstr ""
#: src/available_models_descriptions.py:9
msgid ""
"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in "
"8x7b and 8x22b parameter sizes."
msgstr ""
#: src/available_models_descriptions.py:10
msgid ""
"CodeGemma is a collection of powerful, lightweight models that can perform a "
"variety of coding tasks like fill-in-the-middle code completion, code "
"generation, natural language understanding, mathematical reasoning, and "
"instruction following."
msgstr ""
#: src/available_models_descriptions.py:11
msgid ""
"Command R is a Large Language Model optimized for conversational interaction "
"and long context tasks."
msgstr ""
#: src/available_models_descriptions.py:12
msgid ""
"Command R+ is a powerful, scalable large language model purpose-built to "
"excel at real-world enterprise use cases."
msgstr ""
#: src/available_models_descriptions.py:13
msgid ""
"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines "
"a vision encoder and Vicuna for general-purpose visual and language "
"understanding. Updated to version 1.6."
msgstr ""
#: src/available_models_descriptions.py:14
msgid ""
"Gemma is a family of lightweight, state-of-the-art open models built by "
"Google DeepMind. Updated to version 1.1"
msgstr ""
#: src/available_models_descriptions.py:15
msgid ""
"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from "
"0.5B to 110B parameters"
msgstr ""
#: src/available_models_descriptions.py:16
msgid ""
"Llama 2 is a collection of foundation language models ranging from 7B to 70B "
"parameters."
msgstr ""
#: src/available_models_descriptions.py:17
msgid ""
"A large language model that can use text prompts to generate and discuss "
"code."
msgstr ""
#: src/available_models_descriptions.py:18
msgid ""
"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of "
"experts models that excels at coding tasks. Created by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:19
msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope."
msgstr ""
#: src/available_models_descriptions.py:20
msgid ""
"DeepSeek Coder is a capable coding model trained on two trillion code and "
"natural language tokens."
msgstr ""
#: src/available_models_descriptions.py:21
msgid ""
"A high-performing open embedding model with a large token context window."
msgstr ""
#: src/available_models_descriptions.py:22
msgid ""
"Phi-2: a 2.7B language model by Microsoft Research that demonstrates "
"outstanding reasoning and language understanding capabilities."
msgstr ""
#: src/available_models_descriptions.py:23
msgid ""
"The uncensored Dolphin model based on Mistral that excels at coding tasks. "
"Updated to version 2.8."
msgstr ""
#: src/available_models_descriptions.py:24
msgid ""
"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the "
"Mistral 7B model using the OpenOrca dataset."
msgstr ""
#: src/available_models_descriptions.py:25
msgid ""
"A general-purpose model ranging from 3 billion parameters to 70 billion, "
"suitable for entry-level hardware."
msgstr ""
#: src/available_models_descriptions.py:26
msgid "State-of-the-art large embedding model from mixedbread.ai"
msgstr ""
#: src/available_models_descriptions.py:27
msgid ""
"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on "
"Llama 3 that has a variety of instruction, conversational, and coding skills."
msgstr ""
#: src/available_models_descriptions.py:28
msgid ""
"StarCoder2 is the next generation of transparently trained open code LLMs "
"that comes in three sizes: 3B, 7B and 15B parameters."
msgstr ""
#: src/available_models_descriptions.py:29
msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability."
msgstr ""
#: src/available_models_descriptions.py:30
msgid ""
"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models "
"that are trained to act as helpful assistants."
msgstr ""
#: src/available_models_descriptions.py:31
msgid "Yi 1.5 is a high-performing, bilingual language model."
msgstr ""
#: src/available_models_descriptions.py:32
msgid ""
"The powerful family of models by Nous Research that excels at scientific "
"discussion and coding tasks."
msgstr ""
#: src/available_models_descriptions.py:33
msgid ""
"General use chat model based on Llama and Llama 2 with 2K to 16K context "
"sizes."
msgstr ""
#: src/available_models_descriptions.py:34
msgid ""
"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on "
"Llama 2 uncensored by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:35
msgid ""
"The TinyLlama project is an open endeavor to train a compact 1.1B Llama "
"model on 3 trillion tokens."
msgstr ""
#: src/available_models_descriptions.py:36
msgid ""
"State of the art large language model from Microsoft AI with improved "
"performance on complex chat, multilingual, reasoning and agent use cases."
msgstr ""
#: src/available_models_descriptions.py:37
msgid ""
"StarCoder is a code generation model trained on 80+ programming languages."
msgstr ""
#: src/available_models_descriptions.py:38
msgid ""
"Codestral is Mistral AIs first-ever code model designed for code generation "
"tasks."
msgstr ""
#: src/available_models_descriptions.py:39
msgid ""
"A family of open-source models trained on a wide variety of data, surpassing "
"ChatGPT on various benchmarks. Updated to version 3.5-0106."
msgstr ""
#: src/available_models_descriptions.py:40
msgid ""
"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset "
"by Eric Hartford and based on TinyLlama."
msgstr ""
#: src/available_models_descriptions.py:41
msgid ""
"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully "
"open datasets."
msgstr ""
#: src/available_models_descriptions.py:42
msgid "State-of-the-art code generation model"
msgstr ""
#: src/available_models_descriptions.py:43
msgid ""
"Stable Code 3B is a coding model with instruct and code completion variants "
"on par with models such as Code Llama 7B that are 2.5x larger."
msgstr ""
#: src/available_models_descriptions.py:44
msgid ""
"A fine-tuned model based on Mistral with good coverage of domain and "
"language."
msgstr ""
#: src/available_models_descriptions.py:45
msgid "Model focused on math and logic problems"
msgstr ""
#: src/available_models_descriptions.py:46
msgid ""
"CodeQwen1.5 is a large language model pretrained on a large amount of code "
"data."
msgstr ""
#: src/available_models_descriptions.py:47
msgid "Code generation model based on Code Llama."
msgstr ""
#: src/available_models_descriptions.py:48
msgid ""
"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model "
"trained on multilingual data in English, Spanish, German, Italian, French, "
"Portuguese, and Dutch."
msgstr ""
#: src/available_models_descriptions.py:49
msgid ""
"A 7B and 15B uncensored variant of the Dolphin model family that excels at "
"coding, based on StarCoder2."
msgstr ""
#: src/available_models_descriptions.py:50
msgid "Embedding models on very large sentence level datasets."
msgstr ""
#: src/available_models_descriptions.py:51
msgid "General use models based on Llama and Llama 2 from Nous Research."
msgstr ""
#: src/available_models_descriptions.py:52
msgid ""
"Starling is a large language model trained by reinforcement learning from AI "
"feedback focused on improving chatbot helpfulness."
msgstr ""
#: src/available_models_descriptions.py:53
msgid ""
"SQLCoder is a code completion model fined-tuned on StarCoder for SQL "
"generation tasks"
msgstr ""
#: src/available_models_descriptions.py:54
msgid ""
"Orca 2 is built by Microsoft research, and are a fine-tuned version of "
"Meta's Llama 2 models. The model is designed to excel particularly in "
"reasoning."
msgstr ""
#: src/available_models_descriptions.py:55
msgid ""
"This model extends LLama-3 8B's context length from 8k to over 1m tokens."
msgstr ""
#: src/available_models_descriptions.py:56
msgid "An advanced language model crafted with 2 trillion bilingual tokens."
msgstr ""
#: src/available_models_descriptions.py:57
msgid "An extension of Llama 2 that supports a context of up to 128k tokens."
msgstr ""
#: src/available_models_descriptions.py:58
msgid ""
"A model from NVIDIA based on Llama 3 that excels at conversational question "
"answering (QA) and retrieval-augmented generation (RAG)."
msgstr ""
#: src/available_models_descriptions.py:59
msgid ""
"A compact, yet powerful 10.7B large language model designed for single-turn "
"conversation."
msgstr ""
#: src/available_models_descriptions.py:60
msgid ""
"Conversational model based on Llama 2 that performs competitively on various "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:61
msgid "A family of open foundation models by IBM for Code Intelligence"
msgstr ""
#: src/available_models_descriptions.py:62
msgid ""
"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language "
"model by Microsoft Research."
msgstr ""
#: src/available_models_descriptions.py:63
msgid "General use model based on Llama 2."
msgstr ""
#: src/available_models_descriptions.py:64
msgid ""
"A companion assistant trained in philosophy, psychology, and personal "
"relationships. Based on Mistral."
msgstr ""
#: src/available_models_descriptions.py:65
msgid ""
"Llama 2 based model fine tuned on an Orca-style dataset. Originally called "
"Free Willy."
msgstr ""
#: src/available_models_descriptions.py:66
msgid ""
"BakLLaVA is a multimodal model consisting of the Mistral 7B base model "
"augmented with the LLaVA architecture."
msgstr ""
#: src/available_models_descriptions.py:67
msgid ""
"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:68
msgid "Uncensored version of Wizard LM model"
msgstr ""
#: src/available_models_descriptions.py:69
msgid ""
"Fine-tuned Llama 2 model to answer medical questions based on an open source "
"medical dataset."
msgstr ""
#: src/available_models_descriptions.py:70
msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral."
msgstr ""
#: src/available_models_descriptions.py:71
msgid "An extension of Mistral to support context windows of 64K or 128K."
msgstr ""
#: src/available_models_descriptions.py:72
msgid ""
"A suite of text embedding models by Snowflake, optimized for performance."
msgstr ""
#: src/available_models_descriptions.py:73
msgid ""
"An expansion of Llama 2 that specializes in integrating both general "
"language understanding and domain-specific knowledge, particularly in "
"programming and mathematics."
msgstr ""
#: src/available_models_descriptions.py:74
msgid "Great code generation model based on Llama2."
msgstr ""
#: src/available_models_descriptions.py:75
msgid ""
"Open-source medical large language model adapted from Llama 2 to the medical "
"domain."
msgstr ""
#: src/available_models_descriptions.py:76
msgid ""
"moondream2 is a small vision language model designed to run efficiently on "
"edge devices."
msgstr ""
#: src/available_models_descriptions.py:77
msgid "Uncensored Llama2 based model with support for a 16K context window."
msgstr ""
#: src/available_models_descriptions.py:78
msgid ""
"Nexus Raven is a 13B instruction tuned model for function calling tasks."
msgstr ""
#: src/available_models_descriptions.py:79
msgid ""
"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic "
"instruction data using OSS-Instruct, a novel approach to enlightening LLMs "
"with open-source code snippets."
msgstr ""
#: src/available_models_descriptions.py:80
msgid "A strong, economical, and efficient Mixture-of-Experts language model."
msgstr ""
#: src/available_models_descriptions.py:81
msgid ""
"A lightweight chat model allowing accurate, and responsive output without "
"requiring high-end hardware."
msgstr ""
#: src/available_models_descriptions.py:82
msgid ""
"A high-performing code instruct model created by merging two existing code "
"models."
msgstr ""
#: src/available_models_descriptions.py:83
msgid "A new small LLaVA model fine-tuned from Phi 3 Mini."
msgstr ""
#: src/available_models_descriptions.py:84
msgid ""
"MistralLite is a fine-tuned model based on Mistral with enhanced "
"capabilities of processing long contexts."
msgstr ""
#: src/available_models_descriptions.py:85
msgid ""
"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by "
"MelodysDreamj."
msgstr ""
#: src/available_models_descriptions.py:86
msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station."
msgstr ""
#: src/available_models_descriptions.py:87
msgid ""
"A language model created by combining two fine-tuned Llama 2 70B models into "
"one."
msgstr ""
#: src/available_models_descriptions.py:88
msgid ""
"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by "
"interleaving the model with itself."
msgstr ""
#: src/available_models_descriptions.py:89
msgid ""
"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. "
"Designed for chat and code generation."
msgstr ""
#: src/available_models_descriptions.py:90
msgid ""
"A top-performing mixture of experts model, fine-tuned with high-quality data."
msgstr ""
#: src/available_models_descriptions.py:91
msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr."
msgstr ""
#: src/available_models_descriptions.py:92
msgid "DBRX is an open, general-purpose LLM created by Databricks."
msgstr ""
#: src/available_models_descriptions.py:93
msgid ""
"Falcon2 is an 11B parameters causal decoder-only model built by TII and "
"trained over 5T tokens."
msgstr ""
#: src/available_models_descriptions.py:94
msgid ""
"A robust conversational model designed to be used for both chat and instruct "
"use cases."
msgstr ""
#~ msgid "Message Received"
#~ msgstr "Message reçu"

183
po/fr.po~
View File

@@ -6,8 +6,8 @@
msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: https://github.com/Jeffser/Alpaca\n"
"POT-Creation-Date: 2024-07-02 18:21-0600\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:04-0600\n"
"PO-Revision-Date: 2024-07-06 15:45+0200\n"
"Last-Translator: Louis Chauvet-Villaret <louischauvet0@gmail.com>\n"
"Language-Team: French\n"
@@ -66,7 +66,7 @@ msgstr "Reconnaissance de documents en texte brut"
msgid "Import and export chats"
msgstr "Importez et exportez les discussions"
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860
msgid "Disclaimer"
msgstr "Avertissement"
@@ -703,7 +703,7 @@ msgstr "Version stable"
msgid "This is the first public version of Alpaca"
msgstr "Première version publique d'Alpaca"
#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41
#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41
msgid "New Chat"
msgstr "Nouvelle discussion"
@@ -798,68 +798,68 @@ msgstr "Discussion exportée avec succès"
msgid "Chat imported successfully"
msgstr "Discussion importée avec succès"
#: src/window.py:278
#: src/window.py:280
msgid "Close"
msgstr "Fermer"
#: src/window.py:279 src/window.ui:806
#: src/window.py:281 src/window.ui:813
msgid "Next"
msgstr "Suivant"
#: src/window.py:329
#: src/window.py:332
msgid "Pulling in the background..."
msgstr "Téléchargement en arrière-plan..."
#: src/window.py:381
#: src/window.py:384
msgid "Stop Creating '{}'"
msgstr "Arrêter de créer '{}'"
#: src/window.py:418
#: src/window.py:421
msgid "image"
msgstr "Image"
#: src/window.py:588
#: src/window.py:593
msgid "Remove Message"
msgstr "Supprimer le message"
#: src/window.py:593 src/window.py:841
#: src/window.py:598 src/window.py:869
msgid "Copy Message"
msgstr "Copier le message"
#: src/window.py:598
#: src/window.py:603
msgid "Edit Message"
msgstr "Modifier le message"
#: src/window.py:729
#: src/window.py:661
msgid "Missing Image"
msgstr ""
#: src/window.py:677
msgid "Missing image"
msgstr ""
#: src/window.py:757
msgid "Remove '{} ({})'"
msgstr "Supprimer '{} ({})'"
#: src/window.py:882
msgid "Message Received"
msgstr "Message reçu"
#: src/window.py:882
msgid "New message from '{}'"
msgstr "Nouveau message depuis '{}'"
#: src/window.py:939
#: src/window.py:969
msgid "Task Complete"
msgstr "Tâche terminée"
#: src/window.py:939
#: src/window.py:969
msgid "Model '{}' pulled successfully."
msgstr "Modèle '{}' téléchargé avec succès."
#: src/window.py:944
#: src/window.py:974
msgid "Pull Model Error"
msgstr "Erreur de téléchargement du modèle"
#: src/window.py:944
#: src/window.py:974
msgid "Failed to pull model '{}' due to network error."
msgstr ""
"Échec du téléchargement du modèle '{}' à cause d'une erreur de connexion"
#: src/window.py:978
#: src/window.py:1008
msgid "Stop Pulling '{} ({})'"
msgstr "Arrêter de télécharger '{} ({})'"
@@ -871,7 +871,7 @@ msgstr "Menu"
msgid "Toggle Sidebar"
msgstr "Basculer la barre latérale"
#: src/window.ui:107 src/window.ui:595
#: src/window.ui:107 src/window.ui:587
msgid "Manage Models"
msgstr "Gérer les modèles"
@@ -883,29 +883,29 @@ msgstr "Menu des discussions"
msgid "Attach File"
msgstr "Ajouter un fichier"
#: src/window.ui:238 src/window.ui:1120
#: src/window.ui:242 src/window.ui:1127
msgid "Send Message"
msgstr "Envoyer le message"
#: src/window.ui:286 src/window.ui:965 src/window.ui:1079
#: src/window.ui:290 src/window.ui:972 src/window.ui:1086
msgid "Preferences"
msgstr "Préférences"
#: src/window.ui:289 src/window.ui:1057
#: src/window.ui:293 src/window.ui:1064
msgid "General"
msgstr "Général"
#: src/window.ui:297
#: src/window.ui:299
msgid "Use Remote Connection to Ollama"
msgstr "Utiliser une connexion à distance d'Ollama"
#: src/window.ui:303
#: src/window.ui:305
msgid "URL of Remote Instance"
msgstr "URL de la connexion distante"
# I don't really know how to translate "Bearer Token"
# I search for it but they don't translate it
#: src/window.ui:310
#: src/window.ui:312
msgid "Bearer Token (Optional)"
msgstr "Bearer Token (Optionnel)"
@@ -913,15 +913,11 @@ msgstr "Bearer Token (Optionnel)"
msgid "Run Alpaca In Background"
msgstr "Exécuter Alpaca en arrière-plan"
#: src/window.ui:331
msgid "Model"
msgstr "Modèle"
#: src/window.ui:341
#: src/window.ui:333
msgid "Temperature"
msgstr "Température"
#: src/window.ui:342
#: src/window.ui:334
msgid ""
"The temperature of the model. Increasing the temperature will make the model "
"answer more creatively. (Default: 0.8)"
@@ -929,11 +925,11 @@ msgstr ""
"La température du modèle. Augmenter la température engendrera des réponses "
"plus créatives. (défaut : 0.8)"
#: src/window.ui:357
#: src/window.ui:349
msgid "Seed"
msgstr "Graine"
#: src/window.ui:358
#: src/window.ui:350
msgid ""
"Sets the random number seed to use for generation. Setting this to a "
"specific number will make the model generate the same text for the same "
@@ -943,11 +939,11 @@ msgstr ""
"spécifique induira une même réponse pour un même prompt. (défaut : 0 "
"(aléatoire))"
#: src/window.ui:372
#: src/window.ui:364
msgid "Keep Alive Time"
msgstr "Temps en mémoire"
#: src/window.ui:373
#: src/window.ui:365
msgid ""
"Controls how long the model will stay loaded into memory following the "
"request in minutes (Default: 5)"
@@ -955,16 +951,16 @@ msgstr ""
"Gérer la durée en minutes durant laquelle le modèle reste chargé en mémoire "
"(défaut : 5)"
#: src/window.ui:389
#: src/window.ui:381
msgid "Ollama Instance"
msgstr "Instance d'Ollama"
# What is override ???
#: src/window.ui:393
#: src/window.ui:385
msgid "Ollama Overrides"
msgstr "Paramètres d'Ollama"
#: src/window.ui:394
#: src/window.ui:386
msgid ""
"Manage the arguments used on Ollama, any changes on this page only applies "
"to the integrated instance, the instance will restart if you make changes."
@@ -973,31 +969,31 @@ msgstr ""
"s'appliquent seulement à l'instance intégré. L'instance va redémarrer si "
"vous effectuez des changements."
#: src/window.ui:477
#: src/window.ui:469
msgid "Create"
msgstr "Créer"
#: src/window.ui:490 src/window.ui:605
#: src/window.ui:482 src/window.ui:597
msgid "Create Model"
msgstr "Créer un modèle"
#: src/window.ui:516
#: src/window.ui:508
msgid "Base"
msgstr "Base"
#: src/window.ui:534
#: src/window.ui:526
msgid "Name"
msgstr "Nom"
#: src/window.ui:540
#: src/window.ui:532
msgid "Context"
msgstr "Contexte"
#: src/window.ui:555
#: src/window.ui:547
msgid "Template"
msgstr "Template"
#: src/window.ui:561
#: src/window.ui:553
msgid ""
"Some models require a specific template. Please visit the model's website "
"for more information if you're unsure."
@@ -1005,39 +1001,45 @@ msgstr ""
"Certains modèles requièrent un format spécifique. Merci de visiter le site "
"du modèle pour plus d'information."
#: src/window.ui:612
#: src/window.ui:604
msgid "Search Model"
msgstr "Chercher un modèle"
#: src/window.ui:672
#: src/window.ui:664
msgid "No Models Found"
msgstr "Aucun modèle trouvé"
#: src/window.ui:673
#: src/window.ui:665
msgid "Try a different search"
msgstr "Essayez une recherche différente"
#: src/window.ui:738
#: src/window.ui:708
msgid ""
"By downloading this model you accept the license agreement available on the "
"model's website."
msgstr ""
#: src/window.ui:745
msgid "Open with Default App"
msgstr "Ouvrir avec l'application par défaut"
#: src/window.ui:790
#: src/window.ui:797
msgid "Previous"
msgstr "Précédent"
#: src/window.ui:833
#: src/window.ui:840
msgid "Welcome to Alpaca"
msgstr "Bienvenue sur Alpaca"
#: src/window.ui:834
#: src/window.ui:841
msgid "Powered by Ollama"
msgstr "Fonctionne grâce à Ollama"
#: src/window.ui:837
#: src/window.ui:844
msgid "Ollama Website"
msgstr "Site web d'Ollama"
#: src/window.ui:854
#: src/window.ui:861
msgid ""
"Alpaca and its developers are not liable for any damages to devices or "
"software resulting from the execution of code generated by an AI model. "
@@ -1048,11 +1050,11 @@ msgstr ""
"modèle. Merci de faire attention et de relire attentivement le code avant de "
"l'exécuter."
#: src/window.ui:865
#: src/window.ui:872
msgid "Featured Models"
msgstr "Modèles recommandés"
#: src/window.ui:866
#: src/window.ui:873
msgid ""
"Alpaca works locally on your device, to start chatting you'll need an AI "
"model, you can either pull models from this list or the 'Manage Models' menu "
@@ -1062,94 +1064,103 @@ msgstr ""
"vous aurez besoin d'un modèle d'IA, vous pouvez télécharger un modèle soit "
"depuis cette liste soit depuis le menu 'Gérer les modèles' plus tard."
#: src/window.ui:876
#: src/window.ui:883
msgid "Built by Meta"
msgstr "Développé par Meta"
#: src/window.ui:894
#: src/window.ui:901
msgid "Built by Google DeepMind"
msgstr "Développé par Google DeepMind"
#: src/window.ui:912
#: src/window.ui:919
msgid "Built by Microsoft"
msgstr "Développé par Microsoft"
#: src/window.ui:930
#: src/window.ui:937
msgid "Multimodal AI with image recognition"
msgstr "IA multimodale avec reconnaissance d'image"
#: src/window.ui:959
#: src/window.ui:966
msgid "Import Chat"
msgstr "Importer une discussion"
#: src/window.ui:969
#: src/window.ui:976
msgid "Keyboard Shortcuts"
msgstr "Raccourcis claviers"
#: src/window.ui:973
#: src/window.ui:980
msgid "About Alpaca"
msgstr "À propos d'Alpaca"
#: src/window.ui:980 src/window.ui:999
#: src/window.ui:987 src/window.ui:1006
msgid "Rename Chat"
msgstr "Renommer la discussion"
#: src/window.ui:984 src/window.ui:1003
#: src/window.ui:991 src/window.ui:1010
msgid "Export Chat"
msgstr "Exporter la discussion"
#: src/window.ui:988
#: src/window.ui:995
msgid "Clear Chat"
msgstr "Supprimer la discussion"
#: src/window.ui:995
#: src/window.ui:1002
msgid "Delete Chat"
msgstr "Supprimer la discussion"
#: src/window.ui:1011
#: src/window.ui:1018
msgid "From Existing Model"
msgstr "Depuis un modèle existant"
#: src/window.ui:1015
#: src/window.ui:1022
msgid "From GGUF File (Experimental)"
msgstr "Depuis un fichier GGUF (Experimental)"
#: src/window.ui:1061
#: src/window.ui:1068
msgid "Close application"
msgstr "Fermer l'application"
#: src/window.ui:1067
#: src/window.ui:1074
msgid "Import chat"
msgstr "Importer une discussion"
#: src/window.ui:1073
#: src/window.ui:1080
msgid "Clear chat"
msgstr "Supprimer la discussion"
#: src/window.ui:1085
#: src/window.ui:1092
msgid "New chat"
msgstr "Nouvelle discussion"
#: src/window.ui:1091
#: src/window.ui:1098
msgid "Show shortcuts window"
msgstr "Voir les raccourcis clavier"
#: src/window.ui:1098
#: src/window.ui:1105
msgid "Editor"
msgstr "Éditeur"
#: src/window.ui:1102
#: src/window.ui:1109
msgid "Copy"
msgstr "Copier"
#: src/window.ui:1108
#: src/window.ui:1115
msgid "Paste"
msgstr "Coller"
#: src/window.ui:1114
#: src/window.ui:1121
msgid "Insert new line"
msgstr "Ajouter une ligne"
#~ msgid "Message Received"
#~ msgstr "Message reçu"
#~ msgid "New message from '{}'"
#~ msgstr "Nouveau message depuis '{}'"
#~ msgid "Model"
#~ msgstr "Modèle"
#~ msgid "Send message"
#~ msgstr "Envoyer le message"

po/nb_NO.po
View File

@@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:06-0600\n"
"POT-Creation-Date: 2024-07-07 17:40-0600\n"
"PO-Revision-Date: 2024-07-02 18:24-0600\n"
"Last-Translator: Niklas Opsahl Halvorsen\n"
"Language-Team: Norwegian\n"
@@ -1104,6 +1104,511 @@ msgstr "Lim inn"
msgid "Insert new line"
msgstr "Sett inn ny linje"
#: src/available_models_descriptions.py:2
msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B."
msgstr ""
#: src/available_models_descriptions.py:3
msgid "Meta Llama 3: The most capable openly available LLM to date"
msgstr ""
#: src/available_models_descriptions.py:4
msgid "Qwen2 is a new series of large language models from Alibaba group"
msgstr ""
#: src/available_models_descriptions.py:5
msgid ""
"An open-source Mixture-of-Experts code language model that achieves "
"performance comparable to GPT4-Turbo in code-specific tasks."
msgstr ""
#: src/available_models_descriptions.py:6
msgid ""
"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art "
"open models by Microsoft."
msgstr ""
#: src/available_models_descriptions.py:7
msgid ""
"Aya 23, released by Cohere, is a new family of state-of-the-art, "
"multilingual models that support 23 languages."
msgstr ""
#: src/available_models_descriptions.py:8
msgid "The 7B model released by Mistral AI, updated to version 0.3."
msgstr ""
#: src/available_models_descriptions.py:9
msgid ""
"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in "
"8x7b and 8x22b parameter sizes."
msgstr ""
#: src/available_models_descriptions.py:10
msgid ""
"CodeGemma is a collection of powerful, lightweight models that can perform a "
"variety of coding tasks like fill-in-the-middle code completion, code "
"generation, natural language understanding, mathematical reasoning, and "
"instruction following."
msgstr ""
#: src/available_models_descriptions.py:11
msgid ""
"Command R is a Large Language Model optimized for conversational interaction "
"and long context tasks."
msgstr ""
#: src/available_models_descriptions.py:12
msgid ""
"Command R+ is a powerful, scalable large language model purpose-built to "
"excel at real-world enterprise use cases."
msgstr ""
#: src/available_models_descriptions.py:13
msgid ""
"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines "
"a vision encoder and Vicuna for general-purpose visual and language "
"understanding. Updated to version 1.6."
msgstr ""
#: src/available_models_descriptions.py:14
msgid ""
"Gemma is a family of lightweight, state-of-the-art open models built by "
"Google DeepMind. Updated to version 1.1"
msgstr ""
#: src/available_models_descriptions.py:15
msgid ""
"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from "
"0.5B to 110B parameters"
msgstr ""
#: src/available_models_descriptions.py:16
msgid ""
"Llama 2 is a collection of foundation language models ranging from 7B to 70B "
"parameters."
msgstr ""
#: src/available_models_descriptions.py:17
msgid ""
"A large language model that can use text prompts to generate and discuss "
"code."
msgstr ""
#: src/available_models_descriptions.py:18
msgid ""
"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of "
"experts models that excels at coding tasks. Created by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:19
msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope."
msgstr ""
#: src/available_models_descriptions.py:20
msgid ""
"DeepSeek Coder is a capable coding model trained on two trillion code and "
"natural language tokens."
msgstr ""
#: src/available_models_descriptions.py:21
msgid ""
"A high-performing open embedding model with a large token context window."
msgstr ""
#: src/available_models_descriptions.py:22
msgid ""
"Phi-2: a 2.7B language model by Microsoft Research that demonstrates "
"outstanding reasoning and language understanding capabilities."
msgstr ""
#: src/available_models_descriptions.py:23
msgid ""
"The uncensored Dolphin model based on Mistral that excels at coding tasks. "
"Updated to version 2.8."
msgstr ""
#: src/available_models_descriptions.py:24
msgid ""
"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the "
"Mistral 7B model using the OpenOrca dataset."
msgstr ""
#: src/available_models_descriptions.py:25
msgid ""
"A general-purpose model ranging from 3 billion parameters to 70 billion, "
"suitable for entry-level hardware."
msgstr ""
#: src/available_models_descriptions.py:26
msgid "State-of-the-art large embedding model from mixedbread.ai"
msgstr ""
#: src/available_models_descriptions.py:27
msgid ""
"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on "
"Llama 3 that has a variety of instruction, conversational, and coding skills."
msgstr ""
#: src/available_models_descriptions.py:28
msgid ""
"StarCoder2 is the next generation of transparently trained open code LLMs "
"that comes in three sizes: 3B, 7B and 15B parameters."
msgstr ""
#: src/available_models_descriptions.py:29
msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability."
msgstr ""
#: src/available_models_descriptions.py:30
msgid ""
"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models "
"that are trained to act as helpful assistants."
msgstr ""
#: src/available_models_descriptions.py:31
msgid "Yi 1.5 is a high-performing, bilingual language model."
msgstr ""
#: src/available_models_descriptions.py:32
msgid ""
"The powerful family of models by Nous Research that excels at scientific "
"discussion and coding tasks."
msgstr ""
#: src/available_models_descriptions.py:33
msgid ""
"General use chat model based on Llama and Llama 2 with 2K to 16K context "
"sizes."
msgstr ""
#: src/available_models_descriptions.py:34
msgid ""
"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on "
"Llama 2 uncensored by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:35
msgid ""
"The TinyLlama project is an open endeavor to train a compact 1.1B Llama "
"model on 3 trillion tokens."
msgstr ""
#: src/available_models_descriptions.py:36
msgid ""
"State of the art large language model from Microsoft AI with improved "
"performance on complex chat, multilingual, reasoning and agent use cases."
msgstr ""
#: src/available_models_descriptions.py:37
msgid ""
"StarCoder is a code generation model trained on 80+ programming languages."
msgstr ""
#: src/available_models_descriptions.py:38
msgid ""
"Codestral is Mistral AIs first-ever code model designed for code generation "
"tasks."
msgstr ""
#: src/available_models_descriptions.py:39
msgid ""
"A family of open-source models trained on a wide variety of data, surpassing "
"ChatGPT on various benchmarks. Updated to version 3.5-0106."
msgstr ""
#: src/available_models_descriptions.py:40
msgid ""
"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset "
"by Eric Hartford and based on TinyLlama."
msgstr ""
#: src/available_models_descriptions.py:41
msgid ""
"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully "
"open datasets."
msgstr ""
#: src/available_models_descriptions.py:42
msgid "State-of-the-art code generation model"
msgstr ""
#: src/available_models_descriptions.py:43
msgid ""
"Stable Code 3B is a coding model with instruct and code completion variants "
"on par with models such as Code Llama 7B that are 2.5x larger."
msgstr ""
#: src/available_models_descriptions.py:44
msgid ""
"A fine-tuned model based on Mistral with good coverage of domain and "
"language."
msgstr ""
#: src/available_models_descriptions.py:45
msgid "Model focused on math and logic problems"
msgstr ""
#: src/available_models_descriptions.py:46
msgid ""
"CodeQwen1.5 is a large language model pretrained on a large amount of code "
"data."
msgstr ""
#: src/available_models_descriptions.py:47
msgid "Code generation model based on Code Llama."
msgstr ""
#: src/available_models_descriptions.py:48
msgid ""
"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model "
"trained on multilingual data in English, Spanish, German, Italian, French, "
"Portuguese, and Dutch."
msgstr ""
#: src/available_models_descriptions.py:49
msgid ""
"A 7B and 15B uncensored variant of the Dolphin model family that excels at "
"coding, based on StarCoder2."
msgstr ""
#: src/available_models_descriptions.py:50
msgid "Embedding models on very large sentence level datasets."
msgstr ""
#: src/available_models_descriptions.py:51
msgid "General use models based on Llama and Llama 2 from Nous Research."
msgstr ""
#: src/available_models_descriptions.py:52
msgid ""
"Starling is a large language model trained by reinforcement learning from AI "
"feedback focused on improving chatbot helpfulness."
msgstr ""
#: src/available_models_descriptions.py:53
msgid ""
"SQLCoder is a code completion model fined-tuned on StarCoder for SQL "
"generation tasks"
msgstr ""
#: src/available_models_descriptions.py:54
msgid ""
"Orca 2 is built by Microsoft research, and are a fine-tuned version of "
"Meta's Llama 2 models. The model is designed to excel particularly in "
"reasoning."
msgstr ""
#: src/available_models_descriptions.py:55
msgid ""
"This model extends LLama-3 8B's context length from 8k to over 1m tokens."
msgstr ""
#: src/available_models_descriptions.py:56
msgid "An advanced language model crafted with 2 trillion bilingual tokens."
msgstr ""
#: src/available_models_descriptions.py:57
msgid "An extension of Llama 2 that supports a context of up to 128k tokens."
msgstr ""
#: src/available_models_descriptions.py:58
msgid ""
"A model from NVIDIA based on Llama 3 that excels at conversational question "
"answering (QA) and retrieval-augmented generation (RAG)."
msgstr ""
#: src/available_models_descriptions.py:59
msgid ""
"A compact, yet powerful 10.7B large language model designed for single-turn "
"conversation."
msgstr ""
#: src/available_models_descriptions.py:60
msgid ""
"Conversational model based on Llama 2 that performs competitively on various "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:61
msgid "A family of open foundation models by IBM for Code Intelligence"
msgstr ""
#: src/available_models_descriptions.py:62
msgid ""
"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language "
"model by Microsoft Research."
msgstr ""
#: src/available_models_descriptions.py:63
msgid "General use model based on Llama 2."
msgstr ""
#: src/available_models_descriptions.py:64
msgid ""
"A companion assistant trained in philosophy, psychology, and personal "
"relationships. Based on Mistral."
msgstr ""
#: src/available_models_descriptions.py:65
msgid ""
"Llama 2 based model fine tuned on an Orca-style dataset. Originally called "
"Free Willy."
msgstr ""
#: src/available_models_descriptions.py:66
msgid ""
"BakLLaVA is a multimodal model consisting of the Mistral 7B base model "
"augmented with the LLaVA architecture."
msgstr ""
#: src/available_models_descriptions.py:67
msgid ""
"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:68
msgid "Uncensored version of Wizard LM model"
msgstr ""
#: src/available_models_descriptions.py:69
msgid ""
"Fine-tuned Llama 2 model to answer medical questions based on an open source "
"medical dataset."
msgstr ""
#: src/available_models_descriptions.py:70
msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral."
msgstr ""
#: src/available_models_descriptions.py:71
msgid "An extension of Mistral to support context windows of 64K or 128K."
msgstr ""
#: src/available_models_descriptions.py:72
msgid ""
"A suite of text embedding models by Snowflake, optimized for performance."
msgstr ""
#: src/available_models_descriptions.py:73
msgid ""
"An expansion of Llama 2 that specializes in integrating both general "
"language understanding and domain-specific knowledge, particularly in "
"programming and mathematics."
msgstr ""
#: src/available_models_descriptions.py:74
msgid "Great code generation model based on Llama2."
msgstr ""
#: src/available_models_descriptions.py:75
msgid ""
"Open-source medical large language model adapted from Llama 2 to the medical "
"domain."
msgstr ""
#: src/available_models_descriptions.py:76
msgid ""
"moondream2 is a small vision language model designed to run efficiently on "
"edge devices."
msgstr ""
#: src/available_models_descriptions.py:77
msgid "Uncensored Llama2 based model with support for a 16K context window."
msgstr ""
#: src/available_models_descriptions.py:78
msgid ""
"Nexus Raven is a 13B instruction tuned model for function calling tasks."
msgstr ""
#: src/available_models_descriptions.py:79
msgid ""
"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic "
"instruction data using OSS-Instruct, a novel approach to enlightening LLMs "
"with open-source code snippets."
msgstr ""
#: src/available_models_descriptions.py:80
msgid "A strong, economical, and efficient Mixture-of-Experts language model."
msgstr ""
#: src/available_models_descriptions.py:81
msgid ""
"A lightweight chat model allowing accurate, and responsive output without "
"requiring high-end hardware."
msgstr ""
#: src/available_models_descriptions.py:82
msgid ""
"A high-performing code instruct model created by merging two existing code "
"models."
msgstr ""
#: src/available_models_descriptions.py:83
msgid "A new small LLaVA model fine-tuned from Phi 3 Mini."
msgstr ""
#: src/available_models_descriptions.py:84
msgid ""
"MistralLite is a fine-tuned model based on Mistral with enhanced "
"capabilities of processing long contexts."
msgstr ""
#: src/available_models_descriptions.py:85
msgid ""
"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by "
"MelodysDreamj."
msgstr ""
#: src/available_models_descriptions.py:86
msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station."
msgstr ""
#: src/available_models_descriptions.py:87
msgid ""
"A language model created by combining two fine-tuned Llama 2 70B models into "
"one."
msgstr ""
#: src/available_models_descriptions.py:88
msgid ""
"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by "
"interleaving the model with itself."
msgstr ""
#: src/available_models_descriptions.py:89
msgid ""
"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. "
"Designed for chat and code generation."
msgstr ""
#: src/available_models_descriptions.py:90
msgid ""
"A top-performing mixture of experts model, fine-tuned with high-quality data."
msgstr ""
#: src/available_models_descriptions.py:91
msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr."
msgstr ""
#: src/available_models_descriptions.py:92
msgid "DBRX is an open, general-purpose LLM created by Databricks."
msgstr ""
#: src/available_models_descriptions.py:93
msgid ""
"Falcon2 is an 11B parameters causal decoder-only model built by TII and "
"trained over 5T tokens."
msgstr ""
#: src/available_models_descriptions.py:94
msgid ""
"A robust conversational model designed to be used for both chat and instruct "
"use cases."
msgstr ""
#~ msgid "Message Received"
#~ msgstr "Melding Mottatt"

1114
po/nb_NO.po~ Normal file

File diff suppressed because it is too large

po/pt_BR.po
View File

@@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:04-0600\n"
"POT-Creation-Date: 2024-07-07 17:40-0600\n"
"PO-Revision-Date: 2024-05-23 23:29-0600\n"
"Last-Translator: Daimar Stein <daimarstein@pm.me>\n"
"Language-Team: Brazilian Portuguese\n"
@@ -1166,6 +1166,511 @@ msgstr "Colar"
msgid "Insert new line"
msgstr ""
#: src/available_models_descriptions.py:2
msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B."
msgstr ""
#: src/available_models_descriptions.py:3
msgid "Meta Llama 3: The most capable openly available LLM to date"
msgstr ""
#: src/available_models_descriptions.py:4
msgid "Qwen2 is a new series of large language models from Alibaba group"
msgstr ""
#: src/available_models_descriptions.py:5
msgid ""
"An open-source Mixture-of-Experts code language model that achieves "
"performance comparable to GPT4-Turbo in code-specific tasks."
msgstr ""
#: src/available_models_descriptions.py:6
msgid ""
"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art "
"open models by Microsoft."
msgstr ""
#: src/available_models_descriptions.py:7
msgid ""
"Aya 23, released by Cohere, is a new family of state-of-the-art, "
"multilingual models that support 23 languages."
msgstr ""
#: src/available_models_descriptions.py:8
msgid "The 7B model released by Mistral AI, updated to version 0.3."
msgstr ""
#: src/available_models_descriptions.py:9
msgid ""
"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in "
"8x7b and 8x22b parameter sizes."
msgstr ""
#: src/available_models_descriptions.py:10
msgid ""
"CodeGemma is a collection of powerful, lightweight models that can perform a "
"variety of coding tasks like fill-in-the-middle code completion, code "
"generation, natural language understanding, mathematical reasoning, and "
"instruction following."
msgstr ""
#: src/available_models_descriptions.py:11
msgid ""
"Command R is a Large Language Model optimized for conversational interaction "
"and long context tasks."
msgstr ""
#: src/available_models_descriptions.py:12
msgid ""
"Command R+ is a powerful, scalable large language model purpose-built to "
"excel at real-world enterprise use cases."
msgstr ""
#: src/available_models_descriptions.py:13
msgid ""
"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines "
"a vision encoder and Vicuna for general-purpose visual and language "
"understanding. Updated to version 1.6."
msgstr ""
#: src/available_models_descriptions.py:14
msgid ""
"Gemma is a family of lightweight, state-of-the-art open models built by "
"Google DeepMind. Updated to version 1.1"
msgstr ""
#: src/available_models_descriptions.py:15
msgid ""
"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from "
"0.5B to 110B parameters"
msgstr ""
#: src/available_models_descriptions.py:16
msgid ""
"Llama 2 is a collection of foundation language models ranging from 7B to 70B "
"parameters."
msgstr ""
#: src/available_models_descriptions.py:17
msgid ""
"A large language model that can use text prompts to generate and discuss "
"code."
msgstr ""
#: src/available_models_descriptions.py:18
msgid ""
"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of "
"experts models that excels at coding tasks. Created by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:19
msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope."
msgstr ""
#: src/available_models_descriptions.py:20
msgid ""
"DeepSeek Coder is a capable coding model trained on two trillion code and "
"natural language tokens."
msgstr ""
#: src/available_models_descriptions.py:21
msgid ""
"A high-performing open embedding model with a large token context window."
msgstr ""
#: src/available_models_descriptions.py:22
msgid ""
"Phi-2: a 2.7B language model by Microsoft Research that demonstrates "
"outstanding reasoning and language understanding capabilities."
msgstr ""
#: src/available_models_descriptions.py:23
msgid ""
"The uncensored Dolphin model based on Mistral that excels at coding tasks. "
"Updated to version 2.8."
msgstr ""
#: src/available_models_descriptions.py:24
msgid ""
"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the "
"Mistral 7B model using the OpenOrca dataset."
msgstr ""
#: src/available_models_descriptions.py:25
msgid ""
"A general-purpose model ranging from 3 billion parameters to 70 billion, "
"suitable for entry-level hardware."
msgstr ""
#: src/available_models_descriptions.py:26
msgid "State-of-the-art large embedding model from mixedbread.ai"
msgstr ""
#: src/available_models_descriptions.py:27
msgid ""
"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on "
"Llama 3 that has a variety of instruction, conversational, and coding skills."
msgstr ""
#: src/available_models_descriptions.py:28
msgid ""
"StarCoder2 is the next generation of transparently trained open code LLMs "
"that comes in three sizes: 3B, 7B and 15B parameters."
msgstr ""
#: src/available_models_descriptions.py:29
msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability."
msgstr ""
#: src/available_models_descriptions.py:30
msgid ""
"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models "
"that are trained to act as helpful assistants."
msgstr ""
#: src/available_models_descriptions.py:31
msgid "Yi 1.5 is a high-performing, bilingual language model."
msgstr ""
#: src/available_models_descriptions.py:32
msgid ""
"The powerful family of models by Nous Research that excels at scientific "
"discussion and coding tasks."
msgstr ""
#: src/available_models_descriptions.py:33
msgid ""
"General use chat model based on Llama and Llama 2 with 2K to 16K context "
"sizes."
msgstr ""
#: src/available_models_descriptions.py:34
msgid ""
"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on "
"Llama 2 uncensored by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:35
msgid ""
"The TinyLlama project is an open endeavor to train a compact 1.1B Llama "
"model on 3 trillion tokens."
msgstr ""
#: src/available_models_descriptions.py:36
msgid ""
"State of the art large language model from Microsoft AI with improved "
"performance on complex chat, multilingual, reasoning and agent use cases."
msgstr ""
#: src/available_models_descriptions.py:37
msgid ""
"StarCoder is a code generation model trained on 80+ programming languages."
msgstr ""
#: src/available_models_descriptions.py:38
msgid ""
"Codestral is Mistral AIs first-ever code model designed for code generation "
"tasks."
msgstr ""
#: src/available_models_descriptions.py:39
msgid ""
"A family of open-source models trained on a wide variety of data, surpassing "
"ChatGPT on various benchmarks. Updated to version 3.5-0106."
msgstr ""
#: src/available_models_descriptions.py:40
msgid ""
"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset "
"by Eric Hartford and based on TinyLlama."
msgstr ""
#: src/available_models_descriptions.py:41
msgid ""
"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully "
"open datasets."
msgstr ""
#: src/available_models_descriptions.py:42
msgid "State-of-the-art code generation model"
msgstr ""
#: src/available_models_descriptions.py:43
msgid ""
"Stable Code 3B is a coding model with instruct and code completion variants "
"on par with models such as Code Llama 7B that are 2.5x larger."
msgstr ""
#: src/available_models_descriptions.py:44
msgid ""
"A fine-tuned model based on Mistral with good coverage of domain and "
"language."
msgstr ""
#: src/available_models_descriptions.py:45
msgid "Model focused on math and logic problems"
msgstr ""
#: src/available_models_descriptions.py:46
msgid ""
"CodeQwen1.5 is a large language model pretrained on a large amount of code "
"data."
msgstr ""
#: src/available_models_descriptions.py:47
msgid "Code generation model based on Code Llama."
msgstr ""
#: src/available_models_descriptions.py:48
msgid ""
"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model "
"trained on multilingual data in English, Spanish, German, Italian, French, "
"Portuguese, and Dutch."
msgstr ""
#: src/available_models_descriptions.py:49
msgid ""
"A 7B and 15B uncensored variant of the Dolphin model family that excels at "
"coding, based on StarCoder2."
msgstr ""
#: src/available_models_descriptions.py:50
msgid "Embedding models on very large sentence level datasets."
msgstr ""
#: src/available_models_descriptions.py:51
msgid "General use models based on Llama and Llama 2 from Nous Research."
msgstr ""
#: src/available_models_descriptions.py:52
msgid ""
"Starling is a large language model trained by reinforcement learning from AI "
"feedback focused on improving chatbot helpfulness."
msgstr ""
#: src/available_models_descriptions.py:53
msgid ""
"SQLCoder is a code completion model fined-tuned on StarCoder for SQL "
"generation tasks"
msgstr ""
#: src/available_models_descriptions.py:54
msgid ""
"Orca 2 is built by Microsoft research, and are a fine-tuned version of "
"Meta's Llama 2 models. The model is designed to excel particularly in "
"reasoning."
msgstr ""
#: src/available_models_descriptions.py:55
msgid ""
"This model extends LLama-3 8B's context length from 8k to over 1m tokens."
msgstr ""
#: src/available_models_descriptions.py:56
msgid "An advanced language model crafted with 2 trillion bilingual tokens."
msgstr ""
#: src/available_models_descriptions.py:57
msgid "An extension of Llama 2 that supports a context of up to 128k tokens."
msgstr ""
#: src/available_models_descriptions.py:58
msgid ""
"A model from NVIDIA based on Llama 3 that excels at conversational question "
"answering (QA) and retrieval-augmented generation (RAG)."
msgstr ""
#: src/available_models_descriptions.py:59
msgid ""
"A compact, yet powerful 10.7B large language model designed for single-turn "
"conversation."
msgstr ""
#: src/available_models_descriptions.py:60
msgid ""
"Conversational model based on Llama 2 that performs competitively on various "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:61
msgid "A family of open foundation models by IBM for Code Intelligence"
msgstr ""
#: src/available_models_descriptions.py:62
msgid ""
"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language "
"model by Microsoft Research."
msgstr ""
#: src/available_models_descriptions.py:63
msgid "General use model based on Llama 2."
msgstr ""
#: src/available_models_descriptions.py:64
msgid ""
"A companion assistant trained in philosophy, psychology, and personal "
"relationships. Based on Mistral."
msgstr ""
#: src/available_models_descriptions.py:65
msgid ""
"Llama 2 based model fine tuned on an Orca-style dataset. Originally called "
"Free Willy."
msgstr ""
#: src/available_models_descriptions.py:66
msgid ""
"BakLLaVA is a multimodal model consisting of the Mistral 7B base model "
"augmented with the LLaVA architecture."
msgstr ""
#: src/available_models_descriptions.py:67
msgid ""
"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:68
msgid "Uncensored version of Wizard LM model"
msgstr ""
#: src/available_models_descriptions.py:69
msgid ""
"Fine-tuned Llama 2 model to answer medical questions based on an open source "
"medical dataset."
msgstr ""
#: src/available_models_descriptions.py:70
msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral."
msgstr ""
#: src/available_models_descriptions.py:71
msgid "An extension of Mistral to support context windows of 64K or 128K."
msgstr ""
#: src/available_models_descriptions.py:72
msgid ""
"A suite of text embedding models by Snowflake, optimized for performance."
msgstr ""
#: src/available_models_descriptions.py:73
msgid ""
"An expansion of Llama 2 that specializes in integrating both general "
"language understanding and domain-specific knowledge, particularly in "
"programming and mathematics."
msgstr ""
#: src/available_models_descriptions.py:74
msgid "Great code generation model based on Llama2."
msgstr ""
#: src/available_models_descriptions.py:75
msgid ""
"Open-source medical large language model adapted from Llama 2 to the medical "
"domain."
msgstr ""
#: src/available_models_descriptions.py:76
msgid ""
"moondream2 is a small vision language model designed to run efficiently on "
"edge devices."
msgstr ""
#: src/available_models_descriptions.py:77
msgid "Uncensored Llama2 based model with support for a 16K context window."
msgstr ""
#: src/available_models_descriptions.py:78
msgid ""
"Nexus Raven is a 13B instruction tuned model for function calling tasks."
msgstr ""
#: src/available_models_descriptions.py:79
msgid ""
"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic "
"instruction data using OSS-Instruct, a novel approach to enlightening LLMs "
"with open-source code snippets."
msgstr ""
#: src/available_models_descriptions.py:80
msgid "A strong, economical, and efficient Mixture-of-Experts language model."
msgstr ""
#: src/available_models_descriptions.py:81
msgid ""
"A lightweight chat model allowing accurate, and responsive output without "
"requiring high-end hardware."
msgstr ""
#: src/available_models_descriptions.py:82
msgid ""
"A high-performing code instruct model created by merging two existing code "
"models."
msgstr ""
#: src/available_models_descriptions.py:83
msgid "A new small LLaVA model fine-tuned from Phi 3 Mini."
msgstr ""
#: src/available_models_descriptions.py:84
msgid ""
"MistralLite is a fine-tuned model based on Mistral with enhanced "
"capabilities of processing long contexts."
msgstr ""
#: src/available_models_descriptions.py:85
msgid ""
"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by "
"MelodysDreamj."
msgstr ""
#: src/available_models_descriptions.py:86
msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station."
msgstr ""
#: src/available_models_descriptions.py:87
msgid ""
"A language model created by combining two fine-tuned Llama 2 70B models into "
"one."
msgstr ""
#: src/available_models_descriptions.py:88
msgid ""
"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by "
"interleaving the model with itself."
msgstr ""
#: src/available_models_descriptions.py:89
msgid ""
"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. "
"Designed for chat and code generation."
msgstr ""
#: src/available_models_descriptions.py:90
msgid ""
"A top-performing mixture of experts model, fine-tuned with high-quality data."
msgstr ""
#: src/available_models_descriptions.py:91
msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr."
msgstr ""
#: src/available_models_descriptions.py:92
msgid "DBRX is an open, general-purpose LLM created by Databricks."
msgstr ""
#: src/available_models_descriptions.py:93
msgid ""
"Falcon2 is an 11B parameters causal decoder-only model built by TII and "
"trained over 5T tokens."
msgstr ""
#: src/available_models_descriptions.py:94
msgid ""
"A robust conversational model designed to be used for both chat and instruct "
"use cases."
msgstr ""
#, fuzzy
#~ msgid "Model"
#~ msgstr "Parar Modelo"

po/pt_BR.po~

@@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-02 18:21-0600\n"
"POT-Creation-Date: 2024-07-07 16:04-0600\n"
"PO-Revision-Date: 2024-05-23 23:29-0600\n"
"Last-Translator: Daimar Stein <daimarstein@pm.me>\n"
"Language-Team: Brazilian Portuguese\n"
@@ -70,7 +70,7 @@ msgstr ""
msgid "Import and export chats"
msgstr "Importe e exporte conversas"
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860
msgid "Disclaimer"
msgstr "Aviso Legal"
@@ -714,7 +714,7 @@ msgstr "0.1.1 Lançamento Estável"
msgid "This is the first public version of Alpaca"
msgstr "Essa é a primeira versão pública de Alpaca"
#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41
#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41
msgid "New Chat"
msgstr "Nova Conversa"
@@ -812,72 +812,72 @@ msgstr "Conversa exportada com sucesso"
msgid "Chat imported successfully"
msgstr "Conversa importada com sucesso"
#: src/window.py:278
#: src/window.py:280
msgid "Close"
msgstr ""
#: src/window.py:279 src/window.ui:806
#: src/window.py:281 src/window.ui:813
msgid "Next"
msgstr "Próximo"
#: src/window.py:329
#: src/window.py:332
#, fuzzy
msgid "Pulling in the background..."
msgstr "Executar em segundo plano"
#: src/window.py:381
#: src/window.py:384
msgid "Stop Creating '{}'"
msgstr ""
#: src/window.py:418
#: src/window.py:421
#, fuzzy
msgid "image"
msgstr "Imagem"
#: src/window.py:588
#: src/window.py:593
#, fuzzy
msgid "Remove Message"
msgstr "Remover Imagem"
#: src/window.py:593 src/window.py:841
#: src/window.py:598 src/window.py:869
#, fuzzy
msgid "Copy Message"
msgstr "Enviar Mensagem"
#: src/window.py:598
#: src/window.py:603
#, fuzzy
msgid "Edit Message"
msgstr "Enviar Mensagem"
#: src/window.py:729
#: src/window.py:661
msgid "Missing Image"
msgstr ""
#: src/window.py:677
msgid "Missing image"
msgstr ""
#: src/window.py:757
msgid "Remove '{} ({})'"
msgstr ""
#: src/window.py:882
msgid "Message Received"
msgstr ""
#: src/window.py:882
msgid "New message from '{}'"
msgstr ""
#: src/window.py:939
#: src/window.py:969
msgid "Task Complete"
msgstr "Tarefa Concluída"
#: src/window.py:939
#: src/window.py:969
msgid "Model '{}' pulled successfully."
msgstr "O modelo '{}' foi baixado com sucesso"
#: src/window.py:944
#: src/window.py:974
msgid "Pull Model Error"
msgstr "Erro ao Baixar Modelo"
#: src/window.py:944
#: src/window.py:974
msgid "Failed to pull model '{}' due to network error."
msgstr "Não foi possível baixar o modelo '{}' devido a um erro de rede."
#: src/window.py:978
#: src/window.py:1008
msgid "Stop Pulling '{} ({})'"
msgstr ""
@@ -889,7 +889,7 @@ msgstr "Menu"
msgid "Toggle Sidebar"
msgstr "Alternar barra lateral"
#: src/window.ui:107 src/window.ui:595
#: src/window.ui:107 src/window.ui:587
#, fuzzy
msgid "Manage Models"
msgstr "Gerenciar modelos"
@@ -903,29 +903,29 @@ msgstr "Menu"
msgid "Attach File"
msgstr ""
#: src/window.ui:238 src/window.ui:1120
#: src/window.ui:242 src/window.ui:1127
msgid "Send Message"
msgstr "Enviar Mensagem"
#: src/window.ui:286 src/window.ui:965 src/window.ui:1079
#: src/window.ui:290 src/window.ui:972 src/window.ui:1086
msgid "Preferences"
msgstr "Preferências"
#: src/window.ui:289 src/window.ui:1057
#: src/window.ui:293 src/window.ui:1064
msgid "General"
msgstr "Geral"
#: src/window.ui:297
#: src/window.ui:299
#, fuzzy
msgid "Use Remote Connection to Ollama"
msgstr "Gerencia uma conexão remota com Ollama"
#: src/window.ui:303
#: src/window.ui:305
#, fuzzy
msgid "URL of Remote Instance"
msgstr "URL da instância remota"
#: src/window.ui:310
#: src/window.ui:312
msgid "Bearer Token (Optional)"
msgstr ""
@@ -934,125 +934,126 @@ msgstr ""
msgid "Run Alpaca In Background"
msgstr "Executar em segundo plano"
#: src/window.ui:331
#, fuzzy
msgid "Model"
msgstr "Parar Modelo"
#: src/window.ui:341
#: src/window.ui:333
#, fuzzy
msgid "Temperature"
msgstr "Funcionalidades"
#: src/window.ui:342
#: src/window.ui:334
msgid ""
"The temperature of the model. Increasing the temperature will make the model "
"answer more creatively. (Default: 0.8)"
msgstr ""
#: src/window.ui:357
#: src/window.ui:349
msgid "Seed"
msgstr ""
#: src/window.ui:358
#: src/window.ui:350
msgid ""
"Sets the random number seed to use for generation. Setting this to a "
"specific number will make the model generate the same text for the same "
"prompt. (Default: 0 (random))"
msgstr ""
#: src/window.ui:372
#: src/window.ui:364
msgid "Keep Alive Time"
msgstr ""
#: src/window.ui:373
#: src/window.ui:365
msgid ""
"Controls how long the model will stay loaded into memory following the "
"request in minutes (Default: 5)"
msgstr ""
#: src/window.ui:389
#: src/window.ui:381
#, fuzzy
msgid "Ollama Instance"
msgstr "Um cliente Ollama"
#: src/window.ui:393
#: src/window.ui:385
#, fuzzy
msgid "Ollama Overrides"
msgstr "Site do Ollama"
#: src/window.ui:394
#: src/window.ui:386
msgid ""
"Manage the arguments used on Ollama, any changes on this page only applies "
"to the integrated instance, the instance will restart if you make changes."
msgstr ""
#: src/window.ui:477
#: src/window.ui:469
msgid "Create"
msgstr "Criar"
#: src/window.ui:490 src/window.ui:605
#: src/window.ui:482 src/window.ui:597
#, fuzzy
msgid "Create Model"
msgstr "Criar"
#: src/window.ui:516
#: src/window.ui:508
msgid "Base"
msgstr ""
#: src/window.ui:534
#: src/window.ui:526
msgid "Name"
msgstr ""
#: src/window.ui:540
#: src/window.ui:532
msgid "Context"
msgstr ""
#: src/window.ui:555
#: src/window.ui:547
#, fuzzy
msgid "Template"
msgstr "Funcionalidades"
#: src/window.ui:561
#: src/window.ui:553
msgid ""
"Some models require a specific template. Please visit the model's website "
"for more information if you're unsure."
msgstr ""
#: src/window.ui:612
#: src/window.ui:604
#, fuzzy
msgid "Search Model"
msgstr "Funcionalidades"
#: src/window.ui:672
#: src/window.ui:664
msgid "No Models Found"
msgstr ""
#: src/window.ui:673
#: src/window.ui:665
msgid "Try a different search"
msgstr ""
#: src/window.ui:738
#: src/window.ui:708
msgid ""
"By downloading this model you accept the license agreement available on the "
"model's website."
msgstr ""
#: src/window.ui:745
msgid "Open with Default App"
msgstr ""
#: src/window.ui:790
#: src/window.ui:797
msgid "Previous"
msgstr "Anterior"
#: src/window.ui:833
#: src/window.ui:840
msgid "Welcome to Alpaca"
msgstr "Bem-vindo(a) a Alpaca"
#: src/window.ui:834
#: src/window.ui:841
msgid "Powered by Ollama"
msgstr "Com tecnologia Ollama"
#: src/window.ui:837
#: src/window.ui:844
msgid "Ollama Website"
msgstr "Site do Ollama"
#: src/window.ui:854
#: src/window.ui:861
msgid ""
"Alpaca and its developers are not liable for any damages to devices or "
"software resulting from the execution of code generated by an AI model. "
@@ -1063,108 +1064,112 @@ msgstr ""
"por um modelo de IA. Por favor, tenha cuidado e revise o código com cuidado "
"antes de executá-lo."
#: src/window.ui:865
#: src/window.ui:872
#, fuzzy
msgid "Featured Models"
msgstr "Funcionalidades"
#: src/window.ui:866
#: src/window.ui:873
msgid ""
"Alpaca works locally on your device, to start chatting you'll need an AI "
"model, you can either pull models from this list or the 'Manage Models' menu "
"later."
msgstr ""
#: src/window.ui:876
#: src/window.ui:883
msgid "Built by Meta"
msgstr ""
#: src/window.ui:894
#: src/window.ui:901
msgid "Built by Google DeepMind"
msgstr ""
#: src/window.ui:912
#: src/window.ui:919
msgid "Built by Microsoft"
msgstr ""
#: src/window.ui:930
#: src/window.ui:937
msgid "Multimodal AI with image recognition"
msgstr ""
#: src/window.ui:959
#: src/window.ui:966
#, fuzzy
msgid "Import Chat"
msgstr "Importar conversa"
#: src/window.ui:969
#: src/window.ui:976
msgid "Keyboard Shortcuts"
msgstr "Atalhos de Teclado"
#: src/window.ui:973
#: src/window.ui:980
msgid "About Alpaca"
msgstr "Sobre Alpaca"
#: src/window.ui:980 src/window.ui:999
#: src/window.ui:987 src/window.ui:1006
msgid "Rename Chat"
msgstr "Renomear Conversa"
#: src/window.ui:984 src/window.ui:1003
#: src/window.ui:991 src/window.ui:1010
#, fuzzy
msgid "Export Chat"
msgstr "Importar conversa"
#: src/window.ui:988
#: src/window.ui:995
msgid "Clear Chat"
msgstr "Limpar Conversa"
#: src/window.ui:995
#: src/window.ui:1002
msgid "Delete Chat"
msgstr "Excluir Conversa"
#: src/window.ui:1011
#: src/window.ui:1018
msgid "From Existing Model"
msgstr ""
#: src/window.ui:1015
#: src/window.ui:1022
msgid "From GGUF File (Experimental)"
msgstr ""
#: src/window.ui:1061
#: src/window.ui:1068
msgid "Close application"
msgstr "Fechar aplicativo"
#: src/window.ui:1067
#: src/window.ui:1074
msgid "Import chat"
msgstr "Importar conversa"
#: src/window.ui:1073
#: src/window.ui:1080
msgid "Clear chat"
msgstr "Limpar chat"
#: src/window.ui:1085
#: src/window.ui:1092
msgid "New chat"
msgstr "Nova conversa"
#: src/window.ui:1091
#: src/window.ui:1098
msgid "Show shortcuts window"
msgstr "Mostrar janela de atalhos"
#: src/window.ui:1098
#: src/window.ui:1105
msgid "Editor"
msgstr "Editor"
#: src/window.ui:1102
#: src/window.ui:1109
msgid "Copy"
msgstr "Copiar"
#: src/window.ui:1108
#: src/window.ui:1115
msgid "Paste"
msgstr "Colar"
#: src/window.ui:1114
#: src/window.ui:1121
msgid "Insert new line"
msgstr ""
#, fuzzy
#~ msgid "Model"
#~ msgstr "Parar Modelo"
#, fuzzy
#~ msgid "Send message"
#~ msgstr "Enviar Mensagem"

po/ru.po

@@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:04-0600\n"
"POT-Creation-Date: 2024-07-07 17:40-0600\n"
"PO-Revision-Date: 2024-05-25 10:44+0800\n"
"Last-Translator: (YOUR NAME) <(EMAIL OPTIONAL)>\n"
"Language-Team: Russian\n"
@@ -1114,6 +1114,511 @@ msgstr "Вставить"
msgid "Insert new line"
msgstr "Вставить новую строку"
#: src/available_models_descriptions.py:2
msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B."
msgstr ""
#: src/available_models_descriptions.py:3
msgid "Meta Llama 3: The most capable openly available LLM to date"
msgstr ""
#: src/available_models_descriptions.py:4
msgid "Qwen2 is a new series of large language models from Alibaba group"
msgstr ""
#: src/available_models_descriptions.py:5
msgid ""
"An open-source Mixture-of-Experts code language model that achieves "
"performance comparable to GPT4-Turbo in code-specific tasks."
msgstr ""
#: src/available_models_descriptions.py:6
msgid ""
"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art "
"open models by Microsoft."
msgstr ""
#: src/available_models_descriptions.py:7
msgid ""
"Aya 23, released by Cohere, is a new family of state-of-the-art, "
"multilingual models that support 23 languages."
msgstr ""
#: src/available_models_descriptions.py:8
msgid "The 7B model released by Mistral AI, updated to version 0.3."
msgstr ""
#: src/available_models_descriptions.py:9
msgid ""
"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in "
"8x7b and 8x22b parameter sizes."
msgstr ""
#: src/available_models_descriptions.py:10
msgid ""
"CodeGemma is a collection of powerful, lightweight models that can perform a "
"variety of coding tasks like fill-in-the-middle code completion, code "
"generation, natural language understanding, mathematical reasoning, and "
"instruction following."
msgstr ""
#: src/available_models_descriptions.py:11
msgid ""
"Command R is a Large Language Model optimized for conversational interaction "
"and long context tasks."
msgstr ""
#: src/available_models_descriptions.py:12
msgid ""
"Command R+ is a powerful, scalable large language model purpose-built to "
"excel at real-world enterprise use cases."
msgstr ""
#: src/available_models_descriptions.py:13
msgid ""
"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines "
"a vision encoder and Vicuna for general-purpose visual and language "
"understanding. Updated to version 1.6."
msgstr ""
#: src/available_models_descriptions.py:14
msgid ""
"Gemma is a family of lightweight, state-of-the-art open models built by "
"Google DeepMind. Updated to version 1.1"
msgstr ""
#: src/available_models_descriptions.py:15
msgid ""
"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from "
"0.5B to 110B parameters"
msgstr ""
#: src/available_models_descriptions.py:16
msgid ""
"Llama 2 is a collection of foundation language models ranging from 7B to 70B "
"parameters."
msgstr ""
#: src/available_models_descriptions.py:17
msgid ""
"A large language model that can use text prompts to generate and discuss "
"code."
msgstr ""
#: src/available_models_descriptions.py:18
msgid ""
"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of "
"experts models that excels at coding tasks. Created by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:19
msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope."
msgstr ""
#: src/available_models_descriptions.py:20
msgid ""
"DeepSeek Coder is a capable coding model trained on two trillion code and "
"natural language tokens."
msgstr ""
#: src/available_models_descriptions.py:21
msgid ""
"A high-performing open embedding model with a large token context window."
msgstr ""
#: src/available_models_descriptions.py:22
msgid ""
"Phi-2: a 2.7B language model by Microsoft Research that demonstrates "
"outstanding reasoning and language understanding capabilities."
msgstr ""
#: src/available_models_descriptions.py:23
msgid ""
"The uncensored Dolphin model based on Mistral that excels at coding tasks. "
"Updated to version 2.8."
msgstr ""
#: src/available_models_descriptions.py:24
msgid ""
"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the "
"Mistral 7B model using the OpenOrca dataset."
msgstr ""
#: src/available_models_descriptions.py:25
msgid ""
"A general-purpose model ranging from 3 billion parameters to 70 billion, "
"suitable for entry-level hardware."
msgstr ""
#: src/available_models_descriptions.py:26
msgid "State-of-the-art large embedding model from mixedbread.ai"
msgstr ""
#: src/available_models_descriptions.py:27
msgid ""
"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on "
"Llama 3 that has a variety of instruction, conversational, and coding skills."
msgstr ""
#: src/available_models_descriptions.py:28
msgid ""
"StarCoder2 is the next generation of transparently trained open code LLMs "
"that comes in three sizes: 3B, 7B and 15B parameters."
msgstr ""
#: src/available_models_descriptions.py:29
msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability."
msgstr ""
#: src/available_models_descriptions.py:30
msgid ""
"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models "
"that are trained to act as helpful assistants."
msgstr ""
#: src/available_models_descriptions.py:31
msgid "Yi 1.5 is a high-performing, bilingual language model."
msgstr ""
#: src/available_models_descriptions.py:32
msgid ""
"The powerful family of models by Nous Research that excels at scientific "
"discussion and coding tasks."
msgstr ""
#: src/available_models_descriptions.py:33
msgid ""
"General use chat model based on Llama and Llama 2 with 2K to 16K context "
"sizes."
msgstr ""
#: src/available_models_descriptions.py:34
msgid ""
"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on "
"Llama 2 uncensored by Eric Hartford."
msgstr ""
#: src/available_models_descriptions.py:35
msgid ""
"The TinyLlama project is an open endeavor to train a compact 1.1B Llama "
"model on 3 trillion tokens."
msgstr ""
#: src/available_models_descriptions.py:36
msgid ""
"State of the art large language model from Microsoft AI with improved "
"performance on complex chat, multilingual, reasoning and agent use cases."
msgstr ""
#: src/available_models_descriptions.py:37
msgid ""
"StarCoder is a code generation model trained on 80+ programming languages."
msgstr ""
#: src/available_models_descriptions.py:38
msgid ""
"Codestral is Mistral AIs first-ever code model designed for code generation "
"tasks."
msgstr ""
#: src/available_models_descriptions.py:39
msgid ""
"A family of open-source models trained on a wide variety of data, surpassing "
"ChatGPT on various benchmarks. Updated to version 3.5-0106."
msgstr ""
#: src/available_models_descriptions.py:40
msgid ""
"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset "
"by Eric Hartford and based on TinyLlama."
msgstr ""
#: src/available_models_descriptions.py:41
msgid ""
"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully "
"open datasets."
msgstr ""
#: src/available_models_descriptions.py:42
msgid "State-of-the-art code generation model"
msgstr ""
#: src/available_models_descriptions.py:43
msgid ""
"Stable Code 3B is a coding model with instruct and code completion variants "
"on par with models such as Code Llama 7B that are 2.5x larger."
msgstr ""
#: src/available_models_descriptions.py:44
msgid ""
"A fine-tuned model based on Mistral with good coverage of domain and "
"language."
msgstr ""
#: src/available_models_descriptions.py:45
msgid "Model focused on math and logic problems"
msgstr ""
#: src/available_models_descriptions.py:46
msgid ""
"CodeQwen1.5 is a large language model pretrained on a large amount of code "
"data."
msgstr ""
#: src/available_models_descriptions.py:47
msgid "Code generation model based on Code Llama."
msgstr ""
#: src/available_models_descriptions.py:48
msgid ""
"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model "
"trained on multilingual data in English, Spanish, German, Italian, French, "
"Portuguese, and Dutch."
msgstr ""
#: src/available_models_descriptions.py:49
msgid ""
"A 7B and 15B uncensored variant of the Dolphin model family that excels at "
"coding, based on StarCoder2."
msgstr ""
#: src/available_models_descriptions.py:50
msgid "Embedding models on very large sentence level datasets."
msgstr ""
#: src/available_models_descriptions.py:51
msgid "General use models based on Llama and Llama 2 from Nous Research."
msgstr ""
#: src/available_models_descriptions.py:52
msgid ""
"Starling is a large language model trained by reinforcement learning from AI "
"feedback focused on improving chatbot helpfulness."
msgstr ""
#: src/available_models_descriptions.py:53
msgid ""
"SQLCoder is a code completion model fined-tuned on StarCoder for SQL "
"generation tasks"
msgstr ""
#: src/available_models_descriptions.py:54
msgid ""
"Orca 2 is built by Microsoft research, and are a fine-tuned version of "
"Meta's Llama 2 models. The model is designed to excel particularly in "
"reasoning."
msgstr ""
#: src/available_models_descriptions.py:55
msgid ""
"This model extends LLama-3 8B's context length from 8k to over 1m tokens."
msgstr ""
#: src/available_models_descriptions.py:56
msgid "An advanced language model crafted with 2 trillion bilingual tokens."
msgstr ""
#: src/available_models_descriptions.py:57
msgid "An extension of Llama 2 that supports a context of up to 128k tokens."
msgstr ""
#: src/available_models_descriptions.py:58
msgid ""
"A model from NVIDIA based on Llama 3 that excels at conversational question "
"answering (QA) and retrieval-augmented generation (RAG)."
msgstr ""
#: src/available_models_descriptions.py:59
msgid ""
"A compact, yet powerful 10.7B large language model designed for single-turn "
"conversation."
msgstr ""
#: src/available_models_descriptions.py:60
msgid ""
"Conversational model based on Llama 2 that performs competitively on various "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:61
msgid "A family of open foundation models by IBM for Code Intelligence"
msgstr ""
#: src/available_models_descriptions.py:62
msgid ""
"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language "
"model by Microsoft Research."
msgstr ""
#: src/available_models_descriptions.py:63
msgid "General use model based on Llama 2."
msgstr ""
#: src/available_models_descriptions.py:64
msgid ""
"A companion assistant trained in philosophy, psychology, and personal "
"relationships. Based on Mistral."
msgstr ""
#: src/available_models_descriptions.py:65
msgid ""
"Llama 2 based model fine tuned on an Orca-style dataset. Originally called "
"Free Willy."
msgstr ""
#: src/available_models_descriptions.py:66
msgid ""
"BakLLaVA is a multimodal model consisting of the Mistral 7B base model "
"augmented with the LLaVA architecture."
msgstr ""
#: src/available_models_descriptions.py:67
msgid ""
"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several "
"benchmarks."
msgstr ""
#: src/available_models_descriptions.py:68
msgid "Uncensored version of Wizard LM model"
msgstr ""
#: src/available_models_descriptions.py:69
msgid ""
"Fine-tuned Llama 2 model to answer medical questions based on an open source "
"medical dataset."
msgstr ""
#: src/available_models_descriptions.py:70
msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral."
msgstr ""
#: src/available_models_descriptions.py:71
msgid "An extension of Mistral to support context windows of 64K or 128K."
msgstr ""
#: src/available_models_descriptions.py:72
msgid ""
"A suite of text embedding models by Snowflake, optimized for performance."
msgstr ""
#: src/available_models_descriptions.py:73
msgid ""
"An expansion of Llama 2 that specializes in integrating both general "
"language understanding and domain-specific knowledge, particularly in "
"programming and mathematics."
msgstr ""
#: src/available_models_descriptions.py:74
msgid "Great code generation model based on Llama2."
msgstr ""
#: src/available_models_descriptions.py:75
msgid ""
"Open-source medical large language model adapted from Llama 2 to the medical "
"domain."
msgstr ""
#: src/available_models_descriptions.py:76
msgid ""
"moondream2 is a small vision language model designed to run efficiently on "
"edge devices."
msgstr ""
#: src/available_models_descriptions.py:77
msgid "Uncensored Llama2 based model with support for a 16K context window."
msgstr ""
#: src/available_models_descriptions.py:78
msgid ""
"Nexus Raven is a 13B instruction tuned model for function calling tasks."
msgstr ""
#: src/available_models_descriptions.py:79
msgid ""
"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic "
"instruction data using OSS-Instruct, a novel approach to enlightening LLMs "
"with open-source code snippets."
msgstr ""
#: src/available_models_descriptions.py:80
msgid "A strong, economical, and efficient Mixture-of-Experts language model."
msgstr ""
#: src/available_models_descriptions.py:81
msgid ""
"A lightweight chat model allowing accurate, and responsive output without "
"requiring high-end hardware."
msgstr ""
#: src/available_models_descriptions.py:82
msgid ""
"A high-performing code instruct model created by merging two existing code "
"models."
msgstr ""
#: src/available_models_descriptions.py:83
msgid "A new small LLaVA model fine-tuned from Phi 3 Mini."
msgstr ""
#: src/available_models_descriptions.py:84
msgid ""
"MistralLite is a fine-tuned model based on Mistral with enhanced "
"capabilities of processing long contexts."
msgstr ""
#: src/available_models_descriptions.py:85
msgid ""
"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by "
"MelodysDreamj."
msgstr ""
#: src/available_models_descriptions.py:86
msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station."
msgstr ""
#: src/available_models_descriptions.py:87
msgid ""
"A language model created by combining two fine-tuned Llama 2 70B models into "
"one."
msgstr ""
#: src/available_models_descriptions.py:88
msgid ""
"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by "
"interleaving the model with itself."
msgstr ""
#: src/available_models_descriptions.py:89
msgid ""
"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. "
"Designed for chat and code generation."
msgstr ""
#: src/available_models_descriptions.py:90
msgid ""
"A top-performing mixture of experts model, fine-tuned with high-quality data."
msgstr ""
#: src/available_models_descriptions.py:91
msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr."
msgstr ""
#: src/available_models_descriptions.py:92
msgid "DBRX is an open, general-purpose LLM created by Databricks."
msgstr ""
#: src/available_models_descriptions.py:93
msgid ""
"Falcon2 is an 11B parameters causal decoder-only model built by TII and "
"trained over 5T tokens."
msgstr ""
#: src/available_models_descriptions.py:94
msgid ""
"A robust conversational model designed to be used for both chat and instruct "
"use cases."
msgstr ""
#, fuzzy
#~ msgid "Model"
#~ msgstr "Остановить Модель"

po/ru.po~

@@ -6,8 +6,8 @@
msgid ""
msgstr ""
"Project-Id-Version: 1.0.0\n"
"Report-Msgid-Bugs-To: https://github.com/Jeffser/Alpaca\n"
"POT-Creation-Date: 2024-07-02 18:21-0600\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2024-07-07 16:04-0600\n"
"PO-Revision-Date: 2024-05-25 10:44+0800\n"
"Last-Translator: (YOUR NAME) <(EMAIL OPTIONAL)>\n"
"Language-Team: Russian\n"
@@ -73,7 +73,7 @@ msgstr ""
msgid "Import and export chats"
msgstr "Импорт чата"
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853
#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860
msgid "Disclaimer"
msgstr "Отказ от ответственности"
@@ -665,7 +665,7 @@ msgstr ""
msgid "This is the first public version of Alpaca"
msgstr ""
#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41
#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41
msgid "New Chat"
msgstr "Новый Чат"
@@ -760,72 +760,72 @@ msgstr "Чат успешно экспортирован"
msgid "Chat imported successfully"
msgstr "Чат успешно импортирован"
#: src/window.py:278
#: src/window.py:280
msgid "Close"
msgstr ""
#: src/window.py:279 src/window.ui:806
#: src/window.py:281 src/window.ui:813
msgid "Next"
msgstr "Следующий"
#: src/window.py:329
#: src/window.py:332
#, fuzzy
msgid "Pulling in the background..."
msgstr "Запуск в фоновом режиме"
#: src/window.py:381
#: src/window.py:384
msgid "Stop Creating '{}'"
msgstr ""
#: src/window.py:418
#: src/window.py:421
#, fuzzy
msgid "image"
msgstr "Изображение"
#: src/window.py:588
#: src/window.py:593
#, fuzzy
msgid "Remove Message"
msgstr "Удалить Изображение"
#: src/window.py:593 src/window.py:841
#: src/window.py:598 src/window.py:869
#, fuzzy
msgid "Copy Message"
msgstr "Отправить Сообщение"
#: src/window.py:598
#: src/window.py:603
#, fuzzy
msgid "Edit Message"
msgstr "Отправить Сообщение"
#: src/window.py:729
#: src/window.py:661
msgid "Missing Image"
msgstr ""
#: src/window.py:677
msgid "Missing image"
msgstr ""
#: src/window.py:757
msgid "Remove '{} ({})'"
msgstr ""
#: src/window.py:882
msgid "Message Received"
msgstr ""
#: src/window.py:882
msgid "New message from '{}'"
msgstr ""
#: src/window.py:939
#: src/window.py:969
msgid "Task Complete"
msgstr "Задача выполнена"
#: src/window.py:939
#: src/window.py:969
msgid "Model '{}' pulled successfully."
msgstr "Модель '{}' успешно извлечена."
#: src/window.py:944
#: src/window.py:974
msgid "Pull Model Error"
msgstr "Ошибка Извлечения Модели"
#: src/window.py:944
#: src/window.py:974
msgid "Failed to pull model '{}' due to network error."
msgstr "Не удалось извлечь модель '{}' из-за сетевой ошибки."
#: src/window.py:978
#: src/window.py:1008
msgid "Stop Pulling '{} ({})'"
msgstr ""
@@ -837,7 +837,7 @@ msgstr "Меню"
msgid "Toggle Sidebar"
msgstr "Переключение боковой панели"
#: src/window.ui:107 src/window.ui:595
#: src/window.ui:107 src/window.ui:587
#, fuzzy
msgid "Manage Models"
msgstr "Управление моделями"
@@ -851,29 +851,29 @@ msgstr "Меню"
msgid "Attach File"
msgstr ""
#: src/window.ui:238 src/window.ui:1120
#: src/window.ui:242 src/window.ui:1127
msgid "Send Message"
msgstr "Отправить Сообщение"
#: src/window.ui:286 src/window.ui:965 src/window.ui:1079
#: src/window.ui:290 src/window.ui:972 src/window.ui:1086
msgid "Preferences"
msgstr "Настройки"
#: src/window.ui:289 src/window.ui:1057
#: src/window.ui:293 src/window.ui:1064
msgid "General"
msgstr "Общие"
#: src/window.ui:297
#: src/window.ui:299
#, fuzzy
msgid "Use Remote Connection to Ollama"
msgstr "Управление удаленным подключением к Ollama"
#: src/window.ui:303
#: src/window.ui:305
#, fuzzy
msgid "URL of Remote Instance"
msgstr "URL-адрес удаленного экземпляра"
#: src/window.ui:310
#: src/window.ui:312
msgid "Bearer Token (Optional)"
msgstr ""
@@ -882,124 +882,125 @@ msgstr ""
msgid "Run Alpaca In Background"
msgstr "Запуск в фоновом режиме"
#: src/window.ui:331
#, fuzzy
msgid "Model"
msgstr "Остановить Модель"
#: src/window.ui:341
#: src/window.ui:333
#, fuzzy
msgid "Temperature"
msgstr "Новый Чат"
#: src/window.ui:342
#: src/window.ui:334
msgid ""
"The temperature of the model. Increasing the temperature will make the model "
"answer more creatively. (Default: 0.8)"
msgstr ""
#: src/window.ui:357
#: src/window.ui:349
msgid "Seed"
msgstr ""
#: src/window.ui:358
#: src/window.ui:350
msgid ""
"Sets the random number seed to use for generation. Setting this to a "
"specific number will make the model generate the same text for the same "
"prompt. (Default: 0 (random))"
msgstr ""
#: src/window.ui:372
#: src/window.ui:364
msgid "Keep Alive Time"
msgstr ""
#: src/window.ui:373
#: src/window.ui:365
msgid ""
"Controls how long the model will stay loaded into memory following the "
"request in minutes (Default: 5)"
msgstr ""
#: src/window.ui:389
#: src/window.ui:381
#, fuzzy
msgid "Ollama Instance"
msgstr "Веб-сайт Ollama"
#: src/window.ui:393
#: src/window.ui:385
#, fuzzy
msgid "Ollama Overrides"
msgstr "Веб-сайт Ollama"
#: src/window.ui:394
#: src/window.ui:386
msgid ""
"Manage the arguments used on Ollama, any changes on this page only applies "
"to the integrated instance, the instance will restart if you make changes."
msgstr ""
#: src/window.ui:477
#: src/window.ui:469
msgid "Create"
msgstr "Создать"
#: src/window.ui:490 src/window.ui:605
#: src/window.ui:482 src/window.ui:597
#, fuzzy
msgid "Create Model"
msgstr "Удалить Модель"
#: src/window.ui:516
#: src/window.ui:508
msgid "Base"
msgstr ""
#: src/window.ui:534
#: src/window.ui:526
msgid "Name"
msgstr ""
#: src/window.ui:540
#: src/window.ui:532
msgid "Context"
msgstr ""
#: src/window.ui:555
#: src/window.ui:547
msgid "Template"
msgstr ""
#: src/window.ui:561
#: src/window.ui:553
msgid ""
"Some models require a specific template. Please visit the model's website "
"for more information if you're unsure."
msgstr ""
#: src/window.ui:612
#: src/window.ui:604
#, fuzzy
msgid "Search Model"
msgstr "Остановить Модель"
#: src/window.ui:672
#: src/window.ui:664
msgid "No Models Found"
msgstr ""
#: src/window.ui:673
#: src/window.ui:665
msgid "Try a different search"
msgstr ""
#: src/window.ui:738
#: src/window.ui:708
msgid ""
"By downloading this model you accept the license agreement available on the "
"model's website."
msgstr ""
#: src/window.ui:745
msgid "Open with Default App"
msgstr ""
#: src/window.ui:790
#: src/window.ui:797
msgid "Previous"
msgstr "Предыдущий"
#: src/window.ui:833
#: src/window.ui:840
msgid "Welcome to Alpaca"
msgstr "Добро пожаловать в Alpaca"
#: src/window.ui:834
#: src/window.ui:841
msgid "Powered by Ollama"
msgstr "При поддержке Ollama"
#: src/window.ui:837
#: src/window.ui:844
msgid "Ollama Website"
msgstr "Веб-сайт Ollama"
#: src/window.ui:854
#: src/window.ui:861
msgid ""
"Alpaca and its developers are not liable for any damages to devices or "
"software resulting from the execution of code generated by an AI model. "
@@ -1011,108 +1012,112 @@ msgstr ""
"Пожалуйста, будьте осторожны и внимательно ознакомьтесь с кодом перед его "
"запуском."
#: src/window.ui:865
#: src/window.ui:872
#, fuzzy
msgid "Featured Models"
msgstr "Удалить Модель"
#: src/window.ui:866
#: src/window.ui:873
msgid ""
"Alpaca works locally on your device, to start chatting you'll need an AI "
"model, you can either pull models from this list or the 'Manage Models' menu "
"later."
msgstr ""
#: src/window.ui:876
#: src/window.ui:883
msgid "Built by Meta"
msgstr ""
#: src/window.ui:894
#: src/window.ui:901
msgid "Built by Google DeepMind"
msgstr ""
#: src/window.ui:912
#: src/window.ui:919
msgid "Built by Microsoft"
msgstr ""
#: src/window.ui:930
#: src/window.ui:937
msgid "Multimodal AI with image recognition"
msgstr ""
#: src/window.ui:959
#: src/window.ui:966
#, fuzzy
msgid "Import Chat"
msgstr "Импорт чата"
#: src/window.ui:969
#: src/window.ui:976
msgid "Keyboard Shortcuts"
msgstr "Комбинации Клавиш"
#: src/window.ui:973
#: src/window.ui:980
msgid "About Alpaca"
msgstr "О Программе"
#: src/window.ui:980 src/window.ui:999
#: src/window.ui:987 src/window.ui:1006
msgid "Rename Chat"
msgstr "Переименовать Чат"
#: src/window.ui:984 src/window.ui:1003
#: src/window.ui:991 src/window.ui:1010
#, fuzzy
msgid "Export Chat"
msgstr "Экспорт чата"
#: src/window.ui:988
#: src/window.ui:995
msgid "Clear Chat"
msgstr "Очистить Чат"
#: src/window.ui:995
#: src/window.ui:1002
msgid "Delete Chat"
msgstr "Удалить Чат"
#: src/window.ui:1011
#: src/window.ui:1018
msgid "From Existing Model"
msgstr ""
#: src/window.ui:1015
#: src/window.ui:1022
msgid "From GGUF File (Experimental)"
msgstr ""
#: src/window.ui:1061
#: src/window.ui:1068
msgid "Close application"
msgstr "Закрыть приложение"
#: src/window.ui:1067
#: src/window.ui:1074
msgid "Import chat"
msgstr "Импорт чата"
#: src/window.ui:1073
#: src/window.ui:1080
msgid "Clear chat"
msgstr "Очистить чат"
#: src/window.ui:1085
#: src/window.ui:1092
msgid "New chat"
msgstr "Новый чат"
#: src/window.ui:1091
#: src/window.ui:1098
msgid "Show shortcuts window"
msgstr "Показать окно комбинаций клавиш"
#: src/window.ui:1098
#: src/window.ui:1105
msgid "Editor"
msgstr "Редактор"
#: src/window.ui:1102
#: src/window.ui:1109
msgid "Copy"
msgstr "Копировать"
#: src/window.ui:1108
#: src/window.ui:1115
msgid "Paste"
msgstr "Вставить"
#: src/window.ui:1114
#: src/window.ui:1121
msgid "Insert new line"
msgstr "Вставить новую строку"
#, fuzzy
#~ msgid "Model"
#~ msgstr "Остановить Модель"
#~ msgctxt "shortcut window"
#~ msgid "General"
#~ msgstr "Общие"

src/available_models_descriptions.py

@@ -0,0 +1,95 @@
descriptions = {
'gemma2': _("Google Gemma 2 is now available in 2 sizes, 9B and 27B."),
'llama3': _("Meta Llama 3: The most capable openly available LLM to date"),
'qwen2': _("Qwen2 is a new series of large language models from Alibaba group"),
'deepseek-coder-v2': _("An open-source Mixture-of-Experts code language model that achieves performance comparable to GPT4-Turbo in code-specific tasks."),
'phi3': _("Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art open models by Microsoft."),
'aya': _("Aya 23, released by Cohere, is a new family of state-of-the-art, multilingual models that support 23 languages."),
'mistral': _("The 7B model released by Mistral AI, updated to version 0.3."),
'mixtral': _("A set of Mixture of Experts (MoE) model with open weights by Mistral AI in 8x7b and 8x22b parameter sizes."),
'codegemma': _("CodeGemma is a collection of powerful, lightweight models that can perform a variety of coding tasks like fill-in-the-middle code completion, code generation, natural language understanding, mathematical reasoning, and instruction following."),
'command-r': _("Command R is a Large Language Model optimized for conversational interaction and long context tasks."),
'command-r-plus': _("Command R+ is a powerful, scalable large language model purpose-built to excel at real-world enterprise use cases."),
'llava': _("🌋 LLaVA is a novel end-to-end trained large multimodal model that combines a vision encoder and Vicuna for general-purpose visual and language understanding. Updated to version 1.6."),
'gemma': _("Gemma is a family of lightweight, state-of-the-art open models built by Google DeepMind. Updated to version 1.1"),
'qwen': _("Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from 0.5B to 110B parameters"),
'llama2': _("Llama 2 is a collection of foundation language models ranging from 7B to 70B parameters."),
'codellama': _("A large language model that can use text prompts to generate and discuss code."),
'dolphin-mixtral': _("Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of experts models that excels at coding tasks. Created by Eric Hartford."),
'llama2-uncensored': _("Uncensored Llama 2 model by George Sung and Jarrad Hope."),
'deepseek-coder': _("DeepSeek Coder is a capable coding model trained on two trillion code and natural language tokens."),
'nomic-embed-text': _("A high-performing open embedding model with a large token context window."),
'phi': _("Phi-2: a 2.7B language model by Microsoft Research that demonstrates outstanding reasoning and language understanding capabilities."),
'dolphin-mistral': _("The uncensored Dolphin model based on Mistral that excels at coding tasks. Updated to version 2.8."),
'mistral-openorca': _("Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the Mistral 7B model using the OpenOrca dataset."),
'orca-mini': _("A general-purpose model ranging from 3 billion parameters to 70 billion, suitable for entry-level hardware."),
'mxbai-embed-large': _("State-of-the-art large embedding model from mixedbread.ai"),
'dolphin-llama3': _("Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on Llama 3 that has a variety of instruction, conversational, and coding skills."),
'starcoder2': _("StarCoder2 is the next generation of transparently trained open code LLMs that comes in three sizes: 3B, 7B and 15B parameters."),
'llama2-chinese': _("Llama 2 based model fine tuned to improve Chinese dialogue ability."),
'zephyr': _("Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models that are trained to act as helpful assistants."),
'yi': _("Yi 1.5 is a high-performing, bilingual language model."),
'nous-hermes2': _("The powerful family of models by Nous Research that excels at scientific discussion and coding tasks."),
'vicuna': _("General use chat model based on Llama and Llama 2 with 2K to 16K context sizes."),
'wizard-vicuna-uncensored': _("Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on Llama 2 uncensored by Eric Hartford."),
'tinyllama': _("The TinyLlama project is an open endeavor to train a compact 1.1B Llama model on 3 trillion tokens."),
'wizardlm2': _("State of the art large language model from Microsoft AI with improved performance on complex chat, multilingual, reasoning and agent use cases."),
'starcoder': _("StarCoder is a code generation model trained on 80+ programming languages."),
'codestral': _("Codestral is Mistral AIs first-ever code model designed for code generation tasks."),
'openchat': _("A family of open-source models trained on a wide variety of data, surpassing ChatGPT on various benchmarks. Updated to version 3.5-0106."),
'tinydolphin': _("An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset by Eric Hartford and based on TinyLlama."),
'openhermes': _("OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully open datasets."),
'wizardcoder': _("State-of-the-art code generation model"),
'stable-code': _("Stable Code 3B is a coding model with instruct and code completion variants on par with models such as Code Llama 7B that are 2.5x larger."),
'neural-chat': _("A fine-tuned model based on Mistral with good coverage of domain and language."),
'wizard-math': _("Model focused on math and logic problems"),
'codeqwen': _("CodeQwen1.5 is a large language model pretrained on a large amount of code data."),
'phind-codellama': _("Code generation model based on Code Llama."),
'stablelm2': _("Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model trained on multilingual data in English, Spanish, German, Italian, French, Portuguese, and Dutch."),
'dolphincoder': _("A 7B and 15B uncensored variant of the Dolphin model family that excels at coding, based on StarCoder2."),
'all-minilm': _("Embedding models on very large sentence level datasets."),
'nous-hermes': _("General use models based on Llama and Llama 2 from Nous Research."),
'starling-lm': _("Starling is a large language model trained by reinforcement learning from AI feedback focused on improving chatbot helpfulness."),
'sqlcoder': _("SQLCoder is a code completion model fined-tuned on StarCoder for SQL generation tasks"),
'orca2': _("Orca 2 is built by Microsoft research, and are a fine-tuned version of Meta's Llama 2 models. The model is designed to excel particularly in reasoning."),
'llama3-gradient': _("This model extends LLama-3 8B's context length from 8k to over 1m tokens."),
'deepseek-llm': _("An advanced language model crafted with 2 trillion bilingual tokens."),
'yarn-llama2': _("An extension of Llama 2 that supports a context of up to 128k tokens."),
'llama3-chatqa': _("A model from NVIDIA based on Llama 3 that excels at conversational question answering (QA) and retrieval-augmented generation (RAG)."),
'solar': _("A compact, yet powerful 10.7B large language model designed for single-turn conversation."),
'xwinlm': _("Conversational model based on Llama 2 that performs competitively on various benchmarks."),
'granite-code': _("A family of open foundation models by IBM for Code Intelligence"),
'dolphin-phi': _("2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language model by Microsoft Research."),
'wizardlm': _("General use model based on Llama 2."),
'samantha-mistral': _("A companion assistant trained in philosophy, psychology, and personal relationships. Based on Mistral."),
'stable-beluga': _("Llama 2 based model fine tuned on an Orca-style dataset. Originally called Free Willy."),
'bakllava': _("BakLLaVA is a multimodal model consisting of the Mistral 7B base model augmented with the LLaVA architecture."),
'llava-llama3': _("A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several benchmarks."),
'wizardlm-uncensored': _("Uncensored version of Wizard LM model"),
'medllama2': _("Fine-tuned Llama 2 model to answer medical questions based on an open source medical dataset."),
'nous-hermes2-mixtral': _("The Nous Hermes 2 model from Nous Research, now trained over Mixtral."),
'yarn-mistral': _("An extension of Mistral to support context windows of 64K or 128K."),
'snowflake-arctic-embed': _("A suite of text embedding models by Snowflake, optimized for performance."),
'llama-pro': _("An expansion of Llama 2 that specializes in integrating both general language understanding and domain-specific knowledge, particularly in programming and mathematics."),
'codeup': _("Great code generation model based on Llama2."),
'meditron': _("Open-source medical large language model adapted from Llama 2 to the medical domain."),
'moondream': _("moondream2 is a small vision language model designed to run efficiently on edge devices."),
'everythinglm': _("Uncensored Llama2 based model with support for a 16K context window."),
'nexusraven': _("Nexus Raven is a 13B instruction tuned model for function calling tasks."),
'magicoder': _("🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic instruction data using OSS-Instruct, a novel approach to enlightening LLMs with open-source code snippets."),
'deepseek-v2': _("A strong, economical, and efficient Mixture-of-Experts language model."),
'stablelm-zephyr': _("A lightweight chat model allowing accurate, and responsive output without requiring high-end hardware."),
'codebooga': _("A high-performing code instruct model created by merging two existing code models."),
'llava-phi3': _("A new small LLaVA model fine-tuned from Phi 3 Mini."),
'mistrallite': _("MistralLite is a fine-tuned model based on Mistral with enhanced capabilities of processing long contexts."),
'wizard-vicuna': _("Wizard Vicuna is a 13B parameter model based on Llama 2 trained by MelodysDreamj."),
'duckdb-nsql': _("7B parameter text-to-SQL model made by MotherDuck and Numbers Station."),
'goliath': _("A language model created by combining two fine-tuned Llama 2 70B models into one."),
'megadolphin': _("MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by interleaving the model with itself."),
'open-orca-platypus2': _("Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. Designed for chat and code generation."),
'notux': _("A top-performing mixture of experts model, fine-tuned with high-quality data."),
'notus': _("A 7B chat model fine-tuned with high-quality data and based on Zephyr."),
'dbrx': _("DBRX is an open, general-purpose LLM created by Databricks."),
'falcon2': _("Falcon2 is an 11B parameters causal decoder-only model built by TII and trained over 5T tokens."),
'alfred': _("A robust conversational model designed to be used for both chat and instruct use cases."),
}
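The generated module calls _() at import time for every entry, so gettext has to provide _ before this file is imported. A minimal sketch of that ordering, assuming the application installs the translation globally (the domain name and the bare import path below are assumptions, not taken from this commit):

import gettext

# Assumption: _ is installed into builtins before the generated module is
# imported; the 'com.jeffser.Alpaca' domain is a guess, not taken from this commit.
gettext.install('com.jeffser.Alpaca')

import available_models_descriptions  # hypothetical: module reachable on sys.path
print(available_models_descriptions.descriptions['llama3'])

With no compiled .mo catalog present, gettext falls back to returning the original English strings, so the lookup still works before any translation lands.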

src/meson.build

@@ -43,7 +43,8 @@ alpaca_sources = [
'dialogs.py',
'local_instance.py',
'update_history.py',
'available_models.json'
'available_models.json',
'available_models_descriptions.py'
]
install_data(alpaca_sources, install_dir: moduledir)

src/window.py

@@ -27,7 +27,7 @@ from io import BytesIO
from PIL import Image
from pypdf import PdfReader
from datetime import datetime
from . import dialogs, local_instance, connection_handler, update_history
from . import dialogs, local_instance, connection_handler, update_history, available_models_descriptions
@Gtk.Template(resource_path='/com/jeffser/Alpaca/window.ui')
class AlpacaWindow(Adw.ApplicationWindow):
@@ -1045,7 +1045,7 @@ Generate a title following these rules:
for name, model_info in self.available_models.items():
model = Adw.ActionRow(
title = "<b>{}{}</b> <small>by {}</small>".format('🖼 ' if model_info["image"] else '', name.replace("-", " ").title(), model_info['author']),
subtitle = model_info["description"], # + ("\n\n<span foreground='white' background='black' line_height='1.5'> Image Recognition </span>" if model_info["image"] else ""),
subtitle = available_models_descriptions.descriptions[name], # + ("\n\n<span foreground='white' background='black' line_height='1.5'> Image Recognition </span>" if model_info["image"] else ""),
#("<b>Image recognition capable</b>\n" if model_info["image"] else "") +
#title = f"<b>{name.capitalize()}</b> <small>by {model_info['author']}</small>",
#subtitle = f"<small>" + (_("(Image recognition capable)\n") if model_info["image"] else "") + f"{model_info['description']}</small>",
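With this change the row subtitle comes from the generated, translatable dictionary instead of the raw JSON description, and descriptions[name] raises KeyError if available_models.json ever gains a model that the generated file does not yet cover. A small defensive sketch (a hypothetical helper, not part of this commit) that falls back to the untranslated JSON text, assuming the same available_models_descriptions import shown above:

# Hypothetical fallback (not in the commit): prefer the translated description,
# use the raw JSON text when a model id is missing from the generated dict.
def model_subtitle(name, model_info):
    return available_models_descriptions.descriptions.get(
        name, model_info.get("description", ""))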


@@ -0,0 +1,9 @@
import json
with open('src/available_models.json', 'r') as f:
    data = json.load(f)
results = 'descriptions = {\n'
for key, value in data.items():
    results += f" '{key}': _(\"{value['description']}\"),\n"
results += '}'
with open('src/available_models_descriptions.py', 'w+') as f:
    f.write(results)
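The generator wraps each description in _("...") using plain double quotes, so a description that itself contains a double quote or a backslash would likely produce an invalid or altered module. A quick sanity check (not part of the commit) is to parse the generated file and confirm every model key from the JSON made it in:

# Sanity check (not in the commit): the generated module must parse cleanly and
# should contain one entry per model key in the source JSON.
import ast
import json

with open('src/available_models.json', 'r') as f:
    keys = set(json.load(f))
with open('src/available_models_descriptions.py', 'r') as f:
    source = f.read()

ast.parse(source)  # raises SyntaxError if an unescaped quote broke the literal
missing = [key for key in keys if f"'{key}':" not in source]
print('missing descriptions:', missing)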