diff --git a/po/POTFILES b/po/POTFILES index 7f0e22f..5d7dcda 100644 --- a/po/POTFILES +++ b/po/POTFILES @@ -4,3 +4,4 @@ data/com.jeffser.Alpaca.gschema.xml src/main.py src/window.py src/window.ui +src/available_models_descriptions.py diff --git a/po/alpaca.pot b/po/alpaca.pot index 356c091..ec065e0 100644 --- a/po/alpaca.pot +++ b/po/alpaca.pot @@ -8,13 +8,13 @@ msgid "" msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-07-07 16:55-0600\n" +"POT-Creation-Date: 2024-07-07 17:40-0600\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "Language: \n" "MIME-Version: 1.0\n" -"Content-Type: text/plain; charset=CHARSET\n" +"Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" #: data/com.jeffser.Alpaca.desktop.in:3 @@ -1066,3 +1066,508 @@ msgstr "" #: src/window.ui:1121 msgid "Insert new line" msgstr "" + +#: src/available_models_descriptions.py:2 +msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B." +msgstr "" + +#: src/available_models_descriptions.py:3 +msgid "Meta Llama 3: The most capable openly available LLM to date" +msgstr "" + +#: src/available_models_descriptions.py:4 +msgid "Qwen2 is a new series of large language models from Alibaba group" +msgstr "" + +#: src/available_models_descriptions.py:5 +msgid "" +"An open-source Mixture-of-Experts code language model that achieves " +"performance comparable to GPT4-Turbo in code-specific tasks." +msgstr "" + +#: src/available_models_descriptions.py:6 +msgid "" +"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art " +"open models by Microsoft." +msgstr "" + +#: src/available_models_descriptions.py:7 +msgid "" +"Aya 23, released by Cohere, is a new family of state-of-the-art, " +"multilingual models that support 23 languages." +msgstr "" + +#: src/available_models_descriptions.py:8 +msgid "The 7B model released by Mistral AI, updated to version 0.3." +msgstr "" + +#: src/available_models_descriptions.py:9 +msgid "" +"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in " +"8x7b and 8x22b parameter sizes." +msgstr "" + +#: src/available_models_descriptions.py:10 +msgid "" +"CodeGemma is a collection of powerful, lightweight models that can perform a " +"variety of coding tasks like fill-in-the-middle code completion, code " +"generation, natural language understanding, mathematical reasoning, and " +"instruction following." +msgstr "" + +#: src/available_models_descriptions.py:11 +msgid "" +"Command R is a Large Language Model optimized for conversational interaction " +"and long context tasks." +msgstr "" + +#: src/available_models_descriptions.py:12 +msgid "" +"Command R+ is a powerful, scalable large language model purpose-built to " +"excel at real-world enterprise use cases." +msgstr "" + +#: src/available_models_descriptions.py:13 +msgid "" +"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines " +"a vision encoder and Vicuna for general-purpose visual and language " +"understanding. Updated to version 1.6." +msgstr "" + +#: src/available_models_descriptions.py:14 +msgid "" +"Gemma is a family of lightweight, state-of-the-art open models built by " +"Google DeepMind. 
Updated to version 1.1" +msgstr "" + +#: src/available_models_descriptions.py:15 +msgid "" +"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from " +"0.5B to 110B parameters" +msgstr "" + +#: src/available_models_descriptions.py:16 +msgid "" +"Llama 2 is a collection of foundation language models ranging from 7B to 70B " +"parameters." +msgstr "" + +#: src/available_models_descriptions.py:17 +msgid "" +"A large language model that can use text prompts to generate and discuss " +"code." +msgstr "" + +#: src/available_models_descriptions.py:18 +msgid "" +"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of " +"experts models that excels at coding tasks. Created by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:19 +msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope." +msgstr "" + +#: src/available_models_descriptions.py:20 +msgid "" +"DeepSeek Coder is a capable coding model trained on two trillion code and " +"natural language tokens." +msgstr "" + +#: src/available_models_descriptions.py:21 +msgid "" +"A high-performing open embedding model with a large token context window." +msgstr "" + +#: src/available_models_descriptions.py:22 +msgid "" +"Phi-2: a 2.7B language model by Microsoft Research that demonstrates " +"outstanding reasoning and language understanding capabilities." +msgstr "" + +#: src/available_models_descriptions.py:23 +msgid "" +"The uncensored Dolphin model based on Mistral that excels at coding tasks. " +"Updated to version 2.8." +msgstr "" + +#: src/available_models_descriptions.py:24 +msgid "" +"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the " +"Mistral 7B model using the OpenOrca dataset." +msgstr "" + +#: src/available_models_descriptions.py:25 +msgid "" +"A general-purpose model ranging from 3 billion parameters to 70 billion, " +"suitable for entry-level hardware." +msgstr "" + +#: src/available_models_descriptions.py:26 +msgid "State-of-the-art large embedding model from mixedbread.ai" +msgstr "" + +#: src/available_models_descriptions.py:27 +msgid "" +"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on " +"Llama 3 that has a variety of instruction, conversational, and coding skills." +msgstr "" + +#: src/available_models_descriptions.py:28 +msgid "" +"StarCoder2 is the next generation of transparently trained open code LLMs " +"that comes in three sizes: 3B, 7B and 15B parameters." +msgstr "" + +#: src/available_models_descriptions.py:29 +msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability." +msgstr "" + +#: src/available_models_descriptions.py:30 +msgid "" +"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models " +"that are trained to act as helpful assistants." +msgstr "" + +#: src/available_models_descriptions.py:31 +msgid "Yi 1.5 is a high-performing, bilingual language model." +msgstr "" + +#: src/available_models_descriptions.py:32 +msgid "" +"The powerful family of models by Nous Research that excels at scientific " +"discussion and coding tasks." +msgstr "" + +#: src/available_models_descriptions.py:33 +msgid "" +"General use chat model based on Llama and Llama 2 with 2K to 16K context " +"sizes." +msgstr "" + +#: src/available_models_descriptions.py:34 +msgid "" +"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on " +"Llama 2 uncensored by Eric Hartford." 
+msgstr "" + +#: src/available_models_descriptions.py:35 +msgid "" +"The TinyLlama project is an open endeavor to train a compact 1.1B Llama " +"model on 3 trillion tokens." +msgstr "" + +#: src/available_models_descriptions.py:36 +msgid "" +"State of the art large language model from Microsoft AI with improved " +"performance on complex chat, multilingual, reasoning and agent use cases." +msgstr "" + +#: src/available_models_descriptions.py:37 +msgid "" +"StarCoder is a code generation model trained on 80+ programming languages." +msgstr "" + +#: src/available_models_descriptions.py:38 +msgid "" +"Codestral is Mistral AI’s first-ever code model designed for code generation " +"tasks." +msgstr "" + +#: src/available_models_descriptions.py:39 +msgid "" +"A family of open-source models trained on a wide variety of data, surpassing " +"ChatGPT on various benchmarks. Updated to version 3.5-0106." +msgstr "" + +#: src/available_models_descriptions.py:40 +msgid "" +"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset " +"by Eric Hartford and based on TinyLlama." +msgstr "" + +#: src/available_models_descriptions.py:41 +msgid "" +"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully " +"open datasets." +msgstr "" + +#: src/available_models_descriptions.py:42 +msgid "State-of-the-art code generation model" +msgstr "" + +#: src/available_models_descriptions.py:43 +msgid "" +"Stable Code 3B is a coding model with instruct and code completion variants " +"on par with models such as Code Llama 7B that are 2.5x larger." +msgstr "" + +#: src/available_models_descriptions.py:44 +msgid "" +"A fine-tuned model based on Mistral with good coverage of domain and " +"language." +msgstr "" + +#: src/available_models_descriptions.py:45 +msgid "Model focused on math and logic problems" +msgstr "" + +#: src/available_models_descriptions.py:46 +msgid "" +"CodeQwen1.5 is a large language model pretrained on a large amount of code " +"data." +msgstr "" + +#: src/available_models_descriptions.py:47 +msgid "Code generation model based on Code Llama." +msgstr "" + +#: src/available_models_descriptions.py:48 +msgid "" +"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model " +"trained on multilingual data in English, Spanish, German, Italian, French, " +"Portuguese, and Dutch." +msgstr "" + +#: src/available_models_descriptions.py:49 +msgid "" +"A 7B and 15B uncensored variant of the Dolphin model family that excels at " +"coding, based on StarCoder2." +msgstr "" + +#: src/available_models_descriptions.py:50 +msgid "Embedding models on very large sentence level datasets." +msgstr "" + +#: src/available_models_descriptions.py:51 +msgid "General use models based on Llama and Llama 2 from Nous Research." +msgstr "" + +#: src/available_models_descriptions.py:52 +msgid "" +"Starling is a large language model trained by reinforcement learning from AI " +"feedback focused on improving chatbot helpfulness." +msgstr "" + +#: src/available_models_descriptions.py:53 +msgid "" +"SQLCoder is a code completion model fined-tuned on StarCoder for SQL " +"generation tasks" +msgstr "" + +#: src/available_models_descriptions.py:54 +msgid "" +"Orca 2 is built by Microsoft research, and are a fine-tuned version of " +"Meta's Llama 2 models. The model is designed to excel particularly in " +"reasoning." +msgstr "" + +#: src/available_models_descriptions.py:55 +msgid "" +"This model extends LLama-3 8B's context length from 8k to over 1m tokens." 
+msgstr "" + +#: src/available_models_descriptions.py:56 +msgid "An advanced language model crafted with 2 trillion bilingual tokens." +msgstr "" + +#: src/available_models_descriptions.py:57 +msgid "An extension of Llama 2 that supports a context of up to 128k tokens." +msgstr "" + +#: src/available_models_descriptions.py:58 +msgid "" +"A model from NVIDIA based on Llama 3 that excels at conversational question " +"answering (QA) and retrieval-augmented generation (RAG)." +msgstr "" + +#: src/available_models_descriptions.py:59 +msgid "" +"A compact, yet powerful 10.7B large language model designed for single-turn " +"conversation." +msgstr "" + +#: src/available_models_descriptions.py:60 +msgid "" +"Conversational model based on Llama 2 that performs competitively on various " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:61 +msgid "A family of open foundation models by IBM for Code Intelligence" +msgstr "" + +#: src/available_models_descriptions.py:62 +msgid "" +"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language " +"model by Microsoft Research." +msgstr "" + +#: src/available_models_descriptions.py:63 +msgid "General use model based on Llama 2." +msgstr "" + +#: src/available_models_descriptions.py:64 +msgid "" +"A companion assistant trained in philosophy, psychology, and personal " +"relationships. Based on Mistral." +msgstr "" + +#: src/available_models_descriptions.py:65 +msgid "" +"Llama 2 based model fine tuned on an Orca-style dataset. Originally called " +"Free Willy." +msgstr "" + +#: src/available_models_descriptions.py:66 +msgid "" +"BakLLaVA is a multimodal model consisting of the Mistral 7B base model " +"augmented with the LLaVA architecture." +msgstr "" + +#: src/available_models_descriptions.py:67 +msgid "" +"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:68 +msgid "Uncensored version of Wizard LM model" +msgstr "" + +#: src/available_models_descriptions.py:69 +msgid "" +"Fine-tuned Llama 2 model to answer medical questions based on an open source " +"medical dataset." +msgstr "" + +#: src/available_models_descriptions.py:70 +msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral." +msgstr "" + +#: src/available_models_descriptions.py:71 +msgid "An extension of Mistral to support context windows of 64K or 128K." +msgstr "" + +#: src/available_models_descriptions.py:72 +msgid "" +"A suite of text embedding models by Snowflake, optimized for performance." +msgstr "" + +#: src/available_models_descriptions.py:73 +msgid "" +"An expansion of Llama 2 that specializes in integrating both general " +"language understanding and domain-specific knowledge, particularly in " +"programming and mathematics." +msgstr "" + +#: src/available_models_descriptions.py:74 +msgid "Great code generation model based on Llama2." +msgstr "" + +#: src/available_models_descriptions.py:75 +msgid "" +"Open-source medical large language model adapted from Llama 2 to the medical " +"domain." +msgstr "" + +#: src/available_models_descriptions.py:76 +msgid "" +"moondream2 is a small vision language model designed to run efficiently on " +"edge devices." +msgstr "" + +#: src/available_models_descriptions.py:77 +msgid "Uncensored Llama2 based model with support for a 16K context window." +msgstr "" + +#: src/available_models_descriptions.py:78 +msgid "" +"Nexus Raven is a 13B instruction tuned model for function calling tasks." 
+msgstr "" + +#: src/available_models_descriptions.py:79 +msgid "" +"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic " +"instruction data using OSS-Instruct, a novel approach to enlightening LLMs " +"with open-source code snippets." +msgstr "" + +#: src/available_models_descriptions.py:80 +msgid "A strong, economical, and efficient Mixture-of-Experts language model." +msgstr "" + +#: src/available_models_descriptions.py:81 +msgid "" +"A lightweight chat model allowing accurate, and responsive output without " +"requiring high-end hardware." +msgstr "" + +#: src/available_models_descriptions.py:82 +msgid "" +"A high-performing code instruct model created by merging two existing code " +"models." +msgstr "" + +#: src/available_models_descriptions.py:83 +msgid "A new small LLaVA model fine-tuned from Phi 3 Mini." +msgstr "" + +#: src/available_models_descriptions.py:84 +msgid "" +"MistralLite is a fine-tuned model based on Mistral with enhanced " +"capabilities of processing long contexts." +msgstr "" + +#: src/available_models_descriptions.py:85 +msgid "" +"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by " +"MelodysDreamj." +msgstr "" + +#: src/available_models_descriptions.py:86 +msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station." +msgstr "" + +#: src/available_models_descriptions.py:87 +msgid "" +"A language model created by combining two fine-tuned Llama 2 70B models into " +"one." +msgstr "" + +#: src/available_models_descriptions.py:88 +msgid "" +"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by " +"interleaving the model with itself." +msgstr "" + +#: src/available_models_descriptions.py:89 +msgid "" +"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. " +"Designed for chat and code generation." +msgstr "" + +#: src/available_models_descriptions.py:90 +msgid "" +"A top-performing mixture of experts model, fine-tuned with high-quality data." +msgstr "" + +#: src/available_models_descriptions.py:91 +msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr." +msgstr "" + +#: src/available_models_descriptions.py:92 +msgid "DBRX is an open, general-purpose LLM created by Databricks." +msgstr "" + +#: src/available_models_descriptions.py:93 +msgid "" +"Falcon2 is an 11B parameters causal decoder-only model built by TII and " +"trained over 5T tokens." +msgstr "" + +#: src/available_models_descriptions.py:94 +msgid "" +"A robust conversational model designed to be used for both chat and instruct " +"use cases." +msgstr "" diff --git a/po/bn.po b/po/bn.po index ea60b89..1f460e7 100644 --- a/po/bn.po +++ b/po/bn.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-07-07 16:07-0600\n" +"POT-Creation-Date: 2024-07-07 17:40-0600\n" "PO-Revision-Date: 2024-07-07 15:12-0600\n" "Last-Translator: (YOUR NAME) <(YOUR EMAIL OPTIONAL)>\n" "Language-Team: Bengali\n" @@ -1108,6 +1108,511 @@ msgstr "Paste" msgid "Insert new line" msgstr "Insert new line" +#: src/available_models_descriptions.py:2 +msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B." 
+msgstr "" + +#: src/available_models_descriptions.py:3 +msgid "Meta Llama 3: The most capable openly available LLM to date" +msgstr "" + +#: src/available_models_descriptions.py:4 +msgid "Qwen2 is a new series of large language models from Alibaba group" +msgstr "" + +#: src/available_models_descriptions.py:5 +msgid "" +"An open-source Mixture-of-Experts code language model that achieves " +"performance comparable to GPT4-Turbo in code-specific tasks." +msgstr "" + +#: src/available_models_descriptions.py:6 +msgid "" +"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art " +"open models by Microsoft." +msgstr "" + +#: src/available_models_descriptions.py:7 +msgid "" +"Aya 23, released by Cohere, is a new family of state-of-the-art, " +"multilingual models that support 23 languages." +msgstr "" + +#: src/available_models_descriptions.py:8 +msgid "The 7B model released by Mistral AI, updated to version 0.3." +msgstr "" + +#: src/available_models_descriptions.py:9 +msgid "" +"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in " +"8x7b and 8x22b parameter sizes." +msgstr "" + +#: src/available_models_descriptions.py:10 +msgid "" +"CodeGemma is a collection of powerful, lightweight models that can perform a " +"variety of coding tasks like fill-in-the-middle code completion, code " +"generation, natural language understanding, mathematical reasoning, and " +"instruction following." +msgstr "" + +#: src/available_models_descriptions.py:11 +msgid "" +"Command R is a Large Language Model optimized for conversational interaction " +"and long context tasks." +msgstr "" + +#: src/available_models_descriptions.py:12 +msgid "" +"Command R+ is a powerful, scalable large language model purpose-built to " +"excel at real-world enterprise use cases." +msgstr "" + +#: src/available_models_descriptions.py:13 +msgid "" +"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines " +"a vision encoder and Vicuna for general-purpose visual and language " +"understanding. Updated to version 1.6." +msgstr "" + +#: src/available_models_descriptions.py:14 +msgid "" +"Gemma is a family of lightweight, state-of-the-art open models built by " +"Google DeepMind. Updated to version 1.1" +msgstr "" + +#: src/available_models_descriptions.py:15 +msgid "" +"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from " +"0.5B to 110B parameters" +msgstr "" + +#: src/available_models_descriptions.py:16 +msgid "" +"Llama 2 is a collection of foundation language models ranging from 7B to 70B " +"parameters." +msgstr "" + +#: src/available_models_descriptions.py:17 +msgid "" +"A large language model that can use text prompts to generate and discuss " +"code." +msgstr "" + +#: src/available_models_descriptions.py:18 +msgid "" +"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of " +"experts models that excels at coding tasks. Created by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:19 +msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope." +msgstr "" + +#: src/available_models_descriptions.py:20 +msgid "" +"DeepSeek Coder is a capable coding model trained on two trillion code and " +"natural language tokens." +msgstr "" + +#: src/available_models_descriptions.py:21 +msgid "" +"A high-performing open embedding model with a large token context window." 
+msgstr "" + +#: src/available_models_descriptions.py:22 +msgid "" +"Phi-2: a 2.7B language model by Microsoft Research that demonstrates " +"outstanding reasoning and language understanding capabilities." +msgstr "" + +#: src/available_models_descriptions.py:23 +msgid "" +"The uncensored Dolphin model based on Mistral that excels at coding tasks. " +"Updated to version 2.8." +msgstr "" + +#: src/available_models_descriptions.py:24 +msgid "" +"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the " +"Mistral 7B model using the OpenOrca dataset." +msgstr "" + +#: src/available_models_descriptions.py:25 +msgid "" +"A general-purpose model ranging from 3 billion parameters to 70 billion, " +"suitable for entry-level hardware." +msgstr "" + +#: src/available_models_descriptions.py:26 +msgid "State-of-the-art large embedding model from mixedbread.ai" +msgstr "" + +#: src/available_models_descriptions.py:27 +msgid "" +"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on " +"Llama 3 that has a variety of instruction, conversational, and coding skills." +msgstr "" + +#: src/available_models_descriptions.py:28 +msgid "" +"StarCoder2 is the next generation of transparently trained open code LLMs " +"that comes in three sizes: 3B, 7B and 15B parameters." +msgstr "" + +#: src/available_models_descriptions.py:29 +msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability." +msgstr "" + +#: src/available_models_descriptions.py:30 +msgid "" +"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models " +"that are trained to act as helpful assistants." +msgstr "" + +#: src/available_models_descriptions.py:31 +msgid "Yi 1.5 is a high-performing, bilingual language model." +msgstr "" + +#: src/available_models_descriptions.py:32 +msgid "" +"The powerful family of models by Nous Research that excels at scientific " +"discussion and coding tasks." +msgstr "" + +#: src/available_models_descriptions.py:33 +msgid "" +"General use chat model based on Llama and Llama 2 with 2K to 16K context " +"sizes." +msgstr "" + +#: src/available_models_descriptions.py:34 +msgid "" +"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on " +"Llama 2 uncensored by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:35 +msgid "" +"The TinyLlama project is an open endeavor to train a compact 1.1B Llama " +"model on 3 trillion tokens." +msgstr "" + +#: src/available_models_descriptions.py:36 +msgid "" +"State of the art large language model from Microsoft AI with improved " +"performance on complex chat, multilingual, reasoning and agent use cases." +msgstr "" + +#: src/available_models_descriptions.py:37 +msgid "" +"StarCoder is a code generation model trained on 80+ programming languages." +msgstr "" + +#: src/available_models_descriptions.py:38 +msgid "" +"Codestral is Mistral AI’s first-ever code model designed for code generation " +"tasks." +msgstr "" + +#: src/available_models_descriptions.py:39 +msgid "" +"A family of open-source models trained on a wide variety of data, surpassing " +"ChatGPT on various benchmarks. Updated to version 3.5-0106." +msgstr "" + +#: src/available_models_descriptions.py:40 +msgid "" +"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset " +"by Eric Hartford and based on TinyLlama." +msgstr "" + +#: src/available_models_descriptions.py:41 +msgid "" +"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully " +"open datasets." 
+msgstr "" + +#: src/available_models_descriptions.py:42 +msgid "State-of-the-art code generation model" +msgstr "" + +#: src/available_models_descriptions.py:43 +msgid "" +"Stable Code 3B is a coding model with instruct and code completion variants " +"on par with models such as Code Llama 7B that are 2.5x larger." +msgstr "" + +#: src/available_models_descriptions.py:44 +msgid "" +"A fine-tuned model based on Mistral with good coverage of domain and " +"language." +msgstr "" + +#: src/available_models_descriptions.py:45 +msgid "Model focused on math and logic problems" +msgstr "" + +#: src/available_models_descriptions.py:46 +msgid "" +"CodeQwen1.5 is a large language model pretrained on a large amount of code " +"data." +msgstr "" + +#: src/available_models_descriptions.py:47 +msgid "Code generation model based on Code Llama." +msgstr "" + +#: src/available_models_descriptions.py:48 +msgid "" +"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model " +"trained on multilingual data in English, Spanish, German, Italian, French, " +"Portuguese, and Dutch." +msgstr "" + +#: src/available_models_descriptions.py:49 +msgid "" +"A 7B and 15B uncensored variant of the Dolphin model family that excels at " +"coding, based on StarCoder2." +msgstr "" + +#: src/available_models_descriptions.py:50 +msgid "Embedding models on very large sentence level datasets." +msgstr "" + +#: src/available_models_descriptions.py:51 +msgid "General use models based on Llama and Llama 2 from Nous Research." +msgstr "" + +#: src/available_models_descriptions.py:52 +msgid "" +"Starling is a large language model trained by reinforcement learning from AI " +"feedback focused on improving chatbot helpfulness." +msgstr "" + +#: src/available_models_descriptions.py:53 +msgid "" +"SQLCoder is a code completion model fined-tuned on StarCoder for SQL " +"generation tasks" +msgstr "" + +#: src/available_models_descriptions.py:54 +msgid "" +"Orca 2 is built by Microsoft research, and are a fine-tuned version of " +"Meta's Llama 2 models. The model is designed to excel particularly in " +"reasoning." +msgstr "" + +#: src/available_models_descriptions.py:55 +msgid "" +"This model extends LLama-3 8B's context length from 8k to over 1m tokens." +msgstr "" + +#: src/available_models_descriptions.py:56 +msgid "An advanced language model crafted with 2 trillion bilingual tokens." +msgstr "" + +#: src/available_models_descriptions.py:57 +msgid "An extension of Llama 2 that supports a context of up to 128k tokens." +msgstr "" + +#: src/available_models_descriptions.py:58 +msgid "" +"A model from NVIDIA based on Llama 3 that excels at conversational question " +"answering (QA) and retrieval-augmented generation (RAG)." +msgstr "" + +#: src/available_models_descriptions.py:59 +msgid "" +"A compact, yet powerful 10.7B large language model designed for single-turn " +"conversation." +msgstr "" + +#: src/available_models_descriptions.py:60 +msgid "" +"Conversational model based on Llama 2 that performs competitively on various " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:61 +msgid "A family of open foundation models by IBM for Code Intelligence" +msgstr "" + +#: src/available_models_descriptions.py:62 +msgid "" +"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language " +"model by Microsoft Research." +msgstr "" + +#: src/available_models_descriptions.py:63 +msgid "General use model based on Llama 2." 
+msgstr "" + +#: src/available_models_descriptions.py:64 +msgid "" +"A companion assistant trained in philosophy, psychology, and personal " +"relationships. Based on Mistral." +msgstr "" + +#: src/available_models_descriptions.py:65 +msgid "" +"Llama 2 based model fine tuned on an Orca-style dataset. Originally called " +"Free Willy." +msgstr "" + +#: src/available_models_descriptions.py:66 +msgid "" +"BakLLaVA is a multimodal model consisting of the Mistral 7B base model " +"augmented with the LLaVA architecture." +msgstr "" + +#: src/available_models_descriptions.py:67 +msgid "" +"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:68 +msgid "Uncensored version of Wizard LM model" +msgstr "" + +#: src/available_models_descriptions.py:69 +msgid "" +"Fine-tuned Llama 2 model to answer medical questions based on an open source " +"medical dataset." +msgstr "" + +#: src/available_models_descriptions.py:70 +msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral." +msgstr "" + +#: src/available_models_descriptions.py:71 +msgid "An extension of Mistral to support context windows of 64K or 128K." +msgstr "" + +#: src/available_models_descriptions.py:72 +msgid "" +"A suite of text embedding models by Snowflake, optimized for performance." +msgstr "" + +#: src/available_models_descriptions.py:73 +msgid "" +"An expansion of Llama 2 that specializes in integrating both general " +"language understanding and domain-specific knowledge, particularly in " +"programming and mathematics." +msgstr "" + +#: src/available_models_descriptions.py:74 +msgid "Great code generation model based on Llama2." +msgstr "" + +#: src/available_models_descriptions.py:75 +msgid "" +"Open-source medical large language model adapted from Llama 2 to the medical " +"domain." +msgstr "" + +#: src/available_models_descriptions.py:76 +msgid "" +"moondream2 is a small vision language model designed to run efficiently on " +"edge devices." +msgstr "" + +#: src/available_models_descriptions.py:77 +msgid "Uncensored Llama2 based model with support for a 16K context window." +msgstr "" + +#: src/available_models_descriptions.py:78 +msgid "" +"Nexus Raven is a 13B instruction tuned model for function calling tasks." +msgstr "" + +#: src/available_models_descriptions.py:79 +msgid "" +"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic " +"instruction data using OSS-Instruct, a novel approach to enlightening LLMs " +"with open-source code snippets." +msgstr "" + +#: src/available_models_descriptions.py:80 +msgid "A strong, economical, and efficient Mixture-of-Experts language model." +msgstr "" + +#: src/available_models_descriptions.py:81 +msgid "" +"A lightweight chat model allowing accurate, and responsive output without " +"requiring high-end hardware." +msgstr "" + +#: src/available_models_descriptions.py:82 +msgid "" +"A high-performing code instruct model created by merging two existing code " +"models." +msgstr "" + +#: src/available_models_descriptions.py:83 +msgid "A new small LLaVA model fine-tuned from Phi 3 Mini." +msgstr "" + +#: src/available_models_descriptions.py:84 +msgid "" +"MistralLite is a fine-tuned model based on Mistral with enhanced " +"capabilities of processing long contexts." +msgstr "" + +#: src/available_models_descriptions.py:85 +msgid "" +"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by " +"MelodysDreamj." 
+msgstr "" + +#: src/available_models_descriptions.py:86 +msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station." +msgstr "" + +#: src/available_models_descriptions.py:87 +msgid "" +"A language model created by combining two fine-tuned Llama 2 70B models into " +"one." +msgstr "" + +#: src/available_models_descriptions.py:88 +msgid "" +"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by " +"interleaving the model with itself." +msgstr "" + +#: src/available_models_descriptions.py:89 +msgid "" +"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. " +"Designed for chat and code generation." +msgstr "" + +#: src/available_models_descriptions.py:90 +msgid "" +"A top-performing mixture of experts model, fine-tuned with high-quality data." +msgstr "" + +#: src/available_models_descriptions.py:91 +msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr." +msgstr "" + +#: src/available_models_descriptions.py:92 +msgid "DBRX is an open, general-purpose LLM created by Databricks." +msgstr "" + +#: src/available_models_descriptions.py:93 +msgid "" +"Falcon2 is an 11B parameters causal decoder-only model built by TII and " +"trained over 5T tokens." +msgstr "" + +#: src/available_models_descriptions.py:94 +msgid "" +"A robust conversational model designed to be used for both chat and instruct " +"use cases." +msgstr "" + #~ msgid "Message Received" #~ msgstr "Message Received" diff --git a/po/bn.po~ b/po/bn.po~ index 9bb50a4..ea60b89 100644 --- a/po/bn.po~ +++ b/po/bn.po~ @@ -6,8 +6,8 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" -"Report-Msgid-Bugs-To: https://github.com/Jeffser/Alpaca\n" -"POT-Creation-Date: 2024-07-02 18:22-0600\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2024-07-07 16:07-0600\n" "PO-Revision-Date: 2024-07-07 15:12-0600\n" "Last-Translator: (YOUR NAME) <(YOUR EMAIL OPTIONAL)>\n" "Language-Team: Bengali\n" @@ -65,7 +65,7 @@ msgstr "Plain text documents recognition" msgid "Import and export chats" msgstr "Import and export chats" -#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853 +#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860 msgid "Disclaimer" msgstr "Disclaimer" @@ -667,7 +667,7 @@ msgstr "0.1.1 Stable Release" msgid "This is the first public version of Alpaca" msgstr "This is the first public version of Alpaca" -#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41 +#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41 msgid "New Chat" msgstr "New Chat" @@ -760,67 +760,67 @@ msgstr "Chat exported successfully" msgid "Chat imported successfully" msgstr "Chat imported successfully" -#: src/window.py:278 +#: src/window.py:280 msgid "Close" msgstr "Close" -#: src/window.py:279 src/window.ui:806 +#: src/window.py:281 src/window.ui:813 msgid "Next" msgstr "Next" -#: src/window.py:329 +#: src/window.py:332 msgid "Pulling in the background..." msgstr "Pulling in the background..." 
-#: src/window.py:381 +#: src/window.py:384 msgid "Stop Creating '{}'" msgstr "Stop Creating '{}'" -#: src/window.py:418 +#: src/window.py:421 msgid "image" msgstr "image" -#: src/window.py:588 +#: src/window.py:593 msgid "Remove Message" msgstr "Remove Message" -#: src/window.py:593 src/window.py:841 +#: src/window.py:598 src/window.py:869 msgid "Copy Message" msgstr "Copy Message" -#: src/window.py:598 +#: src/window.py:603 msgid "Edit Message" msgstr "Edit Message" -#: src/window.py:729 +#: src/window.py:661 +msgid "Missing Image" +msgstr "" + +#: src/window.py:677 +msgid "Missing image" +msgstr "" + +#: src/window.py:757 msgid "Remove '{} ({})'" msgstr "Remove '{} ({})'" -#: src/window.py:882 -msgid "Message Received" -msgstr "Message Received" - -#: src/window.py:882 -msgid "New message from '{}'" -msgstr "New message from '{}'" - -#: src/window.py:939 +#: src/window.py:969 msgid "Task Complete" msgstr "Task Complete" -#: src/window.py:939 +#: src/window.py:969 msgid "Model '{}' pulled successfully." msgstr "Model '{}' pulled successfully." -#: src/window.py:944 +#: src/window.py:974 msgid "Pull Model Error" msgstr "Pull Model Error" -#: src/window.py:944 +#: src/window.py:974 msgid "Failed to pull model '{}' due to network error." msgstr "Failed to pull model '{}' due to network error." -#: src/window.py:978 +#: src/window.py:1008 msgid "Stop Pulling '{} ({})'" msgstr "Stop Pulling '{} ({})'" @@ -832,7 +832,7 @@ msgstr "Menu" msgid "Toggle Sidebar" msgstr "Toggle Sidebar" -#: src/window.ui:107 src/window.ui:595 +#: src/window.ui:107 src/window.ui:587 msgid "Manage Models" msgstr "Manage Models" @@ -844,27 +844,27 @@ msgstr "Chat Menu" msgid "Attach File" msgstr "Attach File" -#: src/window.ui:238 src/window.ui:1120 +#: src/window.ui:242 src/window.ui:1127 msgid "Send Message" msgstr "Send Message" -#: src/window.ui:286 src/window.ui:965 src/window.ui:1079 +#: src/window.ui:290 src/window.ui:972 src/window.ui:1086 msgid "Preferences" msgstr "Preferences" -#: src/window.ui:289 src/window.ui:1057 +#: src/window.ui:293 src/window.ui:1064 msgid "General" msgstr "General" -#: src/window.ui:297 +#: src/window.ui:299 msgid "Use Remote Connection to Ollama" msgstr "Use Remote Connection to Ollama" -#: src/window.ui:303 +#: src/window.ui:305 msgid "URL of Remote Instance" msgstr "URL of Remote Instance" -#: src/window.ui:310 +#: src/window.ui:312 msgid "Bearer Token (Optional)" msgstr "Bearer Token (Optional)" @@ -872,15 +872,11 @@ msgstr "Bearer Token (Optional)" msgid "Run Alpaca In Background" msgstr "Run Alpaca In Background" -#: src/window.ui:331 -msgid "Model" -msgstr "Model" - -#: src/window.ui:341 +#: src/window.ui:333 msgid "Temperature" msgstr "Temperature" -#: src/window.ui:342 +#: src/window.ui:334 msgid "" "The temperature of the model. Increasing the temperature will make the model " "answer more creatively. (Default: 0.8)" @@ -888,11 +884,11 @@ msgstr "" "The temperature of the model. Increasing the temperature will make the model " "answer more creatively. (Default: 0.8)" -#: src/window.ui:357 +#: src/window.ui:349 msgid "Seed" msgstr "Seed" -#: src/window.ui:358 +#: src/window.ui:350 msgid "" "Sets the random number seed to use for generation. Setting this to a " "specific number will make the model generate the same text for the same " @@ -902,11 +898,11 @@ msgstr "" "specific number will make the model generate the same text for the same " "prompt. 
(Default: 0 (random))" -#: src/window.ui:372 +#: src/window.ui:364 msgid "Keep Alive Time" msgstr "Keep Alive Time" -#: src/window.ui:373 +#: src/window.ui:365 msgid "" "Controls how long the model will stay loaded into memory following the " "request in minutes (Default: 5)" @@ -914,15 +910,15 @@ msgstr "" "Controls how long the model will stay loaded into memory following the " "request in minutes (Default: 5)" -#: src/window.ui:389 +#: src/window.ui:381 msgid "Ollama Instance" msgstr "Ollama Instance" -#: src/window.ui:393 +#: src/window.ui:385 msgid "Ollama Overrides" msgstr "Ollama Overrides" -#: src/window.ui:394 +#: src/window.ui:386 msgid "" "Manage the arguments used on Ollama, any changes on this page only applies " "to the integrated instance, the instance will restart if you make changes." @@ -930,31 +926,31 @@ msgstr "" "Manage the arguments used on Ollama, any changes on this page only applies " "to the integrated instance, the instance will restart if you make changes." -#: src/window.ui:477 +#: src/window.ui:469 msgid "Create" msgstr "Create" -#: src/window.ui:490 src/window.ui:605 +#: src/window.ui:482 src/window.ui:597 msgid "Create Model" msgstr "Create Model" -#: src/window.ui:516 +#: src/window.ui:508 msgid "Base" msgstr "Base" -#: src/window.ui:534 +#: src/window.ui:526 msgid "Name" msgstr "Name" -#: src/window.ui:540 +#: src/window.ui:532 msgid "Context" msgstr "Context" -#: src/window.ui:555 +#: src/window.ui:547 msgid "Template" msgstr "Template" -#: src/window.ui:561 +#: src/window.ui:553 msgid "" "Some models require a specific template. Please visit the model's website " "for more information if you're unsure." @@ -962,39 +958,45 @@ msgstr "" "Some models require a specific template. Please visit the model's website " "for more information if you're unsure." -#: src/window.ui:612 +#: src/window.ui:604 msgid "Search Model" msgstr "Search Model" -#: src/window.ui:672 +#: src/window.ui:664 msgid "No Models Found" msgstr "No Models Found" -#: src/window.ui:673 +#: src/window.ui:665 msgid "Try a different search" msgstr "Try a different search" -#: src/window.ui:738 +#: src/window.ui:708 +msgid "" +"By downloading this model you accept the license agreement available on the " +"model's website." +msgstr "" + +#: src/window.ui:745 msgid "Open with Default App" msgstr "Open with Default App" -#: src/window.ui:790 +#: src/window.ui:797 msgid "Previous" msgstr "Previous" -#: src/window.ui:833 +#: src/window.ui:840 msgid "Welcome to Alpaca" msgstr "Welcome to Alpaca" -#: src/window.ui:834 +#: src/window.ui:841 msgid "Powered by Ollama" msgstr "Powered by Ollama" -#: src/window.ui:837 +#: src/window.ui:844 msgid "Ollama Website" msgstr "Ollama Website" -#: src/window.ui:854 +#: src/window.ui:861 msgid "" "Alpaca and its developers are not liable for any damages to devices or " "software resulting from the execution of code generated by an AI model. " @@ -1004,11 +1006,11 @@ msgstr "" "software resulting from the execution of code generated by an AI model. " "Please exercise caution and review the code carefully before running it." -#: src/window.ui:865 +#: src/window.ui:872 msgid "Featured Models" msgstr "Featured Models" -#: src/window.ui:866 +#: src/window.ui:873 msgid "" "Alpaca works locally on your device, to start chatting you'll need an AI " "model, you can either pull models from this list or the 'Manage Models' menu " @@ -1018,90 +1020,99 @@ msgstr "" "model, you can either pull models from this list or the 'Manage Models' menu " "later." 
-#: src/window.ui:876 +#: src/window.ui:883 msgid "Built by Meta" msgstr "Built by Meta" -#: src/window.ui:894 +#: src/window.ui:901 msgid "Built by Google DeepMind" msgstr "Built by Google DeepMind" -#: src/window.ui:912 +#: src/window.ui:919 msgid "Built by Microsoft" msgstr "Built by Microsoft" -#: src/window.ui:930 +#: src/window.ui:937 msgid "Multimodal AI with image recognition" msgstr "Multimodal AI with image recognition" -#: src/window.ui:959 +#: src/window.ui:966 msgid "Import Chat" msgstr "Import Chat" -#: src/window.ui:969 +#: src/window.ui:976 msgid "Keyboard Shortcuts" msgstr "Keyboard Shortcuts" -#: src/window.ui:973 +#: src/window.ui:980 msgid "About Alpaca" msgstr "About Alpaca" -#: src/window.ui:980 src/window.ui:999 +#: src/window.ui:987 src/window.ui:1006 msgid "Rename Chat" msgstr "Rename Chat" -#: src/window.ui:984 src/window.ui:1003 +#: src/window.ui:991 src/window.ui:1010 msgid "Export Chat" msgstr "Export Chat" -#: src/window.ui:988 +#: src/window.ui:995 msgid "Clear Chat" msgstr "Clear Chat" -#: src/window.ui:995 +#: src/window.ui:1002 msgid "Delete Chat" msgstr "Delete Chat" -#: src/window.ui:1011 +#: src/window.ui:1018 msgid "From Existing Model" msgstr "From Existing Model" -#: src/window.ui:1015 +#: src/window.ui:1022 msgid "From GGUF File (Experimental)" msgstr "From GGUF File (Experimental)" -#: src/window.ui:1061 +#: src/window.ui:1068 msgid "Close application" msgstr "Close application" -#: src/window.ui:1067 +#: src/window.ui:1074 msgid "Import chat" msgstr "Import chat" -#: src/window.ui:1073 +#: src/window.ui:1080 msgid "Clear chat" msgstr "Clear chat" -#: src/window.ui:1085 +#: src/window.ui:1092 msgid "New chat" msgstr "New chat" -#: src/window.ui:1091 +#: src/window.ui:1098 msgid "Show shortcuts window" msgstr "Show shortcuts window" -#: src/window.ui:1098 +#: src/window.ui:1105 msgid "Editor" msgstr "Editor" -#: src/window.ui:1102 +#: src/window.ui:1109 msgid "Copy" msgstr "Copy" -#: src/window.ui:1108 +#: src/window.ui:1115 msgid "Paste" msgstr "Paste" -#: src/window.ui:1114 +#: src/window.ui:1121 msgid "Insert new line" msgstr "Insert new line" + +#~ msgid "Message Received" +#~ msgstr "Message Received" + +#~ msgid "New message from '{}'" +#~ msgstr "New message from '{}'" + +#~ msgid "Model" +#~ msgstr "Model" diff --git a/po/es.po b/po/es.po index 9245b4e..10726d0 100644 --- a/po/es.po +++ b/po/es.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-07-07 16:04-0600\n" +"POT-Creation-Date: 2024-07-07 17:40-0600\n" "PO-Revision-Date: 2024-05-19 19:44-0600\n" "Last-Translator: Jeffry Samuel Eduarte Rojas \n" "Language-Team: Spanish\n" @@ -1193,6 +1193,511 @@ msgstr "Pegar" msgid "Insert new line" msgstr "Saltar línea" +#: src/available_models_descriptions.py:2 +msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B." +msgstr "" + +#: src/available_models_descriptions.py:3 +msgid "Meta Llama 3: The most capable openly available LLM to date" +msgstr "" + +#: src/available_models_descriptions.py:4 +msgid "Qwen2 is a new series of large language models from Alibaba group" +msgstr "" + +#: src/available_models_descriptions.py:5 +msgid "" +"An open-source Mixture-of-Experts code language model that achieves " +"performance comparable to GPT4-Turbo in code-specific tasks." +msgstr "" + +#: src/available_models_descriptions.py:6 +msgid "" +"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art " +"open models by Microsoft." 
+msgstr "" + +#: src/available_models_descriptions.py:7 +msgid "" +"Aya 23, released by Cohere, is a new family of state-of-the-art, " +"multilingual models that support 23 languages." +msgstr "" + +#: src/available_models_descriptions.py:8 +msgid "The 7B model released by Mistral AI, updated to version 0.3." +msgstr "" + +#: src/available_models_descriptions.py:9 +msgid "" +"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in " +"8x7b and 8x22b parameter sizes." +msgstr "" + +#: src/available_models_descriptions.py:10 +msgid "" +"CodeGemma is a collection of powerful, lightweight models that can perform a " +"variety of coding tasks like fill-in-the-middle code completion, code " +"generation, natural language understanding, mathematical reasoning, and " +"instruction following." +msgstr "" + +#: src/available_models_descriptions.py:11 +msgid "" +"Command R is a Large Language Model optimized for conversational interaction " +"and long context tasks." +msgstr "" + +#: src/available_models_descriptions.py:12 +msgid "" +"Command R+ is a powerful, scalable large language model purpose-built to " +"excel at real-world enterprise use cases." +msgstr "" + +#: src/available_models_descriptions.py:13 +msgid "" +"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines " +"a vision encoder and Vicuna for general-purpose visual and language " +"understanding. Updated to version 1.6." +msgstr "" + +#: src/available_models_descriptions.py:14 +msgid "" +"Gemma is a family of lightweight, state-of-the-art open models built by " +"Google DeepMind. Updated to version 1.1" +msgstr "" + +#: src/available_models_descriptions.py:15 +msgid "" +"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from " +"0.5B to 110B parameters" +msgstr "" + +#: src/available_models_descriptions.py:16 +msgid "" +"Llama 2 is a collection of foundation language models ranging from 7B to 70B " +"parameters." +msgstr "" + +#: src/available_models_descriptions.py:17 +msgid "" +"A large language model that can use text prompts to generate and discuss " +"code." +msgstr "" + +#: src/available_models_descriptions.py:18 +msgid "" +"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of " +"experts models that excels at coding tasks. Created by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:19 +msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope." +msgstr "" + +#: src/available_models_descriptions.py:20 +msgid "" +"DeepSeek Coder is a capable coding model trained on two trillion code and " +"natural language tokens." +msgstr "" + +#: src/available_models_descriptions.py:21 +msgid "" +"A high-performing open embedding model with a large token context window." +msgstr "" + +#: src/available_models_descriptions.py:22 +msgid "" +"Phi-2: a 2.7B language model by Microsoft Research that demonstrates " +"outstanding reasoning and language understanding capabilities." +msgstr "" + +#: src/available_models_descriptions.py:23 +msgid "" +"The uncensored Dolphin model based on Mistral that excels at coding tasks. " +"Updated to version 2.8." +msgstr "" + +#: src/available_models_descriptions.py:24 +msgid "" +"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the " +"Mistral 7B model using the OpenOrca dataset." +msgstr "" + +#: src/available_models_descriptions.py:25 +msgid "" +"A general-purpose model ranging from 3 billion parameters to 70 billion, " +"suitable for entry-level hardware." 
+msgstr "" + +#: src/available_models_descriptions.py:26 +msgid "State-of-the-art large embedding model from mixedbread.ai" +msgstr "" + +#: src/available_models_descriptions.py:27 +msgid "" +"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on " +"Llama 3 that has a variety of instruction, conversational, and coding skills." +msgstr "" + +#: src/available_models_descriptions.py:28 +msgid "" +"StarCoder2 is the next generation of transparently trained open code LLMs " +"that comes in three sizes: 3B, 7B and 15B parameters." +msgstr "" + +#: src/available_models_descriptions.py:29 +msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability." +msgstr "" + +#: src/available_models_descriptions.py:30 +msgid "" +"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models " +"that are trained to act as helpful assistants." +msgstr "" + +#: src/available_models_descriptions.py:31 +msgid "Yi 1.5 is a high-performing, bilingual language model." +msgstr "" + +#: src/available_models_descriptions.py:32 +msgid "" +"The powerful family of models by Nous Research that excels at scientific " +"discussion and coding tasks." +msgstr "" + +#: src/available_models_descriptions.py:33 +msgid "" +"General use chat model based on Llama and Llama 2 with 2K to 16K context " +"sizes." +msgstr "" + +#: src/available_models_descriptions.py:34 +msgid "" +"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on " +"Llama 2 uncensored by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:35 +msgid "" +"The TinyLlama project is an open endeavor to train a compact 1.1B Llama " +"model on 3 trillion tokens." +msgstr "" + +#: src/available_models_descriptions.py:36 +msgid "" +"State of the art large language model from Microsoft AI with improved " +"performance on complex chat, multilingual, reasoning and agent use cases." +msgstr "" + +#: src/available_models_descriptions.py:37 +msgid "" +"StarCoder is a code generation model trained on 80+ programming languages." +msgstr "" + +#: src/available_models_descriptions.py:38 +msgid "" +"Codestral is Mistral AI’s first-ever code model designed for code generation " +"tasks." +msgstr "" + +#: src/available_models_descriptions.py:39 +msgid "" +"A family of open-source models trained on a wide variety of data, surpassing " +"ChatGPT on various benchmarks. Updated to version 3.5-0106." +msgstr "" + +#: src/available_models_descriptions.py:40 +msgid "" +"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset " +"by Eric Hartford and based on TinyLlama." +msgstr "" + +#: src/available_models_descriptions.py:41 +msgid "" +"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully " +"open datasets." +msgstr "" + +#: src/available_models_descriptions.py:42 +msgid "State-of-the-art code generation model" +msgstr "" + +#: src/available_models_descriptions.py:43 +msgid "" +"Stable Code 3B is a coding model with instruct and code completion variants " +"on par with models such as Code Llama 7B that are 2.5x larger." +msgstr "" + +#: src/available_models_descriptions.py:44 +msgid "" +"A fine-tuned model based on Mistral with good coverage of domain and " +"language." +msgstr "" + +#: src/available_models_descriptions.py:45 +msgid "Model focused on math and logic problems" +msgstr "" + +#: src/available_models_descriptions.py:46 +msgid "" +"CodeQwen1.5 is a large language model pretrained on a large amount of code " +"data." 
+msgstr "" + +#: src/available_models_descriptions.py:47 +msgid "Code generation model based on Code Llama." +msgstr "" + +#: src/available_models_descriptions.py:48 +msgid "" +"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model " +"trained on multilingual data in English, Spanish, German, Italian, French, " +"Portuguese, and Dutch." +msgstr "" + +#: src/available_models_descriptions.py:49 +msgid "" +"A 7B and 15B uncensored variant of the Dolphin model family that excels at " +"coding, based on StarCoder2." +msgstr "" + +#: src/available_models_descriptions.py:50 +msgid "Embedding models on very large sentence level datasets." +msgstr "" + +#: src/available_models_descriptions.py:51 +msgid "General use models based on Llama and Llama 2 from Nous Research." +msgstr "" + +#: src/available_models_descriptions.py:52 +msgid "" +"Starling is a large language model trained by reinforcement learning from AI " +"feedback focused on improving chatbot helpfulness." +msgstr "" + +#: src/available_models_descriptions.py:53 +msgid "" +"SQLCoder is a code completion model fined-tuned on StarCoder for SQL " +"generation tasks" +msgstr "" + +#: src/available_models_descriptions.py:54 +msgid "" +"Orca 2 is built by Microsoft research, and are a fine-tuned version of " +"Meta's Llama 2 models. The model is designed to excel particularly in " +"reasoning." +msgstr "" + +#: src/available_models_descriptions.py:55 +msgid "" +"This model extends LLama-3 8B's context length from 8k to over 1m tokens." +msgstr "" + +#: src/available_models_descriptions.py:56 +msgid "An advanced language model crafted with 2 trillion bilingual tokens." +msgstr "" + +#: src/available_models_descriptions.py:57 +msgid "An extension of Llama 2 that supports a context of up to 128k tokens." +msgstr "" + +#: src/available_models_descriptions.py:58 +msgid "" +"A model from NVIDIA based on Llama 3 that excels at conversational question " +"answering (QA) and retrieval-augmented generation (RAG)." +msgstr "" + +#: src/available_models_descriptions.py:59 +msgid "" +"A compact, yet powerful 10.7B large language model designed for single-turn " +"conversation." +msgstr "" + +#: src/available_models_descriptions.py:60 +msgid "" +"Conversational model based on Llama 2 that performs competitively on various " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:61 +msgid "A family of open foundation models by IBM for Code Intelligence" +msgstr "" + +#: src/available_models_descriptions.py:62 +msgid "" +"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language " +"model by Microsoft Research." +msgstr "" + +#: src/available_models_descriptions.py:63 +msgid "General use model based on Llama 2." +msgstr "" + +#: src/available_models_descriptions.py:64 +msgid "" +"A companion assistant trained in philosophy, psychology, and personal " +"relationships. Based on Mistral." +msgstr "" + +#: src/available_models_descriptions.py:65 +msgid "" +"Llama 2 based model fine tuned on an Orca-style dataset. Originally called " +"Free Willy." +msgstr "" + +#: src/available_models_descriptions.py:66 +msgid "" +"BakLLaVA is a multimodal model consisting of the Mistral 7B base model " +"augmented with the LLaVA architecture." +msgstr "" + +#: src/available_models_descriptions.py:67 +msgid "" +"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several " +"benchmarks." 
+msgstr "" + +#: src/available_models_descriptions.py:68 +msgid "Uncensored version of Wizard LM model" +msgstr "" + +#: src/available_models_descriptions.py:69 +msgid "" +"Fine-tuned Llama 2 model to answer medical questions based on an open source " +"medical dataset." +msgstr "" + +#: src/available_models_descriptions.py:70 +msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral." +msgstr "" + +#: src/available_models_descriptions.py:71 +msgid "An extension of Mistral to support context windows of 64K or 128K." +msgstr "" + +#: src/available_models_descriptions.py:72 +msgid "" +"A suite of text embedding models by Snowflake, optimized for performance." +msgstr "" + +#: src/available_models_descriptions.py:73 +msgid "" +"An expansion of Llama 2 that specializes in integrating both general " +"language understanding and domain-specific knowledge, particularly in " +"programming and mathematics." +msgstr "" + +#: src/available_models_descriptions.py:74 +msgid "Great code generation model based on Llama2." +msgstr "" + +#: src/available_models_descriptions.py:75 +msgid "" +"Open-source medical large language model adapted from Llama 2 to the medical " +"domain." +msgstr "" + +#: src/available_models_descriptions.py:76 +msgid "" +"moondream2 is a small vision language model designed to run efficiently on " +"edge devices." +msgstr "" + +#: src/available_models_descriptions.py:77 +msgid "Uncensored Llama2 based model with support for a 16K context window." +msgstr "" + +#: src/available_models_descriptions.py:78 +msgid "" +"Nexus Raven is a 13B instruction tuned model for function calling tasks." +msgstr "" + +#: src/available_models_descriptions.py:79 +msgid "" +"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic " +"instruction data using OSS-Instruct, a novel approach to enlightening LLMs " +"with open-source code snippets." +msgstr "" + +#: src/available_models_descriptions.py:80 +msgid "A strong, economical, and efficient Mixture-of-Experts language model." +msgstr "" + +#: src/available_models_descriptions.py:81 +msgid "" +"A lightweight chat model allowing accurate, and responsive output without " +"requiring high-end hardware." +msgstr "" + +#: src/available_models_descriptions.py:82 +msgid "" +"A high-performing code instruct model created by merging two existing code " +"models." +msgstr "" + +#: src/available_models_descriptions.py:83 +msgid "A new small LLaVA model fine-tuned from Phi 3 Mini." +msgstr "" + +#: src/available_models_descriptions.py:84 +msgid "" +"MistralLite is a fine-tuned model based on Mistral with enhanced " +"capabilities of processing long contexts." +msgstr "" + +#: src/available_models_descriptions.py:85 +msgid "" +"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by " +"MelodysDreamj." +msgstr "" + +#: src/available_models_descriptions.py:86 +msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station." +msgstr "" + +#: src/available_models_descriptions.py:87 +msgid "" +"A language model created by combining two fine-tuned Llama 2 70B models into " +"one." +msgstr "" + +#: src/available_models_descriptions.py:88 +msgid "" +"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by " +"interleaving the model with itself." +msgstr "" + +#: src/available_models_descriptions.py:89 +msgid "" +"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. " +"Designed for chat and code generation." 
+msgstr "" + +#: src/available_models_descriptions.py:90 +msgid "" +"A top-performing mixture of experts model, fine-tuned with high-quality data." +msgstr "" + +#: src/available_models_descriptions.py:91 +msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr." +msgstr "" + +#: src/available_models_descriptions.py:92 +msgid "DBRX is an open, general-purpose LLM created by Databricks." +msgstr "" + +#: src/available_models_descriptions.py:93 +msgid "" +"Falcon2 is an 11B parameters causal decoder-only model built by TII and " +"trained over 5T tokens." +msgstr "" + +#: src/available_models_descriptions.py:94 +msgid "" +"A robust conversational model designed to be used for both chat and instruct " +"use cases." +msgstr "" + #, fuzzy #~ msgid "New message from '{}'" #~ msgstr "Nuevo diseño para el entry de mensaje" diff --git a/po/es.po~ b/po/es.po~ index 15685b2..9245b4e 100644 --- a/po/es.po~ +++ b/po/es.po~ @@ -6,8 +6,8 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" -"Report-Msgid-Bugs-To: https://github.com/Jeffser/Alpaca\n" -"POT-Creation-Date: 2024-07-02 18:21-0600\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2024-07-07 16:04-0600\n" "PO-Revision-Date: 2024-05-19 19:44-0600\n" "Last-Translator: Jeffry Samuel Eduarte Rojas \n" "Language-Team: Spanish\n" @@ -70,7 +70,7 @@ msgstr "Reconocimiento de documentos de texto plano" msgid "Import and export chats" msgstr "Añadida la opcion de importar y exportar chats" -#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853 +#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860 msgid "Disclaimer" msgstr "Aviso Legal" @@ -719,7 +719,7 @@ msgstr "0.1.1" msgid "This is the first public version of Alpaca" msgstr "Esta es la primera versión publica de Alpaca" -#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41 +#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41 msgid "New Chat" msgstr "Nuevo Chat" @@ -819,73 +819,72 @@ msgstr "Chat exportado exitosamente" msgid "Chat imported successfully" msgstr "Chat importado exitosamente" -#: src/window.py:278 +#: src/window.py:280 msgid "Close" msgstr "Cerrar" -#: src/window.py:279 src/window.ui:806 +#: src/window.py:281 src/window.ui:813 msgid "Next" msgstr "Siguiente" -#: src/window.py:329 +#: src/window.py:332 #, fuzzy msgid "Pulling in the background..." msgstr "Ejecutar en el fondo" -#: src/window.py:381 +#: src/window.py:384 msgid "Stop Creating '{}'" msgstr "" -#: src/window.py:418 +#: src/window.py:421 #, fuzzy msgid "image" msgstr "Imagen" -#: src/window.py:588 +#: src/window.py:593 #, fuzzy msgid "Remove Message" msgstr "Remover Imagen" -#: src/window.py:593 src/window.py:841 +#: src/window.py:598 src/window.py:869 #, fuzzy msgid "Copy Message" msgstr "Enviar Mensaje" -#: src/window.py:598 +#: src/window.py:603 #, fuzzy msgid "Edit Message" msgstr "Enviar Mensaje" -#: src/window.py:729 +#: src/window.py:661 +msgid "Missing Image" +msgstr "" + +#: src/window.py:677 +msgid "Missing image" +msgstr "" + +#: src/window.py:757 msgid "Remove '{} ({})'" msgstr "" -#: src/window.py:882 -msgid "Message Received" -msgstr "" - -#: src/window.py:882 -#, fuzzy -msgid "New message from '{}'" -msgstr "Nuevo diseño para el entry de mensaje" - -#: src/window.py:939 +#: src/window.py:969 msgid "Task Complete" msgstr "Tarea completada" -#: src/window.py:939 +#: src/window.py:969 msgid "Model '{}' pulled successfully." 
msgstr "El modelo '{}' fue descargado exitosamente" -#: src/window.py:944 +#: src/window.py:974 msgid "Pull Model Error" msgstr "Error Descargando Modelo" -#: src/window.py:944 +#: src/window.py:974 msgid "Failed to pull model '{}' due to network error." msgstr "No se pudo descargar el modelo '{}' debido a un error de red" -#: src/window.py:978 +#: src/window.py:1008 msgid "Stop Pulling '{} ({})'" msgstr "" @@ -897,7 +896,7 @@ msgstr "Menu" msgid "Toggle Sidebar" msgstr "Alternar barra de lado" -#: src/window.ui:107 src/window.ui:595 +#: src/window.ui:107 src/window.ui:587 #, fuzzy msgid "Manage Models" msgstr "Gestionar modelos" @@ -912,29 +911,29 @@ msgstr "Menu" msgid "Attach File" msgstr "Adjuntar archivo" -#: src/window.ui:238 src/window.ui:1120 +#: src/window.ui:242 src/window.ui:1127 msgid "Send Message" msgstr "Enviar Mensaje" -#: src/window.ui:286 src/window.ui:965 src/window.ui:1079 +#: src/window.ui:290 src/window.ui:972 src/window.ui:1086 msgid "Preferences" msgstr "Preferencias" -#: src/window.ui:289 src/window.ui:1057 +#: src/window.ui:293 src/window.ui:1064 msgid "General" msgstr "General" -#: src/window.ui:297 +#: src/window.ui:299 #, fuzzy msgid "Use Remote Connection to Ollama" msgstr "Gestiona una conección remota a Ollama" -#: src/window.ui:303 +#: src/window.ui:305 #, fuzzy msgid "URL of Remote Instance" msgstr "URL de la instancia remota" -#: src/window.ui:310 +#: src/window.ui:312 msgid "Bearer Token (Optional)" msgstr "" @@ -943,17 +942,12 @@ msgstr "" msgid "Run Alpaca In Background" msgstr "Ejecutar en el fondo" -#: src/window.ui:331 -#, fuzzy -msgid "Model" -msgstr "Parar Modelo" - -#: src/window.ui:341 +#: src/window.ui:333 #, fuzzy msgid "Temperature" msgstr "Temperatura" -#: src/window.ui:342 +#: src/window.ui:334 msgid "" "The temperature of the model. Increasing the temperature will make the model " "answer more creatively. (Default: 0.8)" @@ -961,11 +955,11 @@ msgstr "" "La temperatura del modelo. Incrementando la temparatura hará que el modelo " "responda más creativamente (Por defecto: 0.8)" -#: src/window.ui:357 +#: src/window.ui:349 msgid "Seed" msgstr "Semilla" -#: src/window.ui:358 +#: src/window.ui:350 msgid "" "Sets the random number seed to use for generation. 
Setting this to a " "specific number will make the model generate the same text for the same " @@ -975,11 +969,11 @@ msgstr "" "numero especifico hará que el modelo genere el mismo texto a la misma " "pregunta del usuario (Por defecto: 0 (Al azar))" -#: src/window.ui:372 +#: src/window.ui:364 msgid "Keep Alive Time" msgstr "Tiempo Para Mantener Vivo" -#: src/window.ui:373 +#: src/window.ui:365 #, fuzzy msgid "" "Controls how long the model will stay loaded into memory following the " @@ -988,17 +982,17 @@ msgstr "" "Controla por cuanto tiempo el modelo permanecera cargado en la memoria " "despues de la ultima petición en minutos (Por defecto: 5)" -#: src/window.ui:389 +#: src/window.ui:381 #, fuzzy msgid "Ollama Instance" msgstr "Instancia de Ollama" -#: src/window.ui:393 +#: src/window.ui:385 #, fuzzy msgid "Ollama Overrides" msgstr "Overrides de Ollama" -#: src/window.ui:394 +#: src/window.ui:386 #, fuzzy msgid "" "Manage the arguments used on Ollama, any changes on this page only applies " @@ -1008,33 +1002,33 @@ msgstr "" "solo aplica a la instancia integrada, la instancia se reiniciará si haces " "algún cambio" -#: src/window.ui:477 +#: src/window.ui:469 msgid "Create" msgstr "Crear" -#: src/window.ui:490 src/window.ui:605 +#: src/window.ui:482 src/window.ui:597 #, fuzzy msgid "Create Model" msgstr "Crear modelo" -#: src/window.ui:516 +#: src/window.ui:508 msgid "Base" msgstr "Base" -#: src/window.ui:534 +#: src/window.ui:526 msgid "Name" msgstr "Nombre" -#: src/window.ui:540 +#: src/window.ui:532 msgid "Context" msgstr "Contexto" -#: src/window.ui:555 +#: src/window.ui:547 #, fuzzy msgid "Template" msgstr "Plantilla" -#: src/window.ui:561 +#: src/window.ui:553 msgid "" "Some models require a specific template. Please visit the model's website " "for more information if you're unsure." @@ -1042,40 +1036,46 @@ msgstr "" "Algunos modelos requieren de una plantilla especifica. Por favor visita el " "sitio web del modelo para más información en caso de que no estés seguro" -#: src/window.ui:612 +#: src/window.ui:604 #, fuzzy msgid "Search Model" msgstr "Modelos Destacados" -#: src/window.ui:672 +#: src/window.ui:664 msgid "No Models Found" msgstr "" -#: src/window.ui:673 +#: src/window.ui:665 msgid "Try a different search" msgstr "" -#: src/window.ui:738 +#: src/window.ui:708 +msgid "" +"By downloading this model you accept the license agreement available on the " +"model's website." +msgstr "" + +#: src/window.ui:745 msgid "Open with Default App" msgstr "" -#: src/window.ui:790 +#: src/window.ui:797 msgid "Previous" msgstr "Anterior" -#: src/window.ui:833 +#: src/window.ui:840 msgid "Welcome to Alpaca" msgstr "Bienvenido a Alpaca" -#: src/window.ui:834 +#: src/window.ui:841 msgid "Powered by Ollama" msgstr "Impulsado por Ollama" -#: src/window.ui:837 +#: src/window.ui:844 msgid "Ollama Website" msgstr "Sitio Web de Ollama" -#: src/window.ui:854 +#: src/window.ui:861 msgid "" "Alpaca and its developers are not liable for any damages to devices or " "software resulting from the execution of code generated by an AI model. " @@ -1086,12 +1086,12 @@ msgstr "" "un modelo de IA. 
Por favor sea precavido y revise el codigo cuidadosamente " "antes de correrlo" -#: src/window.ui:865 +#: src/window.ui:872 #, fuzzy msgid "Featured Models" msgstr "Modelos Destacados" -#: src/window.ui:866 +#: src/window.ui:873 msgid "" "Alpaca works locally on your device, to start chatting you'll need an AI " "model, you can either pull models from this list or the 'Manage Models' menu " @@ -1101,98 +1101,106 @@ msgstr "" "necesitas un modelo IA, puedes descargar modelos de esta lista o usando el " "menu 'Gestionar Modelos' despues" -#: src/window.ui:876 +#: src/window.ui:883 msgid "Built by Meta" msgstr "Construido por Meta" -#: src/window.ui:894 +#: src/window.ui:901 msgid "Built by Google DeepMind" msgstr "Construido por Google DeepMind" -#: src/window.ui:912 +#: src/window.ui:919 msgid "Built by Microsoft" msgstr "Construido por Microsoft" -#: src/window.ui:930 +#: src/window.ui:937 msgid "Multimodal AI with image recognition" msgstr "IA multimodal con reconocimiento de imagenes" -#: src/window.ui:959 +#: src/window.ui:966 #, fuzzy msgid "Import Chat" msgstr "Importar chat" -#: src/window.ui:969 +#: src/window.ui:976 msgid "Keyboard Shortcuts" msgstr "Atajos de Teclado" -#: src/window.ui:973 +#: src/window.ui:980 msgid "About Alpaca" msgstr "Sobre Alpaca" -#: src/window.ui:980 src/window.ui:999 +#: src/window.ui:987 src/window.ui:1006 msgid "Rename Chat" msgstr "Renombrar Chat" -#: src/window.ui:984 src/window.ui:1003 +#: src/window.ui:991 src/window.ui:1010 #, fuzzy msgid "Export Chat" msgstr "Importar chat" -#: src/window.ui:988 +#: src/window.ui:995 msgid "Clear Chat" msgstr "Limpiar Chat" -#: src/window.ui:995 +#: src/window.ui:1002 msgid "Delete Chat" msgstr "Eliminar Chat" -#: src/window.ui:1011 +#: src/window.ui:1018 #, fuzzy msgid "From Existing Model" msgstr "Usar modelo existente" -#: src/window.ui:1015 +#: src/window.ui:1022 #, fuzzy msgid "From GGUF File (Experimental)" msgstr "Usar archivo GGUF (Experimental)" -#: src/window.ui:1061 +#: src/window.ui:1068 msgid "Close application" msgstr "Cerrar aplicación" -#: src/window.ui:1067 +#: src/window.ui:1074 msgid "Import chat" msgstr "Importar chat" -#: src/window.ui:1073 +#: src/window.ui:1080 msgid "Clear chat" msgstr "Limpiar chat" -#: src/window.ui:1085 +#: src/window.ui:1092 msgid "New chat" msgstr "Nuevo chat" -#: src/window.ui:1091 +#: src/window.ui:1098 msgid "Show shortcuts window" msgstr "Mostrar ventana de atajos" -#: src/window.ui:1098 +#: src/window.ui:1105 msgid "Editor" msgstr "Editor" -#: src/window.ui:1102 +#: src/window.ui:1109 msgid "Copy" msgstr "Copiar" -#: src/window.ui:1108 +#: src/window.ui:1115 msgid "Paste" msgstr "Pegar" -#: src/window.ui:1114 +#: src/window.ui:1121 msgid "Insert new line" msgstr "Saltar línea" +#, fuzzy +#~ msgid "New message from '{}'" +#~ msgstr "Nuevo diseño para el entry de mensaje" + +#, fuzzy +#~ msgid "Model" +#~ msgstr "Parar Modelo" + #, fuzzy #~ msgid "Send message" #~ msgstr "Enviar Mensaje" diff --git a/po/fr.po b/po/fr.po index 4866eb5..c4aa7e4 100644 --- a/po/fr.po +++ b/po/fr.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-07-07 16:04-0600\n" +"POT-Creation-Date: 2024-07-07 17:40-0600\n" "PO-Revision-Date: 2024-07-06 15:45+0200\n" "Last-Translator: Louis Chauvet-Villaret \n" "Language-Team: French\n" @@ -1152,6 +1152,511 @@ msgstr "Coller" msgid "Insert new line" msgstr "Ajouter une ligne" +#: src/available_models_descriptions.py:2 +msgid "Google Gemma 2 is now available in 2 sizes, 9B and 
27B." +msgstr "" + +#: src/available_models_descriptions.py:3 +msgid "Meta Llama 3: The most capable openly available LLM to date" +msgstr "" + +#: src/available_models_descriptions.py:4 +msgid "Qwen2 is a new series of large language models from Alibaba group" +msgstr "" + +#: src/available_models_descriptions.py:5 +msgid "" +"An open-source Mixture-of-Experts code language model that achieves " +"performance comparable to GPT4-Turbo in code-specific tasks." +msgstr "" + +#: src/available_models_descriptions.py:6 +msgid "" +"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art " +"open models by Microsoft." +msgstr "" + +#: src/available_models_descriptions.py:7 +msgid "" +"Aya 23, released by Cohere, is a new family of state-of-the-art, " +"multilingual models that support 23 languages." +msgstr "" + +#: src/available_models_descriptions.py:8 +msgid "The 7B model released by Mistral AI, updated to version 0.3." +msgstr "" + +#: src/available_models_descriptions.py:9 +msgid "" +"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in " +"8x7b and 8x22b parameter sizes." +msgstr "" + +#: src/available_models_descriptions.py:10 +msgid "" +"CodeGemma is a collection of powerful, lightweight models that can perform a " +"variety of coding tasks like fill-in-the-middle code completion, code " +"generation, natural language understanding, mathematical reasoning, and " +"instruction following." +msgstr "" + +#: src/available_models_descriptions.py:11 +msgid "" +"Command R is a Large Language Model optimized for conversational interaction " +"and long context tasks." +msgstr "" + +#: src/available_models_descriptions.py:12 +msgid "" +"Command R+ is a powerful, scalable large language model purpose-built to " +"excel at real-world enterprise use cases." +msgstr "" + +#: src/available_models_descriptions.py:13 +msgid "" +"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines " +"a vision encoder and Vicuna for general-purpose visual and language " +"understanding. Updated to version 1.6." +msgstr "" + +#: src/available_models_descriptions.py:14 +msgid "" +"Gemma is a family of lightweight, state-of-the-art open models built by " +"Google DeepMind. Updated to version 1.1" +msgstr "" + +#: src/available_models_descriptions.py:15 +msgid "" +"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from " +"0.5B to 110B parameters" +msgstr "" + +#: src/available_models_descriptions.py:16 +msgid "" +"Llama 2 is a collection of foundation language models ranging from 7B to 70B " +"parameters." +msgstr "" + +#: src/available_models_descriptions.py:17 +msgid "" +"A large language model that can use text prompts to generate and discuss " +"code." +msgstr "" + +#: src/available_models_descriptions.py:18 +msgid "" +"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of " +"experts models that excels at coding tasks. Created by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:19 +msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope." +msgstr "" + +#: src/available_models_descriptions.py:20 +msgid "" +"DeepSeek Coder is a capable coding model trained on two trillion code and " +"natural language tokens." +msgstr "" + +#: src/available_models_descriptions.py:21 +msgid "" +"A high-performing open embedding model with a large token context window." 
+msgstr "" + +#: src/available_models_descriptions.py:22 +msgid "" +"Phi-2: a 2.7B language model by Microsoft Research that demonstrates " +"outstanding reasoning and language understanding capabilities." +msgstr "" + +#: src/available_models_descriptions.py:23 +msgid "" +"The uncensored Dolphin model based on Mistral that excels at coding tasks. " +"Updated to version 2.8." +msgstr "" + +#: src/available_models_descriptions.py:24 +msgid "" +"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the " +"Mistral 7B model using the OpenOrca dataset." +msgstr "" + +#: src/available_models_descriptions.py:25 +msgid "" +"A general-purpose model ranging from 3 billion parameters to 70 billion, " +"suitable for entry-level hardware." +msgstr "" + +#: src/available_models_descriptions.py:26 +msgid "State-of-the-art large embedding model from mixedbread.ai" +msgstr "" + +#: src/available_models_descriptions.py:27 +msgid "" +"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on " +"Llama 3 that has a variety of instruction, conversational, and coding skills." +msgstr "" + +#: src/available_models_descriptions.py:28 +msgid "" +"StarCoder2 is the next generation of transparently trained open code LLMs " +"that comes in three sizes: 3B, 7B and 15B parameters." +msgstr "" + +#: src/available_models_descriptions.py:29 +msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability." +msgstr "" + +#: src/available_models_descriptions.py:30 +msgid "" +"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models " +"that are trained to act as helpful assistants." +msgstr "" + +#: src/available_models_descriptions.py:31 +msgid "Yi 1.5 is a high-performing, bilingual language model." +msgstr "" + +#: src/available_models_descriptions.py:32 +msgid "" +"The powerful family of models by Nous Research that excels at scientific " +"discussion and coding tasks." +msgstr "" + +#: src/available_models_descriptions.py:33 +msgid "" +"General use chat model based on Llama and Llama 2 with 2K to 16K context " +"sizes." +msgstr "" + +#: src/available_models_descriptions.py:34 +msgid "" +"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on " +"Llama 2 uncensored by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:35 +msgid "" +"The TinyLlama project is an open endeavor to train a compact 1.1B Llama " +"model on 3 trillion tokens." +msgstr "" + +#: src/available_models_descriptions.py:36 +msgid "" +"State of the art large language model from Microsoft AI with improved " +"performance on complex chat, multilingual, reasoning and agent use cases." +msgstr "" + +#: src/available_models_descriptions.py:37 +msgid "" +"StarCoder is a code generation model trained on 80+ programming languages." +msgstr "" + +#: src/available_models_descriptions.py:38 +msgid "" +"Codestral is Mistral AI’s first-ever code model designed for code generation " +"tasks." +msgstr "" + +#: src/available_models_descriptions.py:39 +msgid "" +"A family of open-source models trained on a wide variety of data, surpassing " +"ChatGPT on various benchmarks. Updated to version 3.5-0106." +msgstr "" + +#: src/available_models_descriptions.py:40 +msgid "" +"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset " +"by Eric Hartford and based on TinyLlama." +msgstr "" + +#: src/available_models_descriptions.py:41 +msgid "" +"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully " +"open datasets." 
+msgstr "" + +#: src/available_models_descriptions.py:42 +msgid "State-of-the-art code generation model" +msgstr "" + +#: src/available_models_descriptions.py:43 +msgid "" +"Stable Code 3B is a coding model with instruct and code completion variants " +"on par with models such as Code Llama 7B that are 2.5x larger." +msgstr "" + +#: src/available_models_descriptions.py:44 +msgid "" +"A fine-tuned model based on Mistral with good coverage of domain and " +"language." +msgstr "" + +#: src/available_models_descriptions.py:45 +msgid "Model focused on math and logic problems" +msgstr "" + +#: src/available_models_descriptions.py:46 +msgid "" +"CodeQwen1.5 is a large language model pretrained on a large amount of code " +"data." +msgstr "" + +#: src/available_models_descriptions.py:47 +msgid "Code generation model based on Code Llama." +msgstr "" + +#: src/available_models_descriptions.py:48 +msgid "" +"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model " +"trained on multilingual data in English, Spanish, German, Italian, French, " +"Portuguese, and Dutch." +msgstr "" + +#: src/available_models_descriptions.py:49 +msgid "" +"A 7B and 15B uncensored variant of the Dolphin model family that excels at " +"coding, based on StarCoder2." +msgstr "" + +#: src/available_models_descriptions.py:50 +msgid "Embedding models on very large sentence level datasets." +msgstr "" + +#: src/available_models_descriptions.py:51 +msgid "General use models based on Llama and Llama 2 from Nous Research." +msgstr "" + +#: src/available_models_descriptions.py:52 +msgid "" +"Starling is a large language model trained by reinforcement learning from AI " +"feedback focused on improving chatbot helpfulness." +msgstr "" + +#: src/available_models_descriptions.py:53 +msgid "" +"SQLCoder is a code completion model fined-tuned on StarCoder for SQL " +"generation tasks" +msgstr "" + +#: src/available_models_descriptions.py:54 +msgid "" +"Orca 2 is built by Microsoft research, and are a fine-tuned version of " +"Meta's Llama 2 models. The model is designed to excel particularly in " +"reasoning." +msgstr "" + +#: src/available_models_descriptions.py:55 +msgid "" +"This model extends LLama-3 8B's context length from 8k to over 1m tokens." +msgstr "" + +#: src/available_models_descriptions.py:56 +msgid "An advanced language model crafted with 2 trillion bilingual tokens." +msgstr "" + +#: src/available_models_descriptions.py:57 +msgid "An extension of Llama 2 that supports a context of up to 128k tokens." +msgstr "" + +#: src/available_models_descriptions.py:58 +msgid "" +"A model from NVIDIA based on Llama 3 that excels at conversational question " +"answering (QA) and retrieval-augmented generation (RAG)." +msgstr "" + +#: src/available_models_descriptions.py:59 +msgid "" +"A compact, yet powerful 10.7B large language model designed for single-turn " +"conversation." +msgstr "" + +#: src/available_models_descriptions.py:60 +msgid "" +"Conversational model based on Llama 2 that performs competitively on various " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:61 +msgid "A family of open foundation models by IBM for Code Intelligence" +msgstr "" + +#: src/available_models_descriptions.py:62 +msgid "" +"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language " +"model by Microsoft Research." +msgstr "" + +#: src/available_models_descriptions.py:63 +msgid "General use model based on Llama 2." 
+msgstr "" + +#: src/available_models_descriptions.py:64 +msgid "" +"A companion assistant trained in philosophy, psychology, and personal " +"relationships. Based on Mistral." +msgstr "" + +#: src/available_models_descriptions.py:65 +msgid "" +"Llama 2 based model fine tuned on an Orca-style dataset. Originally called " +"Free Willy." +msgstr "" + +#: src/available_models_descriptions.py:66 +msgid "" +"BakLLaVA is a multimodal model consisting of the Mistral 7B base model " +"augmented with the LLaVA architecture." +msgstr "" + +#: src/available_models_descriptions.py:67 +msgid "" +"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:68 +msgid "Uncensored version of Wizard LM model" +msgstr "" + +#: src/available_models_descriptions.py:69 +msgid "" +"Fine-tuned Llama 2 model to answer medical questions based on an open source " +"medical dataset." +msgstr "" + +#: src/available_models_descriptions.py:70 +msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral." +msgstr "" + +#: src/available_models_descriptions.py:71 +msgid "An extension of Mistral to support context windows of 64K or 128K." +msgstr "" + +#: src/available_models_descriptions.py:72 +msgid "" +"A suite of text embedding models by Snowflake, optimized for performance." +msgstr "" + +#: src/available_models_descriptions.py:73 +msgid "" +"An expansion of Llama 2 that specializes in integrating both general " +"language understanding and domain-specific knowledge, particularly in " +"programming and mathematics." +msgstr "" + +#: src/available_models_descriptions.py:74 +msgid "Great code generation model based on Llama2." +msgstr "" + +#: src/available_models_descriptions.py:75 +msgid "" +"Open-source medical large language model adapted from Llama 2 to the medical " +"domain." +msgstr "" + +#: src/available_models_descriptions.py:76 +msgid "" +"moondream2 is a small vision language model designed to run efficiently on " +"edge devices." +msgstr "" + +#: src/available_models_descriptions.py:77 +msgid "Uncensored Llama2 based model with support for a 16K context window." +msgstr "" + +#: src/available_models_descriptions.py:78 +msgid "" +"Nexus Raven is a 13B instruction tuned model for function calling tasks." +msgstr "" + +#: src/available_models_descriptions.py:79 +msgid "" +"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic " +"instruction data using OSS-Instruct, a novel approach to enlightening LLMs " +"with open-source code snippets." +msgstr "" + +#: src/available_models_descriptions.py:80 +msgid "A strong, economical, and efficient Mixture-of-Experts language model." +msgstr "" + +#: src/available_models_descriptions.py:81 +msgid "" +"A lightweight chat model allowing accurate, and responsive output without " +"requiring high-end hardware." +msgstr "" + +#: src/available_models_descriptions.py:82 +msgid "" +"A high-performing code instruct model created by merging two existing code " +"models." +msgstr "" + +#: src/available_models_descriptions.py:83 +msgid "A new small LLaVA model fine-tuned from Phi 3 Mini." +msgstr "" + +#: src/available_models_descriptions.py:84 +msgid "" +"MistralLite is a fine-tuned model based on Mistral with enhanced " +"capabilities of processing long contexts." +msgstr "" + +#: src/available_models_descriptions.py:85 +msgid "" +"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by " +"MelodysDreamj." 
+msgstr "" + +#: src/available_models_descriptions.py:86 +msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station." +msgstr "" + +#: src/available_models_descriptions.py:87 +msgid "" +"A language model created by combining two fine-tuned Llama 2 70B models into " +"one." +msgstr "" + +#: src/available_models_descriptions.py:88 +msgid "" +"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by " +"interleaving the model with itself." +msgstr "" + +#: src/available_models_descriptions.py:89 +msgid "" +"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. " +"Designed for chat and code generation." +msgstr "" + +#: src/available_models_descriptions.py:90 +msgid "" +"A top-performing mixture of experts model, fine-tuned with high-quality data." +msgstr "" + +#: src/available_models_descriptions.py:91 +msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr." +msgstr "" + +#: src/available_models_descriptions.py:92 +msgid "DBRX is an open, general-purpose LLM created by Databricks." +msgstr "" + +#: src/available_models_descriptions.py:93 +msgid "" +"Falcon2 is an 11B parameters causal decoder-only model built by TII and " +"trained over 5T tokens." +msgstr "" + +#: src/available_models_descriptions.py:94 +msgid "" +"A robust conversational model designed to be used for both chat and instruct " +"use cases." +msgstr "" + #~ msgid "Message Received" #~ msgstr "Message reçu" diff --git a/po/fr.po~ b/po/fr.po~ index ad7a6b4..4866eb5 100644 --- a/po/fr.po~ +++ b/po/fr.po~ @@ -6,8 +6,8 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" -"Report-Msgid-Bugs-To: https://github.com/Jeffser/Alpaca\n" -"POT-Creation-Date: 2024-07-02 18:21-0600\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2024-07-07 16:04-0600\n" "PO-Revision-Date: 2024-07-06 15:45+0200\n" "Last-Translator: Louis Chauvet-Villaret \n" "Language-Team: French\n" @@ -66,7 +66,7 @@ msgstr "Reconnaissance de documents en texte brut" msgid "Import and export chats" msgstr "Importez et exportez les discussions" -#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853 +#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860 msgid "Disclaimer" msgstr "Avertissement" @@ -703,7 +703,7 @@ msgstr "Version stable" msgid "This is the first public version of Alpaca" msgstr "Première version publique d'Alpaca" -#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41 +#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41 msgid "New Chat" msgstr "Nouvelle discussion" @@ -798,68 +798,68 @@ msgstr "Discussion exportée avec succès" msgid "Chat imported successfully" msgstr "Discussion importée avec succès" -#: src/window.py:278 +#: src/window.py:280 msgid "Close" msgstr "Fermer" -#: src/window.py:279 src/window.ui:806 +#: src/window.py:281 src/window.ui:813 msgid "Next" msgstr "Suivant" -#: src/window.py:329 +#: src/window.py:332 msgid "Pulling in the background..." msgstr "Téléchargement en arrière-plan..." 
-#: src/window.py:381 +#: src/window.py:384 msgid "Stop Creating '{}'" msgstr "Arrêter de créer '{}'" -#: src/window.py:418 +#: src/window.py:421 msgid "image" msgstr "Image" -#: src/window.py:588 +#: src/window.py:593 msgid "Remove Message" msgstr "Supprimer le message" -#: src/window.py:593 src/window.py:841 +#: src/window.py:598 src/window.py:869 msgid "Copy Message" msgstr "Copier le message" -#: src/window.py:598 +#: src/window.py:603 msgid "Edit Message" msgstr "Modifier le message" -#: src/window.py:729 +#: src/window.py:661 +msgid "Missing Image" +msgstr "" + +#: src/window.py:677 +msgid "Missing image" +msgstr "" + +#: src/window.py:757 msgid "Remove '{} ({})'" msgstr "Supprimer '{} ({})'" -#: src/window.py:882 -msgid "Message Received" -msgstr "Message reçu" - -#: src/window.py:882 -msgid "New message from '{}'" -msgstr "Nouveau message depuis '{}'" - -#: src/window.py:939 +#: src/window.py:969 msgid "Task Complete" msgstr "Tâche terminée" -#: src/window.py:939 +#: src/window.py:969 msgid "Model '{}' pulled successfully." msgstr "Modèle '{}' téléchargé avec succès." -#: src/window.py:944 +#: src/window.py:974 msgid "Pull Model Error" msgstr "Erreur de téléchargement du modèle" -#: src/window.py:944 +#: src/window.py:974 msgid "Failed to pull model '{}' due to network error." msgstr "" "Échec du téléchargement du modèle '{}' à cause d'une erreur de connexion" -#: src/window.py:978 +#: src/window.py:1008 msgid "Stop Pulling '{} ({})'" msgstr "Arrêter de télécharger '{} ({})'" @@ -871,7 +871,7 @@ msgstr "Menu" msgid "Toggle Sidebar" msgstr "Basculer la barre latérale" -#: src/window.ui:107 src/window.ui:595 +#: src/window.ui:107 src/window.ui:587 msgid "Manage Models" msgstr "Gérer les modèles" @@ -883,29 +883,29 @@ msgstr "Menu des discussions" msgid "Attach File" msgstr "Ajouter un fichier" -#: src/window.ui:238 src/window.ui:1120 +#: src/window.ui:242 src/window.ui:1127 msgid "Send Message" msgstr "Envoyer le message" -#: src/window.ui:286 src/window.ui:965 src/window.ui:1079 +#: src/window.ui:290 src/window.ui:972 src/window.ui:1086 msgid "Preferences" msgstr "Préférences" -#: src/window.ui:289 src/window.ui:1057 +#: src/window.ui:293 src/window.ui:1064 msgid "General" msgstr "Général" -#: src/window.ui:297 +#: src/window.ui:299 msgid "Use Remote Connection to Ollama" msgstr "Utiliser une connexion à distance d'Ollama" -#: src/window.ui:303 +#: src/window.ui:305 msgid "URL of Remote Instance" msgstr "URL de la connexion distante" # I don't really know how to translate "Bearer Token" # I search for it but they don't translate it -#: src/window.ui:310 +#: src/window.ui:312 msgid "Bearer Token (Optional)" msgstr "Bearer Token (Optionnel)" @@ -913,15 +913,11 @@ msgstr "Bearer Token (Optionnel)" msgid "Run Alpaca In Background" msgstr "Exécuter Alpaca en arrière-plan" -#: src/window.ui:331 -msgid "Model" -msgstr "Modèle" - -#: src/window.ui:341 +#: src/window.ui:333 msgid "Temperature" msgstr "Température" -#: src/window.ui:342 +#: src/window.ui:334 msgid "" "The temperature of the model. Increasing the temperature will make the model " "answer more creatively. (Default: 0.8)" @@ -929,11 +925,11 @@ msgstr "" "La température du modèle. Augmenter la température engendrera des réponses " "plus créatives. (défaut : 0.8)" -#: src/window.ui:357 +#: src/window.ui:349 msgid "Seed" msgstr "Graine" -#: src/window.ui:358 +#: src/window.ui:350 msgid "" "Sets the random number seed to use for generation. 
Setting this to a " "specific number will make the model generate the same text for the same " @@ -943,11 +939,11 @@ msgstr "" "spécifique induira une même réponse pour un même prompt. (défaut : 0 " "(aléatoire))" -#: src/window.ui:372 +#: src/window.ui:364 msgid "Keep Alive Time" msgstr "Temps en mémoire" -#: src/window.ui:373 +#: src/window.ui:365 msgid "" "Controls how long the model will stay loaded into memory following the " "request in minutes (Default: 5)" @@ -955,16 +951,16 @@ msgstr "" "Gérer la durée en minutes durant laquelle le modèle reste chargé en mémoire " "(défaut : 5)" -#: src/window.ui:389 +#: src/window.ui:381 msgid "Ollama Instance" msgstr "Instance d'Ollama" # What is override ??? -#: src/window.ui:393 +#: src/window.ui:385 msgid "Ollama Overrides" msgstr "Paramètres d'Ollama" -#: src/window.ui:394 +#: src/window.ui:386 msgid "" "Manage the arguments used on Ollama, any changes on this page only applies " "to the integrated instance, the instance will restart if you make changes." @@ -973,31 +969,31 @@ msgstr "" "s'appliquent seulement à l'instance intégré. L'instance va redémarrer si " "vous effectuez des changements." -#: src/window.ui:477 +#: src/window.ui:469 msgid "Create" msgstr "Créer" -#: src/window.ui:490 src/window.ui:605 +#: src/window.ui:482 src/window.ui:597 msgid "Create Model" msgstr "Créer un modèle" -#: src/window.ui:516 +#: src/window.ui:508 msgid "Base" msgstr "Base" -#: src/window.ui:534 +#: src/window.ui:526 msgid "Name" msgstr "Nom" -#: src/window.ui:540 +#: src/window.ui:532 msgid "Context" msgstr "Contexte" -#: src/window.ui:555 +#: src/window.ui:547 msgid "Template" msgstr "Template" -#: src/window.ui:561 +#: src/window.ui:553 msgid "" "Some models require a specific template. Please visit the model's website " "for more information if you're unsure." @@ -1005,39 +1001,45 @@ msgstr "" "Certains modèles requièrent un format spécifique. Merci de visiter le site " "du modèle pour plus d'information." -#: src/window.ui:612 +#: src/window.ui:604 msgid "Search Model" msgstr "Chercher un modèle" -#: src/window.ui:672 +#: src/window.ui:664 msgid "No Models Found" msgstr "Aucun modèle trouvé" -#: src/window.ui:673 +#: src/window.ui:665 msgid "Try a different search" msgstr "Essayez une recherche différente" -#: src/window.ui:738 +#: src/window.ui:708 +msgid "" +"By downloading this model you accept the license agreement available on the " +"model's website." +msgstr "" + +#: src/window.ui:745 msgid "Open with Default App" msgstr "Ouvrir avec l'application par défaut" -#: src/window.ui:790 +#: src/window.ui:797 msgid "Previous" msgstr "Précédent" -#: src/window.ui:833 +#: src/window.ui:840 msgid "Welcome to Alpaca" msgstr "Bienvenue sur Alpaca" -#: src/window.ui:834 +#: src/window.ui:841 msgid "Powered by Ollama" msgstr "Fonctionne grâce à Ollama" -#: src/window.ui:837 +#: src/window.ui:844 msgid "Ollama Website" msgstr "Site web d'Ollama" -#: src/window.ui:854 +#: src/window.ui:861 msgid "" "Alpaca and its developers are not liable for any damages to devices or " "software resulting from the execution of code generated by an AI model. " @@ -1048,11 +1050,11 @@ msgstr "" "modèle. Merci de faire attention et de relire attentivement le code avant de " "l'exécuter." 
-#: src/window.ui:865 +#: src/window.ui:872 msgid "Featured Models" msgstr "Modèles recommandés" -#: src/window.ui:866 +#: src/window.ui:873 msgid "" "Alpaca works locally on your device, to start chatting you'll need an AI " "model, you can either pull models from this list or the 'Manage Models' menu " @@ -1062,94 +1064,103 @@ msgstr "" "vous aurez besoin d'un modèle d'IA, vous pouvez télécharger un modèle soit " "depuis cette liste soit depuis le menu 'Gérer les modèles' plus tard." -#: src/window.ui:876 +#: src/window.ui:883 msgid "Built by Meta" msgstr "Développé par Meta" -#: src/window.ui:894 +#: src/window.ui:901 msgid "Built by Google DeepMind" msgstr "Développé par Google DeepMind" -#: src/window.ui:912 +#: src/window.ui:919 msgid "Built by Microsoft" msgstr "Développé par Microsoft" -#: src/window.ui:930 +#: src/window.ui:937 msgid "Multimodal AI with image recognition" msgstr "IA multimodale avec reconnaissance d'image" -#: src/window.ui:959 +#: src/window.ui:966 msgid "Import Chat" msgstr "Importer une discussion" -#: src/window.ui:969 +#: src/window.ui:976 msgid "Keyboard Shortcuts" msgstr "Raccourcis claviers" -#: src/window.ui:973 +#: src/window.ui:980 msgid "About Alpaca" msgstr "À propos d'Alpaca" -#: src/window.ui:980 src/window.ui:999 +#: src/window.ui:987 src/window.ui:1006 msgid "Rename Chat" msgstr "Renommer la discussion" -#: src/window.ui:984 src/window.ui:1003 +#: src/window.ui:991 src/window.ui:1010 msgid "Export Chat" msgstr "Exporter la discussion" -#: src/window.ui:988 +#: src/window.ui:995 msgid "Clear Chat" msgstr "Supprimer la discussion" -#: src/window.ui:995 +#: src/window.ui:1002 msgid "Delete Chat" msgstr "Supprimer la discussion" -#: src/window.ui:1011 +#: src/window.ui:1018 msgid "From Existing Model" msgstr "Depuis un modèle existant" -#: src/window.ui:1015 +#: src/window.ui:1022 msgid "From GGUF File (Experimental)" msgstr "Depuis un fichier GGUF (Experimental)" -#: src/window.ui:1061 +#: src/window.ui:1068 msgid "Close application" msgstr "Fermer l'application" -#: src/window.ui:1067 +#: src/window.ui:1074 msgid "Import chat" msgstr "Importer une discussion" -#: src/window.ui:1073 +#: src/window.ui:1080 msgid "Clear chat" msgstr "Supprimer la discussion" -#: src/window.ui:1085 +#: src/window.ui:1092 msgid "New chat" msgstr "Nouvelle discussion" -#: src/window.ui:1091 +#: src/window.ui:1098 msgid "Show shortcuts window" msgstr "Voir les raccourcis clavier" -#: src/window.ui:1098 +#: src/window.ui:1105 msgid "Editor" msgstr "Éditeur" -#: src/window.ui:1102 +#: src/window.ui:1109 msgid "Copy" msgstr "Copier" -#: src/window.ui:1108 +#: src/window.ui:1115 msgid "Paste" msgstr "Coller" -#: src/window.ui:1114 +#: src/window.ui:1121 msgid "Insert new line" msgstr "Ajouter une ligne" +#~ msgid "Message Received" +#~ msgstr "Message reçu" + +#~ msgid "New message from '{}'" +#~ msgstr "Nouveau message depuis '{}'" + +#~ msgid "Model" +#~ msgstr "Modèle" + #~ msgid "Send message" #~ msgstr "Envoyer le message" diff --git a/po/nb_NO.po b/po/nb_NO.po index f9faae3..8e9e6f2 100644 --- a/po/nb_NO.po +++ b/po/nb_NO.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-07-07 16:06-0600\n" +"POT-Creation-Date: 2024-07-07 17:40-0600\n" "PO-Revision-Date: 2024-07-02 18:24-0600\n" "Last-Translator: Niklas Opsahl Halvorsen\n" "Language-Team: Norwegian\n" @@ -1104,6 +1104,511 @@ msgstr "Lim inn" msgid "Insert new line" msgstr "Sett inn ny linje" +#: src/available_models_descriptions.py:2 +msgid "Google 
Gemma 2 is now available in 2 sizes, 9B and 27B." +msgstr "" + +#: src/available_models_descriptions.py:3 +msgid "Meta Llama 3: The most capable openly available LLM to date" +msgstr "" + +#: src/available_models_descriptions.py:4 +msgid "Qwen2 is a new series of large language models from Alibaba group" +msgstr "" + +#: src/available_models_descriptions.py:5 +msgid "" +"An open-source Mixture-of-Experts code language model that achieves " +"performance comparable to GPT4-Turbo in code-specific tasks." +msgstr "" + +#: src/available_models_descriptions.py:6 +msgid "" +"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art " +"open models by Microsoft." +msgstr "" + +#: src/available_models_descriptions.py:7 +msgid "" +"Aya 23, released by Cohere, is a new family of state-of-the-art, " +"multilingual models that support 23 languages." +msgstr "" + +#: src/available_models_descriptions.py:8 +msgid "The 7B model released by Mistral AI, updated to version 0.3." +msgstr "" + +#: src/available_models_descriptions.py:9 +msgid "" +"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in " +"8x7b and 8x22b parameter sizes." +msgstr "" + +#: src/available_models_descriptions.py:10 +msgid "" +"CodeGemma is a collection of powerful, lightweight models that can perform a " +"variety of coding tasks like fill-in-the-middle code completion, code " +"generation, natural language understanding, mathematical reasoning, and " +"instruction following." +msgstr "" + +#: src/available_models_descriptions.py:11 +msgid "" +"Command R is a Large Language Model optimized for conversational interaction " +"and long context tasks." +msgstr "" + +#: src/available_models_descriptions.py:12 +msgid "" +"Command R+ is a powerful, scalable large language model purpose-built to " +"excel at real-world enterprise use cases." +msgstr "" + +#: src/available_models_descriptions.py:13 +msgid "" +"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines " +"a vision encoder and Vicuna for general-purpose visual and language " +"understanding. Updated to version 1.6." +msgstr "" + +#: src/available_models_descriptions.py:14 +msgid "" +"Gemma is a family of lightweight, state-of-the-art open models built by " +"Google DeepMind. Updated to version 1.1" +msgstr "" + +#: src/available_models_descriptions.py:15 +msgid "" +"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from " +"0.5B to 110B parameters" +msgstr "" + +#: src/available_models_descriptions.py:16 +msgid "" +"Llama 2 is a collection of foundation language models ranging from 7B to 70B " +"parameters." +msgstr "" + +#: src/available_models_descriptions.py:17 +msgid "" +"A large language model that can use text prompts to generate and discuss " +"code." +msgstr "" + +#: src/available_models_descriptions.py:18 +msgid "" +"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of " +"experts models that excels at coding tasks. Created by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:19 +msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope." +msgstr "" + +#: src/available_models_descriptions.py:20 +msgid "" +"DeepSeek Coder is a capable coding model trained on two trillion code and " +"natural language tokens." +msgstr "" + +#: src/available_models_descriptions.py:21 +msgid "" +"A high-performing open embedding model with a large token context window." 
+msgstr "" + +#: src/available_models_descriptions.py:22 +msgid "" +"Phi-2: a 2.7B language model by Microsoft Research that demonstrates " +"outstanding reasoning and language understanding capabilities." +msgstr "" + +#: src/available_models_descriptions.py:23 +msgid "" +"The uncensored Dolphin model based on Mistral that excels at coding tasks. " +"Updated to version 2.8." +msgstr "" + +#: src/available_models_descriptions.py:24 +msgid "" +"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the " +"Mistral 7B model using the OpenOrca dataset." +msgstr "" + +#: src/available_models_descriptions.py:25 +msgid "" +"A general-purpose model ranging from 3 billion parameters to 70 billion, " +"suitable for entry-level hardware." +msgstr "" + +#: src/available_models_descriptions.py:26 +msgid "State-of-the-art large embedding model from mixedbread.ai" +msgstr "" + +#: src/available_models_descriptions.py:27 +msgid "" +"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on " +"Llama 3 that has a variety of instruction, conversational, and coding skills." +msgstr "" + +#: src/available_models_descriptions.py:28 +msgid "" +"StarCoder2 is the next generation of transparently trained open code LLMs " +"that comes in three sizes: 3B, 7B and 15B parameters." +msgstr "" + +#: src/available_models_descriptions.py:29 +msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability." +msgstr "" + +#: src/available_models_descriptions.py:30 +msgid "" +"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models " +"that are trained to act as helpful assistants." +msgstr "" + +#: src/available_models_descriptions.py:31 +msgid "Yi 1.5 is a high-performing, bilingual language model." +msgstr "" + +#: src/available_models_descriptions.py:32 +msgid "" +"The powerful family of models by Nous Research that excels at scientific " +"discussion and coding tasks." +msgstr "" + +#: src/available_models_descriptions.py:33 +msgid "" +"General use chat model based on Llama and Llama 2 with 2K to 16K context " +"sizes." +msgstr "" + +#: src/available_models_descriptions.py:34 +msgid "" +"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on " +"Llama 2 uncensored by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:35 +msgid "" +"The TinyLlama project is an open endeavor to train a compact 1.1B Llama " +"model on 3 trillion tokens." +msgstr "" + +#: src/available_models_descriptions.py:36 +msgid "" +"State of the art large language model from Microsoft AI with improved " +"performance on complex chat, multilingual, reasoning and agent use cases." +msgstr "" + +#: src/available_models_descriptions.py:37 +msgid "" +"StarCoder is a code generation model trained on 80+ programming languages." +msgstr "" + +#: src/available_models_descriptions.py:38 +msgid "" +"Codestral is Mistral AI’s first-ever code model designed for code generation " +"tasks." +msgstr "" + +#: src/available_models_descriptions.py:39 +msgid "" +"A family of open-source models trained on a wide variety of data, surpassing " +"ChatGPT on various benchmarks. Updated to version 3.5-0106." +msgstr "" + +#: src/available_models_descriptions.py:40 +msgid "" +"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset " +"by Eric Hartford and based on TinyLlama." +msgstr "" + +#: src/available_models_descriptions.py:41 +msgid "" +"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully " +"open datasets." 
+msgstr "" + +#: src/available_models_descriptions.py:42 +msgid "State-of-the-art code generation model" +msgstr "" + +#: src/available_models_descriptions.py:43 +msgid "" +"Stable Code 3B is a coding model with instruct and code completion variants " +"on par with models such as Code Llama 7B that are 2.5x larger." +msgstr "" + +#: src/available_models_descriptions.py:44 +msgid "" +"A fine-tuned model based on Mistral with good coverage of domain and " +"language." +msgstr "" + +#: src/available_models_descriptions.py:45 +msgid "Model focused on math and logic problems" +msgstr "" + +#: src/available_models_descriptions.py:46 +msgid "" +"CodeQwen1.5 is a large language model pretrained on a large amount of code " +"data." +msgstr "" + +#: src/available_models_descriptions.py:47 +msgid "Code generation model based on Code Llama." +msgstr "" + +#: src/available_models_descriptions.py:48 +msgid "" +"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model " +"trained on multilingual data in English, Spanish, German, Italian, French, " +"Portuguese, and Dutch." +msgstr "" + +#: src/available_models_descriptions.py:49 +msgid "" +"A 7B and 15B uncensored variant of the Dolphin model family that excels at " +"coding, based on StarCoder2." +msgstr "" + +#: src/available_models_descriptions.py:50 +msgid "Embedding models on very large sentence level datasets." +msgstr "" + +#: src/available_models_descriptions.py:51 +msgid "General use models based on Llama and Llama 2 from Nous Research." +msgstr "" + +#: src/available_models_descriptions.py:52 +msgid "" +"Starling is a large language model trained by reinforcement learning from AI " +"feedback focused on improving chatbot helpfulness." +msgstr "" + +#: src/available_models_descriptions.py:53 +msgid "" +"SQLCoder is a code completion model fined-tuned on StarCoder for SQL " +"generation tasks" +msgstr "" + +#: src/available_models_descriptions.py:54 +msgid "" +"Orca 2 is built by Microsoft research, and are a fine-tuned version of " +"Meta's Llama 2 models. The model is designed to excel particularly in " +"reasoning." +msgstr "" + +#: src/available_models_descriptions.py:55 +msgid "" +"This model extends LLama-3 8B's context length from 8k to over 1m tokens." +msgstr "" + +#: src/available_models_descriptions.py:56 +msgid "An advanced language model crafted with 2 trillion bilingual tokens." +msgstr "" + +#: src/available_models_descriptions.py:57 +msgid "An extension of Llama 2 that supports a context of up to 128k tokens." +msgstr "" + +#: src/available_models_descriptions.py:58 +msgid "" +"A model from NVIDIA based on Llama 3 that excels at conversational question " +"answering (QA) and retrieval-augmented generation (RAG)." +msgstr "" + +#: src/available_models_descriptions.py:59 +msgid "" +"A compact, yet powerful 10.7B large language model designed for single-turn " +"conversation." +msgstr "" + +#: src/available_models_descriptions.py:60 +msgid "" +"Conversational model based on Llama 2 that performs competitively on various " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:61 +msgid "A family of open foundation models by IBM for Code Intelligence" +msgstr "" + +#: src/available_models_descriptions.py:62 +msgid "" +"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language " +"model by Microsoft Research." +msgstr "" + +#: src/available_models_descriptions.py:63 +msgid "General use model based on Llama 2." 
+msgstr "" + +#: src/available_models_descriptions.py:64 +msgid "" +"A companion assistant trained in philosophy, psychology, and personal " +"relationships. Based on Mistral." +msgstr "" + +#: src/available_models_descriptions.py:65 +msgid "" +"Llama 2 based model fine tuned on an Orca-style dataset. Originally called " +"Free Willy." +msgstr "" + +#: src/available_models_descriptions.py:66 +msgid "" +"BakLLaVA is a multimodal model consisting of the Mistral 7B base model " +"augmented with the LLaVA architecture." +msgstr "" + +#: src/available_models_descriptions.py:67 +msgid "" +"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:68 +msgid "Uncensored version of Wizard LM model" +msgstr "" + +#: src/available_models_descriptions.py:69 +msgid "" +"Fine-tuned Llama 2 model to answer medical questions based on an open source " +"medical dataset." +msgstr "" + +#: src/available_models_descriptions.py:70 +msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral." +msgstr "" + +#: src/available_models_descriptions.py:71 +msgid "An extension of Mistral to support context windows of 64K or 128K." +msgstr "" + +#: src/available_models_descriptions.py:72 +msgid "" +"A suite of text embedding models by Snowflake, optimized for performance." +msgstr "" + +#: src/available_models_descriptions.py:73 +msgid "" +"An expansion of Llama 2 that specializes in integrating both general " +"language understanding and domain-specific knowledge, particularly in " +"programming and mathematics." +msgstr "" + +#: src/available_models_descriptions.py:74 +msgid "Great code generation model based on Llama2." +msgstr "" + +#: src/available_models_descriptions.py:75 +msgid "" +"Open-source medical large language model adapted from Llama 2 to the medical " +"domain." +msgstr "" + +#: src/available_models_descriptions.py:76 +msgid "" +"moondream2 is a small vision language model designed to run efficiently on " +"edge devices." +msgstr "" + +#: src/available_models_descriptions.py:77 +msgid "Uncensored Llama2 based model with support for a 16K context window." +msgstr "" + +#: src/available_models_descriptions.py:78 +msgid "" +"Nexus Raven is a 13B instruction tuned model for function calling tasks." +msgstr "" + +#: src/available_models_descriptions.py:79 +msgid "" +"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic " +"instruction data using OSS-Instruct, a novel approach to enlightening LLMs " +"with open-source code snippets." +msgstr "" + +#: src/available_models_descriptions.py:80 +msgid "A strong, economical, and efficient Mixture-of-Experts language model." +msgstr "" + +#: src/available_models_descriptions.py:81 +msgid "" +"A lightweight chat model allowing accurate, and responsive output without " +"requiring high-end hardware." +msgstr "" + +#: src/available_models_descriptions.py:82 +msgid "" +"A high-performing code instruct model created by merging two existing code " +"models." +msgstr "" + +#: src/available_models_descriptions.py:83 +msgid "A new small LLaVA model fine-tuned from Phi 3 Mini." +msgstr "" + +#: src/available_models_descriptions.py:84 +msgid "" +"MistralLite is a fine-tuned model based on Mistral with enhanced " +"capabilities of processing long contexts." +msgstr "" + +#: src/available_models_descriptions.py:85 +msgid "" +"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by " +"MelodysDreamj." 
+msgstr "" + +#: src/available_models_descriptions.py:86 +msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station." +msgstr "" + +#: src/available_models_descriptions.py:87 +msgid "" +"A language model created by combining two fine-tuned Llama 2 70B models into " +"one." +msgstr "" + +#: src/available_models_descriptions.py:88 +msgid "" +"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by " +"interleaving the model with itself." +msgstr "" + +#: src/available_models_descriptions.py:89 +msgid "" +"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. " +"Designed for chat and code generation." +msgstr "" + +#: src/available_models_descriptions.py:90 +msgid "" +"A top-performing mixture of experts model, fine-tuned with high-quality data." +msgstr "" + +#: src/available_models_descriptions.py:91 +msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr." +msgstr "" + +#: src/available_models_descriptions.py:92 +msgid "DBRX is an open, general-purpose LLM created by Databricks." +msgstr "" + +#: src/available_models_descriptions.py:93 +msgid "" +"Falcon2 is an 11B parameters causal decoder-only model built by TII and " +"trained over 5T tokens." +msgstr "" + +#: src/available_models_descriptions.py:94 +msgid "" +"A robust conversational model designed to be used for both chat and instruct " +"use cases." +msgstr "" + #~ msgid "Message Received" #~ msgstr "Melding Mottatt" diff --git a/po/nb_NO.po~ b/po/nb_NO.po~ new file mode 100644 index 0000000..f9faae3 --- /dev/null +++ b/po/nb_NO.po~ @@ -0,0 +1,1114 @@ +# Norwegian translation for Alpaca +# Copyright (C) 2024 Jeffser +# This file is distributed under the same license as the Alpaca package. +# Niklas Opsahl Halvorsen, 2024. 
+# +msgid "" +msgstr "" +"Project-Id-Version: 1.0.0\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2024-07-07 16:06-0600\n" +"PO-Revision-Date: 2024-07-02 18:24-0600\n" +"Last-Translator: Niklas Opsahl Halvorsen\n" +"Language-Team: Norwegian\n" +"Language: nb_NO\n" +"MIME-Version: 1.0\n" +"Content-Type: text/plain; charset=UTF-8\n" +"Content-Transfer-Encoding: 8bit\n" +"Plural-Forms: nplurals=2; plural=(n != 1);\n" + +#: data/com.jeffser.Alpaca.desktop.in:3 +#: data/com.jeffser.Alpaca.metainfo.xml.in:7 +msgid "Alpaca" +msgstr "Alpaca" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:8 +msgid "Chat with local AI models" +msgstr "Chat med lokale KI modeller" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:10 +msgid "An Ollama client" +msgstr "En Ollama klient" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:11 +#: data/com.jeffser.Alpaca.metainfo.xml.in:416 +msgid "Features" +msgstr "Funksjonalitet" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:13 +msgid "Built in Ollama instance" +msgstr "Innebygd Ollama programvareinstans" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:14 +#: data/com.jeffser.Alpaca.metainfo.xml.in:418 +msgid "Talk to multiple models in the same conversation" +msgstr "Snakk til flere modeller i samme samtale" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:15 +#: data/com.jeffser.Alpaca.metainfo.xml.in:419 +msgid "Pull and delete models from the app" +msgstr "Last ned og slett modeller fra appen" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:16 +msgid "Have multiple conversations" +msgstr "Ha flere samtaler" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:17 +msgid "Image recognition (Only available with compatible models)" +msgstr "Bildetolkning (tilgjengelig for modeller som støtter det)" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:18 +msgid "Plain text documents recognition" +msgstr "Råtekst dokument tolking" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:19 +msgid "Import and export chats" +msgstr "Importer og eksporter samtaler" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860 +msgid "Disclaimer" +msgstr "Obs!" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:22 +msgid "" +"This project is not affiliated at all with Ollama, I'm not responsible for " +"any damages to your device or software caused by running code given by any " +"models." +msgstr "" +"Dette prosjektet er ikke assosiert med Ollama, og jeg er ikke ansvarlig for " +"noen skader til din enhet eller programvare ved bruk av kode gitt fra " +"modellene." 
+ +#: data/com.jeffser.Alpaca.metainfo.xml.in:25 +msgid "Jeffry Samuel Eduarte Rojas" +msgstr "Jeffry Samuel Eduarte Rojas" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:51 +msgid "A conversation showing code highlight" +msgstr "En samtale som viser kodebelysning" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:55 +msgid "A conversation involving multiple models" +msgstr "En samtale som involverer flere modeller" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:59 +msgid "Managing models" +msgstr "Administrer modeller" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:70 +#: data/com.jeffser.Alpaca.metainfo.xml.in:119 +msgid "Fix" +msgstr "Fiks" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:71 +msgid "" +"Removed DOCX compatibility temporally due to error with python-lxml " +"dependency" +msgstr "" +"Fjernet DOCX støtte midlertidig fordi problemer med python-lxml avhengighet" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:77 +#: data/com.jeffser.Alpaca.metainfo.xml.in:107 +#: data/com.jeffser.Alpaca.metainfo.xml.in:128 +#: data/com.jeffser.Alpaca.metainfo.xml.in:333 +#: data/com.jeffser.Alpaca.metainfo.xml.in:390 +msgid "Big Update" +msgstr "Stor oppdatering" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:79 +msgid "Added compatibility for PDF" +msgstr "Lo til PDF-støtte." + +#: data/com.jeffser.Alpaca.metainfo.xml.in:80 +msgid "Added compatibility for DOCX" +msgstr "Støtte for DOCX" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:81 +msgid "Merged 'file attachment' menu into one button" +msgstr "Slått sammen filvedleggsmenyen til en knapp" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:88 +#: data/com.jeffser.Alpaca.metainfo.xml.in:281 +msgid "Quick Fix" +msgstr "Rask fiks" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:89 +msgid "" +"There were some errors when transitioning from the old version of chats to " +"the new version. I apologize if this caused any corruption in your chat " +"history. This should be the only time such a transition is needed." +msgstr "" +"Det var noen feil med overføringen fra den eldre versjonen av samtaler til " +"den nye versjonen. Jeg beklager hvis dette skapte korrupsjon i samtale " +"historien. Dette skal være den eneste gangen en slik overføring kreves." 
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:95
+#: data/com.jeffser.Alpaca.metainfo.xml.in:247
+msgid "Huge Update"
+msgstr "Stor oppdatering"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:97
+msgid "Added: Support for plain text files"
+msgstr "Støtte for råtekstfiler"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:98
+msgid "Added: New backend system for storing messages"
+msgstr "Nytt backend-system for å lagre meldinger"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:99
+msgid "Added: Support for changing Ollama's overrides"
+msgstr "Støtte for å endre Ollama sine overskrivelser"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:100
+msgid "General Optimization"
+msgstr "Generell optimalisering"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:109
+msgid "Added: Support for GGUF models (experimental)"
+msgstr "Støtte for GGUF-modeller (eksperimentelt)"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:110
+msgid "Added: Support for customization and creation of models"
+msgstr "Støtte for personalisering og oppretting av modeller"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:111
+msgid "Fixed: Icons don't appear on non Gnome systems"
+msgstr "Fikset et problem hvor ikoner ikke dukket opp på ikke-Gnome-systemer"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:112
+msgid "Update Ollama to v0.1.39"
+msgstr "Oppdater Ollama til v0.1.39"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:121
+msgid ""
+"Fixed: app didn't open if models tweaks wasn't present in the config files"
+msgstr ""
+"Fikset problem hvor appen ikke åpnet hvis modellpersonalisering ikke var i "
+"konfigurasjonsfilene"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:130
+msgid "Changed multiple icons (paper airplane for the send button)"
+msgstr "Endret flere ikoner, som papirflyet for sendeknappen"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:131
+msgid "Combined export / import chat buttons into a menu"
+msgstr "Kombinerte eksporter og importer chat-knappene til en meny."
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:132
+msgid "Added 'model tweaks' (temperature, seed, keep_alive)"
+msgstr "La til 'modellpersonalisering' (temperatur, frø, hold_aktiv)"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:133
+msgid "Fixed send / stop button"
+msgstr "Fikset send / stopp knapp"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:134
+msgid "Fixed app not checking if remote connection works when starting"
+msgstr "Fikset problem hvor appen ikke sjekket om ekstern tilkobling fungerer"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:141
+msgid "Daily Update"
+msgstr "Daglig oppdatering"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:143
+msgid "Added text ellipsis to chat name so it doesn't change the button width"
+msgstr ""
+"La til tekstellipse i chat-navn slik at den ikke endrer knappbredden"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:144
+msgid "New shortcut for creating a chat (CTRL+N)"
+msgstr "Ny hurtigtast for å lage samtale (CTRL+N)"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:145
+msgid "New message entry design"
+msgstr "Ny design på meldingsfeltet"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:146
+msgid "Fixed: Can't rename the same chat multiple times"
+msgstr "Fikset: Kan ikke gi samme samtale nytt navn flere ganger"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:153
+msgid "The fix"
+msgstr "Fiksen"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:155
+msgid ""
+"Fixed: Ollama instance keeps running on the background even when it is "
+"disabled"
+msgstr "Fikset: Ollama-instansen kjører i bakgrunnen selv når den er skrudd av"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:156
+msgid "Fixed: Can't pull models on the integrated instance"
+msgstr "Fikset: Kan ikke laste ned modeller på den integrerte instansen"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:163
+msgid "Quick tweaks"
+msgstr "Raske endringer"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:165
+msgid "Added progress bar to models that are being pulled"
+msgstr "La til fremdriftslinje på modeller som blir lastet ned"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:166
+msgid "Added size to tags when pulling a model"
+msgstr "La til størrelse på tagger når en modell lastes ned"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:167
+msgid "General optimizations on the background"
+msgstr "Generelle optimaliseringer i bakgrunnen"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:174
+msgid "Quick fixes"
+msgstr "Raske fikser"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:176
+msgid "Fixed: Scroll when message is received"
+msgstr "Fikset: Scroll når beskjed er mottatt"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:177
+msgid "Fixed: Content doesn't change when creating a new chat"
+msgstr "Fikset: Innhold endrer seg ikke når en ny samtale skapes"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:178
+msgid "Added 'Featured Models' page on welcome dialog"
+msgstr "La til 'Fremhevede Modeller'-side på velkomstdialogen"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:185
+msgid "Nice Update"
+msgstr "Fin oppdatering"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:187
+msgid "UI tweaks (Thanks Nokse22)"
+msgstr "Brukergrensesnittendringer (Takk Nokse22)"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:188
+msgid "General optimizations"
+msgstr "Generelle optimaliseringer"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:189
+msgid "Metadata fixes"
+msgstr "Metadata-fikser"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:196
+msgid "Quick fix"
+msgstr "Rask fiks"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:198
+msgid "Updated Spanish translation"
+msgstr "Oppdaterte spansk oversettelse"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:199
+msgid "Added compatibility for PNG"
+msgstr "La til støtte for PNG-filformatet"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:206
+msgid "New Update"
+msgstr "Ny oppdatering"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:208
+msgid "Updated model list"
+msgstr "Oppdaterte modellisten"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:209
+msgid "Added image recognition to more models"
+msgstr "La til bildetolkning for flere modeller"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:210
+msgid "Added Brazilian Portuguese translation (Thanks Daimaar Stein)"
+msgstr "La til brasiliansk portugisisk oversettelse (Takk Daimaar Stein)"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:211
+msgid "Refined the general UI (Thanks Nokse22)"
+msgstr "Finpusset brukergrensesnittet (Takk Nokse22)"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:212
+msgid "Added 'delete message' feature"
+msgstr "La til 'slett melding'-funksjon"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:213
+msgid ""
+"Added metadata so that software distributors know that the app is compatible "
+"with mobile"
+msgstr ""
+"La til metadata, så programvaredistributører vet at appen er kompatibel med "
+"mobil"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:214
+msgid ""
+"Changed 'send' shortcut to just the return/enter key (to add a new line use "
+"shift+return)"
+msgstr ""
+"Endret 'send' hurtigtast til return/enter-knappen (for å lage ny linje bruk "
+"shift+return)"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:221
+msgid "Bug Fixes"
+msgstr "Diverse feilrettinger"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:223
+msgid "Fixed: Minor spelling mistake"
+msgstr "Fikset: mindre stavefeil"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:224
+msgid "Added 'mobile' as a supported form factor"
+msgstr "La til mobil som støttet formfaktor"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:225
+msgid "Fixed: 'Connection Error' dialog not working properly"
+msgstr "Fikset: 'Tilkoblingsfeil'-dialogen fungerte ikke som den skulle"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:226
+msgid "Fixed: App might freeze randomly on startup"
+msgstr "Fikset et problem hvor appen noen ganger fryser ved oppstart"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:227
+msgid "Changed 'chats' label on sidebar for 'Alpaca'"
+msgstr "Endret 'samtaler'-merket på sidefeltet til 'Alpaca'"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:234
+msgid "Cool Update"
+msgstr "Kul oppdatering"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:236
+msgid "Better design for chat window"
+msgstr "Bedre design for samtalevinduet"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:237
+msgid "Better design for chat sidebar"
+msgstr "Bedre design for sidefeltet"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:238
+msgid "Fixed remote connections"
+msgstr "Fikset eksterne tilkoblinger"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:239
+msgid "Fixed Ollama restarting in loop"
+msgstr "Fikset problem hvor Ollama startet på nytt i en løkke"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:240
+msgid "Other cool backend stuff"
+msgstr "Annet kult bakgrunnsstoff"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:249
+msgid "Added Ollama as part of Alpaca, Ollama will run in a sandbox"
+msgstr "La til Ollama som del av Alpaca, Ollama vil kjøre i en sandkasse"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:250
+msgid "Added option to connect to remote instances (how it worked before)"
+msgstr ""
+"La til innstilling for å koble til eksterne instanser, slik det fungerte før"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:251
+msgid "Added option to import and export chats"
+msgstr "La til innstilling for å importere og eksportere samtaler"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:252
+msgid "Added option to run Alpaca with Ollama in the background"
+msgstr "La til innstilling for å kjøre Alpaca med Ollama i bakgrunnen"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:253
+msgid "Added preferences dialog"
+msgstr "La til innstillingsdialog"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:254
+msgid "Changed the welcome dialog"
+msgstr "Endret velkomstdialogen"
+
+#: data/com.jeffser.Alpaca.metainfo.xml.in:256
+#: data/com.jeffser.Alpaca.metainfo.xml.in:273
+#: data/com.jeffser.Alpaca.metainfo.xml.in:285
+#: data/com.jeffser.Alpaca.metainfo.xml.in:304
+#: data/com.jeffser.Alpaca.metainfo.xml.in:325
+#: data/com.jeffser.Alpaca.metainfo.xml.in:341
+#: data/com.jeffser.Alpaca.metainfo.xml.in:357
+#: data/com.jeffser.Alpaca.metainfo.xml.in:371
+#: data/com.jeffser.Alpaca.metainfo.xml.in:381
+#: data/com.jeffser.Alpaca.metainfo.xml.in:399
+#: data/com.jeffser.Alpaca.metainfo.xml.in:421
+msgid "Please report any errors to the issues page, thank you."
+msgstr "Vennligst rapporter eventuelle feil på \"issues\"-siden, tusen takk."
+ +#: data/com.jeffser.Alpaca.metainfo.xml.in:264 +msgid "Yet Another Daily Update" +msgstr "Enda en daglig oppdatering" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:266 +msgid "Added better UI for 'Manage Models' dialog" +msgstr "Bedre brukergrensesnitt for modell tilpassing dialog" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:267 +msgid "Added better UI for the chat sidebar" +msgstr "Bedre brukergrensesnitt for samtale sidefelt" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:268 +msgid "" +"Replaced model description with a button to open Ollama's website for the " +"model" +msgstr "" +"Erstattet modellforklaring med en knapp som åpner beskrivelsen i Ollama sin " +"nettside" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:269 +msgid "Added myself to the credits as the spanish translator" +msgstr "Lagt til meg selv i kredittene som spansk oversetter" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:270 +msgid "Using XDG properly to get config folder" +msgstr "Bruker XDG riktig for å nå konfigurasjons mappe" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:271 +msgid "Update for translations" +msgstr "Oppdatering for oversettelser" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:283 +msgid "The last update had some mistakes in the description of the update" +msgstr "Den siste oppdateringen hadde noen feil i beskrivelsen" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:293 +msgid "Another Daily Update" +msgstr "Enda en daglig oppdatering" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:295 +msgid "Added full Spanish translation" +msgstr "Spansk oversettelse er ferdig" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:296 +msgid "Added support for background pulling of multiple models" +msgstr "Støtte for bakgrunnsnedlasting av flere modeller" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:297 +msgid "Added interrupt button" +msgstr "Lo til avbryt knapp." + +#: data/com.jeffser.Alpaca.metainfo.xml.in:298 +msgid "Added basic shortcuts" +msgstr "Hurtigtaster er nå lagt til" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:299 +msgid "Better translation support" +msgstr "Bedre oversettelsestøtte" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:300 +msgid "" +"User can now leave chat name empty when creating a new one, it will add a " +"placeholder name" +msgstr "" +"Bruker kan nå lage ny samtale med tomt navn, appen vil legge til et " +"stedsholdernavn" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:301 +msgid "Better scalling for different window sizes" +msgstr "Bedre skalering for forskjellige vinduestørrelser" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:302 +msgid "Fixed: Can't close app if first time setup fails" +msgstr "Fikset problem hvor appen ikke kan lukkes hvis første oppstart feilet" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:312 +msgid "Really Big Update" +msgstr "Veldig stor oppdatering" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:314 +msgid "Added multiple chats support!" +msgstr "Støtte for flere samtaler!" 
+ +#: data/com.jeffser.Alpaca.metainfo.xml.in:315 +msgid "Added Pango Markup support (bold, list, title, subtitle, monospace)" +msgstr "Støtte for Pango merketekst (fet, liste, tittel, undertekst, monorom)" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:316 +msgid "Added autoscroll if the user is at the bottom of the chat" +msgstr "Lagt til autoscroll hvis brukeren er på bunnen av samtalen" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:317 +msgid "Added support for multiple tags on a single model" +msgstr "Lagt til støtte for flere tagger for en modell" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:318 +msgid "Added better model management dialog" +msgstr "Bedre modell tilpassing dialog." + +#: data/com.jeffser.Alpaca.metainfo.xml.in:319 +msgid "Added loading spinner when sending message" +msgstr "Lagt til Lastejul når det en beskjed sendes" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:320 +msgid "Added notifications if app is not active and a model pull finishes" +msgstr "" +"Appen vil nå sende en beskjed hvis den ikke er åpen mens en modell er ferdig " +"nedlastet" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:321 +msgid "Added new symbolic icon" +msgstr "Lagt til nytt symbolsk ikon" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:322 +msgid "Added frame to message textview widget" +msgstr "Lagt til ramme for melding tekstvindu programsnutt" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:323 +msgid "Fixed \"code blocks shouldn't be editable\"" +msgstr "Fikset \"Kodeblokker skal ikke være redigerbare.\"" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:335 +msgid "Added code highlighting" +msgstr "Lagt til kodebelysning." + +#: data/com.jeffser.Alpaca.metainfo.xml.in:336 +msgid "Added image recognition (llava model)" +msgstr "Lagt til bildetolkning (llava modell)" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:337 +msgid "Added multiline prompt" +msgstr "Lagt til multilinje skrivefelt" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:338 +msgid "Fixed some small bugs" +msgstr "Fikset noen små programfeil" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:339 +msgid "General optimization" +msgstr "Generelle optimalisering" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:349 +msgid "Fixes and features" +msgstr "Korrektur og funksjonalitet" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:351 +msgid "Russian translation (thanks github/alexkdeveloper)" +msgstr "Russisk oversettelse (takk github/alexkdeveloper)" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:352 +msgid "Fixed: Cannot close app on first setup" +msgstr "Fikset: Kan ikke lukke app på første oppstart" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:353 +msgid "Fixed: Brand colors for Flathub" +msgstr "Fikset merkevare farger for Flathub" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:354 +msgid "Fixed: App description" +msgstr "Fikset: App beskrivelse" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:355 +msgid "Fixed: Only show 'save changes dialog' when you actually change the url" +msgstr "Fikset: Bare vis 'lagre endringer dialog' når du faktisk endrer URL" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:365 +msgid "0.2.2 Bug fixes" +msgstr "0.2.2 Diverse løsninger" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:367 +msgid "Toast messages appearing behind dialogs" +msgstr "Toast meldinger dukker opp bak dialoger." 
+ +#: data/com.jeffser.Alpaca.metainfo.xml.in:368 +msgid "Local model list not updating when changing servers" +msgstr "Lokal modelliste som ikke oppdateres ved serverendring" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:369 +msgid "Closing the setup dialog closes the whole app" +msgstr "Lukking av oppstartsdialog lukker hele appen" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:379 +msgid "0.2.1 Data saving fix" +msgstr "0.2.1 Datasparing fiks" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:380 +msgid "" +"The app didn't save the config files and chat history to the right " +"directory, this is now fixed" +msgstr "" +"Appen lagret ikke konfigurasjonfiler og historikk til riktig filplass, dette " +"er nå fikset" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:389 +msgid "0.2.0" +msgstr "0.2.0" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:391 +msgid "New Features" +msgstr "Ny funksjonalitet" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:393 +msgid "Restore chat after closing the app" +msgstr "Gjenopprett samtale etter lukking av appen" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:394 +msgid "A button to clear the chat" +msgstr "En knapp for å rense samtalen" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:395 +msgid "Fixed multiple bugs involving how messages are shown" +msgstr "Fikset flere problemer som involverte hvordan meldingen vises" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:396 +msgid "Added welcome dialog" +msgstr "Lagt til velkomstdialog" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:397 +msgid "More stability" +msgstr "Mer stabilitet" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:407 +msgid "0.1.2 Quick fixes" +msgstr "0.1.2 Raske løsninger" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:408 +msgid "" +"This release fixes some metadata needed to have a proper Flatpak application" +msgstr "Fikset metadata som trengs for å ha korrekt Flatpak applikasjon" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:414 +msgid "0.1.1 Stable Release" +msgstr "0.1.1 Stabil utgivelse" + +#: data/com.jeffser.Alpaca.metainfo.xml.in:415 +msgid "This is the first public version of Alpaca" +msgstr "Dette er den første offentlige versjonen av Alpaca" + +#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41 +msgid "New Chat" +msgstr "Ny samtale" + +#: src/window.py:128 +msgid "An error occurred" +msgstr "Et problem dukket opp" + +#: src/window.py:129 +msgid "Failed to connect to server" +msgstr "Feilet tilkobling til server" + +#: src/window.py:130 +msgid "Could not list local models" +msgstr "Kunne ikke liste lokale modeller" + +#: src/window.py:131 +msgid "Could not delete model" +msgstr "Kunne ikke slette modell" + +#: src/window.py:132 +msgid "Could not pull model" +msgstr "Kunne ikke laste ned modell" + +#: src/window.py:133 +msgid "Cannot open image" +msgstr "Kan ikke åpne bilde" + +#: src/window.py:134 +msgid "Cannot delete chat because it's the only one left" +msgstr "Kan ikke slette samtale fordi det er bare en igjen" + +#: src/window.py:135 +msgid "There was an error with the local Ollama instance, so it has been reset" +msgstr "Det var et problem med lokal Ollama instans, så den har fått omstart" + +#: src/window.py:136 +msgid "Image recognition is only available on specific models" +msgstr "Bildetolkning er bare tilgjengelig på enkelte modeller" + +#: src/window.py:137 +msgid "This video does not have any transcriptions" +msgstr "Denne videoen har ingen transkripsjon" + +#: src/window.py:138 +msgid "This video is not available" +msgstr "Denne videoen er ikke tilgjengelig" + 
+#: src/window.py:141
+msgid "Please select a model before chatting"
+msgstr "Velg en modell før samtalen"
+
+#: src/window.py:142
+msgid "Chat cannot be cleared while receiving a message"
+msgstr "Samtale kan ikke renses mens melding blir mottatt"
+
+#: src/window.py:143
+msgid "That tag is already being pulled"
+msgstr "Denne taggen er allerede under nedlasting"
+
+#: src/window.py:144
+msgid "That tag has been pulled already"
+msgstr "Denne taggen er allerede nedlastet"
+
+#: src/window.py:145
+msgid "Code copied to the clipboard"
+msgstr "Kode kopiert til utklippstavle"
+
+#: src/window.py:146
+msgid "Message copied to the clipboard"
+msgstr "Melding kopiert til utklippstavle"
+
+#: src/window.py:147
+msgid "Message edited successfully"
+msgstr "Melding suksessfullt redigert"
+
+#: src/window.py:150
+msgid "Model deleted successfully"
+msgstr "Modell suksessfullt slettet"
+
+#: src/window.py:151
+msgid "Model pulled successfully"
+msgstr "Modell suksessfullt nedlastet"
+
+#: src/window.py:152
+msgid "Chat exported successfully"
+msgstr "Samtale suksessfullt eksportert"
+
+#: src/window.py:153
+msgid "Chat imported successfully"
+msgstr "Samtale suksessfullt importert"
+
+#: src/window.py:280
+msgid "Close"
+msgstr "Lukk"
+
+#: src/window.py:281 src/window.ui:813
+msgid "Next"
+msgstr "Neste"
+
+#: src/window.py:332
+msgid "Pulling in the background..."
+msgstr "Nedlaster i bakgrunnen..."
+
+#: src/window.py:384
+msgid "Stop Creating '{}'"
+msgstr "Stopp oppretting av '{}'"
+
+#: src/window.py:421
+msgid "image"
+msgstr "bilde"
+
+#: src/window.py:593
+msgid "Remove Message"
+msgstr "Fjern Melding"
+
+#: src/window.py:598 src/window.py:869
+msgid "Copy Message"
+msgstr "Kopier Melding"
+
+#: src/window.py:603
+msgid "Edit Message"
+msgstr "Rediger Melding"
+
+#: src/window.py:661
+msgid "Missing Image"
+msgstr ""
+
+#: src/window.py:677
+msgid "Missing image"
+msgstr ""
+
+#: src/window.py:757
+msgid "Remove '{} ({})'"
+msgstr "Fjern '{} ({})'"
+
+#: src/window.py:969
+msgid "Task Complete"
+msgstr "Oppgave Ferdig"
+
+#: src/window.py:969
+msgid "Model '{}' pulled successfully."
+msgstr "Modell '{}' suksessfullt nedlastet."
+
+#: src/window.py:974
+msgid "Pull Model Error"
+msgstr "Problem med modellnedlasting"
+
+#: src/window.py:974
+msgid "Failed to pull model '{}' due to network error."
+msgstr "Mislyktes i å laste ned modell '{}' på grunn av nettverksfeil."
+ +#: src/window.py:1008 +msgid "Stop Pulling '{} ({})'" +msgstr "Stopp Nedlasting '{} ({})'" + +#: src/window.ui:52 +msgid "Menu" +msgstr "Meny" + +#: src/window.ui:82 +msgid "Toggle Sidebar" +msgstr "Skru av/på sidefelt" + +#: src/window.ui:107 src/window.ui:587 +msgid "Manage Models" +msgstr "Tilpass Modeller" + +#: src/window.ui:121 +msgid "Chat Menu" +msgstr "Samtale Meny" + +#: src/window.ui:197 +msgid "Attach File" +msgstr "Filvedlegg" + +#: src/window.ui:242 src/window.ui:1127 +msgid "Send Message" +msgstr "Send Melding" + +#: src/window.ui:290 src/window.ui:972 src/window.ui:1086 +msgid "Preferences" +msgstr "Innstillinger" + +#: src/window.ui:293 src/window.ui:1064 +msgid "General" +msgstr "Generelt" + +#: src/window.ui:299 +msgid "Use Remote Connection to Ollama" +msgstr "Bruk ekstern tilkobling til Ollama" + +#: src/window.ui:305 +msgid "URL of Remote Instance" +msgstr "URL til Ekstern Instans" + +#: src/window.ui:312 +msgid "Bearer Token (Optional)" +msgstr "Bearer Token (Frivillig)" + +#: src/window.ui:322 +msgid "Run Alpaca In Background" +msgstr "Kjør Alpaca i Bakgrunn" + +#: src/window.ui:333 +msgid "Temperature" +msgstr "Temperatur" + +#: src/window.ui:334 +msgid "" +"The temperature of the model. Increasing the temperature will make the model " +"answer more creatively. (Default: 0.8)" +msgstr "" +"Temperaturen til modellen. Øking av temperatur vil gjøre modellen mer " +"kreativ (Standard: 0.8)" + +#: src/window.ui:349 +msgid "Seed" +msgstr "Frø" + +#: src/window.ui:350 +msgid "" +"Sets the random number seed to use for generation. Setting this to a " +"specific number will make the model generate the same text for the same " +"prompt. (Default: 0 (random))" +msgstr "" +"Tilfeldig frø for bruk til generasjon. Velg et spesifikk nummer hvis du vil " +"ha samme tekst fra samme inndata. (Standard: 0 (tilfeldig))" + +#: src/window.ui:364 +msgid "Keep Alive Time" +msgstr "Hold Aktiv Tid" + +#: src/window.ui:365 +msgid "" +"Controls how long the model will stay loaded into memory following the " +"request in minutes (Default: 5)" +msgstr "" +"Kontrollerer hvor lenge modellen vil lagres i minne etter forespørselen i " +"minutter (Standard: 5)" + +#: src/window.ui:381 +msgid "Ollama Instance" +msgstr "Ollama Instanser" + +#: src/window.ui:385 +msgid "Ollama Overrides" +msgstr "Ollama Overskrivelser" + +#: src/window.ui:386 +msgid "" +"Manage the arguments used on Ollama, any changes on this page only applies " +"to the integrated instance, the instance will restart if you make changes." +msgstr "" +"Tilpass argumentene brukt i Ollama, endringer i denne siden fullføres bare " +"for den integrerte instansen, og den vil omstartes dersom du gjør endringer." + +#: src/window.ui:469 +msgid "Create" +msgstr "Lag" + +#: src/window.ui:482 src/window.ui:597 +msgid "Create Model" +msgstr "Lag Modell" + +#: src/window.ui:508 +msgid "Base" +msgstr "Standard" + +#: src/window.ui:526 +msgid "Name" +msgstr "Navn" + +#: src/window.ui:532 +msgid "Context" +msgstr "Kontekst" + +#: src/window.ui:547 +msgid "Template" +msgstr "Mal" + +#: src/window.ui:553 +msgid "" +"Some models require a specific template. Please visit the model's website " +"for more information if you're unsure." +msgstr "" +"Noen modeller krever spesifikk mal. Besøk modellens nettside hvis du er " +"usikker." 
+ +#: src/window.ui:604 +msgid "Search Model" +msgstr "Søk Modell" + +#: src/window.ui:664 +msgid "No Models Found" +msgstr "Ingen Modeller Funnet" + +#: src/window.ui:665 +msgid "Try a different search" +msgstr "Prøv andre søkeord" + +#: src/window.ui:708 +msgid "" +"By downloading this model you accept the license agreement available on the " +"model's website." +msgstr "" + +#: src/window.ui:745 +msgid "Open with Default App" +msgstr "Åpne med standard app" + +#: src/window.ui:797 +msgid "Previous" +msgstr "Forrige" + +#: src/window.ui:840 +msgid "Welcome to Alpaca" +msgstr "Velkommen til Alpaca" + +#: src/window.ui:841 +msgid "Powered by Ollama" +msgstr "Drives av Ollama" + +#: src/window.ui:844 +msgid "Ollama Website" +msgstr "Ollama Nettside" + +#: src/window.ui:861 +msgid "" +"Alpaca and its developers are not liable for any damages to devices or " +"software resulting from the execution of code generated by an AI model. " +"Please exercise caution and review the code carefully before running it." +msgstr "" +"Alpaca og dens utviklere er ikke ansvarlig for skader til enheter eller " +"programvare fra bruk av kode generert via KI modellene. Vær OBS og analyser " +"kode før det brukes." + +#: src/window.ui:872 +msgid "Featured Models" +msgstr "Fremhevede Modeller" + +#: src/window.ui:873 +msgid "" +"Alpaca works locally on your device, to start chatting you'll need an AI " +"model, you can either pull models from this list or the 'Manage Models' menu " +"later." +msgstr "" +"Alpaca fungerer lokalt på din enhet, for å starte samtale trenger du en KI " +"modell, du kan nedlaste fra denne listen eller fra modell menyen etterpå." + +#: src/window.ui:883 +msgid "Built by Meta" +msgstr "Bygget av Meta" + +#: src/window.ui:901 +msgid "Built by Google DeepMind" +msgstr "Bygget av Google DeepMind" + +#: src/window.ui:919 +msgid "Built by Microsoft" +msgstr "Bygget av Microsoft" + +#: src/window.ui:937 +msgid "Multimodal AI with image recognition" +msgstr "Multimodal KI med bildetolkning" + +#: src/window.ui:966 +msgid "Import Chat" +msgstr "Importer samtale" + +#: src/window.ui:976 +msgid "Keyboard Shortcuts" +msgstr "Hurtigtaster" + +#: src/window.ui:980 +msgid "About Alpaca" +msgstr "Om Alpaca" + +#: src/window.ui:987 src/window.ui:1006 +msgid "Rename Chat" +msgstr "Navngi samtale" + +#: src/window.ui:991 src/window.ui:1010 +msgid "Export Chat" +msgstr "Eksporter samtale" + +#: src/window.ui:995 +msgid "Clear Chat" +msgstr "Rens samtale" + +#: src/window.ui:1002 +msgid "Delete Chat" +msgstr "Slett samtale" + +#: src/window.ui:1018 +msgid "From Existing Model" +msgstr "Fra Eksisterende Modell" + +#: src/window.ui:1022 +msgid "From GGUF File (Experimental)" +msgstr "Fra GGUF Fil (Eksperimental)" + +#: src/window.ui:1068 +msgid "Close application" +msgstr "Lukk applikasjon" + +#: src/window.ui:1074 +msgid "Import chat" +msgstr "Importer samtale" + +#: src/window.ui:1080 +msgid "Clear chat" +msgstr "Rens samtale" + +#: src/window.ui:1092 +msgid "New chat" +msgstr "Ny samtale" + +#: src/window.ui:1098 +msgid "Show shortcuts window" +msgstr "Vis hurtigtast vindu" + +#: src/window.ui:1105 +msgid "Editor" +msgstr "Redigerer" + +#: src/window.ui:1109 +msgid "Copy" +msgstr "Kopier" + +#: src/window.ui:1115 +msgid "Paste" +msgstr "Lim inn" + +#: src/window.ui:1121 +msgid "Insert new line" +msgstr "Sett inn ny linje" + +#~ msgid "Message Received" +#~ msgstr "Melding Mottatt" + +#~ msgid "New message from '{}'" +#~ msgstr "Ny melding fra '{}'" + +#~ msgid "Model" +#~ msgstr "Modell" diff --git 
a/po/pt_BR.po b/po/pt_BR.po index 18e9c6d..7e0f1d6 100644 --- a/po/pt_BR.po +++ b/po/pt_BR.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-07-07 16:04-0600\n" +"POT-Creation-Date: 2024-07-07 17:40-0600\n" "PO-Revision-Date: 2024-05-23 23:29-0600\n" "Last-Translator: Daimar Stein \n" "Language-Team: Brazilian Portuguese\n" @@ -1166,6 +1166,511 @@ msgstr "Colar" msgid "Insert new line" msgstr "" +#: src/available_models_descriptions.py:2 +msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B." +msgstr "" + +#: src/available_models_descriptions.py:3 +msgid "Meta Llama 3: The most capable openly available LLM to date" +msgstr "" + +#: src/available_models_descriptions.py:4 +msgid "Qwen2 is a new series of large language models from Alibaba group" +msgstr "" + +#: src/available_models_descriptions.py:5 +msgid "" +"An open-source Mixture-of-Experts code language model that achieves " +"performance comparable to GPT4-Turbo in code-specific tasks." +msgstr "" + +#: src/available_models_descriptions.py:6 +msgid "" +"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art " +"open models by Microsoft." +msgstr "" + +#: src/available_models_descriptions.py:7 +msgid "" +"Aya 23, released by Cohere, is a new family of state-of-the-art, " +"multilingual models that support 23 languages." +msgstr "" + +#: src/available_models_descriptions.py:8 +msgid "The 7B model released by Mistral AI, updated to version 0.3." +msgstr "" + +#: src/available_models_descriptions.py:9 +msgid "" +"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in " +"8x7b and 8x22b parameter sizes." +msgstr "" + +#: src/available_models_descriptions.py:10 +msgid "" +"CodeGemma is a collection of powerful, lightweight models that can perform a " +"variety of coding tasks like fill-in-the-middle code completion, code " +"generation, natural language understanding, mathematical reasoning, and " +"instruction following." +msgstr "" + +#: src/available_models_descriptions.py:11 +msgid "" +"Command R is a Large Language Model optimized for conversational interaction " +"and long context tasks." +msgstr "" + +#: src/available_models_descriptions.py:12 +msgid "" +"Command R+ is a powerful, scalable large language model purpose-built to " +"excel at real-world enterprise use cases." +msgstr "" + +#: src/available_models_descriptions.py:13 +msgid "" +"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines " +"a vision encoder and Vicuna for general-purpose visual and language " +"understanding. Updated to version 1.6." +msgstr "" + +#: src/available_models_descriptions.py:14 +msgid "" +"Gemma is a family of lightweight, state-of-the-art open models built by " +"Google DeepMind. Updated to version 1.1" +msgstr "" + +#: src/available_models_descriptions.py:15 +msgid "" +"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from " +"0.5B to 110B parameters" +msgstr "" + +#: src/available_models_descriptions.py:16 +msgid "" +"Llama 2 is a collection of foundation language models ranging from 7B to 70B " +"parameters." +msgstr "" + +#: src/available_models_descriptions.py:17 +msgid "" +"A large language model that can use text prompts to generate and discuss " +"code." +msgstr "" + +#: src/available_models_descriptions.py:18 +msgid "" +"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of " +"experts models that excels at coding tasks. Created by Eric Hartford." 
+msgstr "" + +#: src/available_models_descriptions.py:19 +msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope." +msgstr "" + +#: src/available_models_descriptions.py:20 +msgid "" +"DeepSeek Coder is a capable coding model trained on two trillion code and " +"natural language tokens." +msgstr "" + +#: src/available_models_descriptions.py:21 +msgid "" +"A high-performing open embedding model with a large token context window." +msgstr "" + +#: src/available_models_descriptions.py:22 +msgid "" +"Phi-2: a 2.7B language model by Microsoft Research that demonstrates " +"outstanding reasoning and language understanding capabilities." +msgstr "" + +#: src/available_models_descriptions.py:23 +msgid "" +"The uncensored Dolphin model based on Mistral that excels at coding tasks. " +"Updated to version 2.8." +msgstr "" + +#: src/available_models_descriptions.py:24 +msgid "" +"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the " +"Mistral 7B model using the OpenOrca dataset." +msgstr "" + +#: src/available_models_descriptions.py:25 +msgid "" +"A general-purpose model ranging from 3 billion parameters to 70 billion, " +"suitable for entry-level hardware." +msgstr "" + +#: src/available_models_descriptions.py:26 +msgid "State-of-the-art large embedding model from mixedbread.ai" +msgstr "" + +#: src/available_models_descriptions.py:27 +msgid "" +"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on " +"Llama 3 that has a variety of instruction, conversational, and coding skills." +msgstr "" + +#: src/available_models_descriptions.py:28 +msgid "" +"StarCoder2 is the next generation of transparently trained open code LLMs " +"that comes in three sizes: 3B, 7B and 15B parameters." +msgstr "" + +#: src/available_models_descriptions.py:29 +msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability." +msgstr "" + +#: src/available_models_descriptions.py:30 +msgid "" +"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models " +"that are trained to act as helpful assistants." +msgstr "" + +#: src/available_models_descriptions.py:31 +msgid "Yi 1.5 is a high-performing, bilingual language model." +msgstr "" + +#: src/available_models_descriptions.py:32 +msgid "" +"The powerful family of models by Nous Research that excels at scientific " +"discussion and coding tasks." +msgstr "" + +#: src/available_models_descriptions.py:33 +msgid "" +"General use chat model based on Llama and Llama 2 with 2K to 16K context " +"sizes." +msgstr "" + +#: src/available_models_descriptions.py:34 +msgid "" +"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on " +"Llama 2 uncensored by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:35 +msgid "" +"The TinyLlama project is an open endeavor to train a compact 1.1B Llama " +"model on 3 trillion tokens." +msgstr "" + +#: src/available_models_descriptions.py:36 +msgid "" +"State of the art large language model from Microsoft AI with improved " +"performance on complex chat, multilingual, reasoning and agent use cases." +msgstr "" + +#: src/available_models_descriptions.py:37 +msgid "" +"StarCoder is a code generation model trained on 80+ programming languages." +msgstr "" + +#: src/available_models_descriptions.py:38 +msgid "" +"Codestral is Mistral AI’s first-ever code model designed for code generation " +"tasks." 
+msgstr "" + +#: src/available_models_descriptions.py:39 +msgid "" +"A family of open-source models trained on a wide variety of data, surpassing " +"ChatGPT on various benchmarks. Updated to version 3.5-0106." +msgstr "" + +#: src/available_models_descriptions.py:40 +msgid "" +"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset " +"by Eric Hartford and based on TinyLlama." +msgstr "" + +#: src/available_models_descriptions.py:41 +msgid "" +"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully " +"open datasets." +msgstr "" + +#: src/available_models_descriptions.py:42 +msgid "State-of-the-art code generation model" +msgstr "" + +#: src/available_models_descriptions.py:43 +msgid "" +"Stable Code 3B is a coding model with instruct and code completion variants " +"on par with models such as Code Llama 7B that are 2.5x larger." +msgstr "" + +#: src/available_models_descriptions.py:44 +msgid "" +"A fine-tuned model based on Mistral with good coverage of domain and " +"language." +msgstr "" + +#: src/available_models_descriptions.py:45 +msgid "Model focused on math and logic problems" +msgstr "" + +#: src/available_models_descriptions.py:46 +msgid "" +"CodeQwen1.5 is a large language model pretrained on a large amount of code " +"data." +msgstr "" + +#: src/available_models_descriptions.py:47 +msgid "Code generation model based on Code Llama." +msgstr "" + +#: src/available_models_descriptions.py:48 +msgid "" +"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model " +"trained on multilingual data in English, Spanish, German, Italian, French, " +"Portuguese, and Dutch." +msgstr "" + +#: src/available_models_descriptions.py:49 +msgid "" +"A 7B and 15B uncensored variant of the Dolphin model family that excels at " +"coding, based on StarCoder2." +msgstr "" + +#: src/available_models_descriptions.py:50 +msgid "Embedding models on very large sentence level datasets." +msgstr "" + +#: src/available_models_descriptions.py:51 +msgid "General use models based on Llama and Llama 2 from Nous Research." +msgstr "" + +#: src/available_models_descriptions.py:52 +msgid "" +"Starling is a large language model trained by reinforcement learning from AI " +"feedback focused on improving chatbot helpfulness." +msgstr "" + +#: src/available_models_descriptions.py:53 +msgid "" +"SQLCoder is a code completion model fined-tuned on StarCoder for SQL " +"generation tasks" +msgstr "" + +#: src/available_models_descriptions.py:54 +msgid "" +"Orca 2 is built by Microsoft research, and are a fine-tuned version of " +"Meta's Llama 2 models. The model is designed to excel particularly in " +"reasoning." +msgstr "" + +#: src/available_models_descriptions.py:55 +msgid "" +"This model extends LLama-3 8B's context length from 8k to over 1m tokens." +msgstr "" + +#: src/available_models_descriptions.py:56 +msgid "An advanced language model crafted with 2 trillion bilingual tokens." +msgstr "" + +#: src/available_models_descriptions.py:57 +msgid "An extension of Llama 2 that supports a context of up to 128k tokens." +msgstr "" + +#: src/available_models_descriptions.py:58 +msgid "" +"A model from NVIDIA based on Llama 3 that excels at conversational question " +"answering (QA) and retrieval-augmented generation (RAG)." +msgstr "" + +#: src/available_models_descriptions.py:59 +msgid "" +"A compact, yet powerful 10.7B large language model designed for single-turn " +"conversation." 
+msgstr "" + +#: src/available_models_descriptions.py:60 +msgid "" +"Conversational model based on Llama 2 that performs competitively on various " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:61 +msgid "A family of open foundation models by IBM for Code Intelligence" +msgstr "" + +#: src/available_models_descriptions.py:62 +msgid "" +"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language " +"model by Microsoft Research." +msgstr "" + +#: src/available_models_descriptions.py:63 +msgid "General use model based on Llama 2." +msgstr "" + +#: src/available_models_descriptions.py:64 +msgid "" +"A companion assistant trained in philosophy, psychology, and personal " +"relationships. Based on Mistral." +msgstr "" + +#: src/available_models_descriptions.py:65 +msgid "" +"Llama 2 based model fine tuned on an Orca-style dataset. Originally called " +"Free Willy." +msgstr "" + +#: src/available_models_descriptions.py:66 +msgid "" +"BakLLaVA is a multimodal model consisting of the Mistral 7B base model " +"augmented with the LLaVA architecture." +msgstr "" + +#: src/available_models_descriptions.py:67 +msgid "" +"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:68 +msgid "Uncensored version of Wizard LM model" +msgstr "" + +#: src/available_models_descriptions.py:69 +msgid "" +"Fine-tuned Llama 2 model to answer medical questions based on an open source " +"medical dataset." +msgstr "" + +#: src/available_models_descriptions.py:70 +msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral." +msgstr "" + +#: src/available_models_descriptions.py:71 +msgid "An extension of Mistral to support context windows of 64K or 128K." +msgstr "" + +#: src/available_models_descriptions.py:72 +msgid "" +"A suite of text embedding models by Snowflake, optimized for performance." +msgstr "" + +#: src/available_models_descriptions.py:73 +msgid "" +"An expansion of Llama 2 that specializes in integrating both general " +"language understanding and domain-specific knowledge, particularly in " +"programming and mathematics." +msgstr "" + +#: src/available_models_descriptions.py:74 +msgid "Great code generation model based on Llama2." +msgstr "" + +#: src/available_models_descriptions.py:75 +msgid "" +"Open-source medical large language model adapted from Llama 2 to the medical " +"domain." +msgstr "" + +#: src/available_models_descriptions.py:76 +msgid "" +"moondream2 is a small vision language model designed to run efficiently on " +"edge devices." +msgstr "" + +#: src/available_models_descriptions.py:77 +msgid "Uncensored Llama2 based model with support for a 16K context window." +msgstr "" + +#: src/available_models_descriptions.py:78 +msgid "" +"Nexus Raven is a 13B instruction tuned model for function calling tasks." +msgstr "" + +#: src/available_models_descriptions.py:79 +msgid "" +"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic " +"instruction data using OSS-Instruct, a novel approach to enlightening LLMs " +"with open-source code snippets." +msgstr "" + +#: src/available_models_descriptions.py:80 +msgid "A strong, economical, and efficient Mixture-of-Experts language model." +msgstr "" + +#: src/available_models_descriptions.py:81 +msgid "" +"A lightweight chat model allowing accurate, and responsive output without " +"requiring high-end hardware." 
+msgstr "" + +#: src/available_models_descriptions.py:82 +msgid "" +"A high-performing code instruct model created by merging two existing code " +"models." +msgstr "" + +#: src/available_models_descriptions.py:83 +msgid "A new small LLaVA model fine-tuned from Phi 3 Mini." +msgstr "" + +#: src/available_models_descriptions.py:84 +msgid "" +"MistralLite is a fine-tuned model based on Mistral with enhanced " +"capabilities of processing long contexts." +msgstr "" + +#: src/available_models_descriptions.py:85 +msgid "" +"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by " +"MelodysDreamj." +msgstr "" + +#: src/available_models_descriptions.py:86 +msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station." +msgstr "" + +#: src/available_models_descriptions.py:87 +msgid "" +"A language model created by combining two fine-tuned Llama 2 70B models into " +"one." +msgstr "" + +#: src/available_models_descriptions.py:88 +msgid "" +"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by " +"interleaving the model with itself." +msgstr "" + +#: src/available_models_descriptions.py:89 +msgid "" +"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. " +"Designed for chat and code generation." +msgstr "" + +#: src/available_models_descriptions.py:90 +msgid "" +"A top-performing mixture of experts model, fine-tuned with high-quality data." +msgstr "" + +#: src/available_models_descriptions.py:91 +msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr." +msgstr "" + +#: src/available_models_descriptions.py:92 +msgid "DBRX is an open, general-purpose LLM created by Databricks." +msgstr "" + +#: src/available_models_descriptions.py:93 +msgid "" +"Falcon2 is an 11B parameters causal decoder-only model built by TII and " +"trained over 5T tokens." +msgstr "" + +#: src/available_models_descriptions.py:94 +msgid "" +"A robust conversational model designed to be used for both chat and instruct " +"use cases." +msgstr "" + #, fuzzy #~ msgid "Model" #~ msgstr "Parar Modelo" diff --git a/po/pt_BR.po~ b/po/pt_BR.po~ index 1b5590d..18e9c6d 100644 --- a/po/pt_BR.po~ +++ b/po/pt_BR.po~ @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-07-02 18:21-0600\n" +"POT-Creation-Date: 2024-07-07 16:04-0600\n" "PO-Revision-Date: 2024-05-23 23:29-0600\n" "Last-Translator: Daimar Stein \n" "Language-Team: Brazilian Portuguese\n" @@ -70,7 +70,7 @@ msgstr "" msgid "Import and export chats" msgstr "Importe e exporte conversas" -#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853 +#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860 msgid "Disclaimer" msgstr "Aviso Legal" @@ -714,7 +714,7 @@ msgstr "0.1.1 Lançamento Estável" msgid "This is the first public version of Alpaca" msgstr "Essa é a primeira versão pública de Alpaca" -#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41 +#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41 msgid "New Chat" msgstr "Nova Conversa" @@ -812,72 +812,72 @@ msgstr "Conversa exportada com sucesso" msgid "Chat imported successfully" msgstr "Conversa importada com sucesso" -#: src/window.py:278 +#: src/window.py:280 msgid "Close" msgstr "" -#: src/window.py:279 src/window.ui:806 +#: src/window.py:281 src/window.ui:813 msgid "Next" msgstr "Próximo" -#: src/window.py:329 +#: src/window.py:332 #, fuzzy msgid "Pulling in the background..." 
msgstr "Executar em segundo plano" -#: src/window.py:381 +#: src/window.py:384 msgid "Stop Creating '{}'" msgstr "" -#: src/window.py:418 +#: src/window.py:421 #, fuzzy msgid "image" msgstr "Imagem" -#: src/window.py:588 +#: src/window.py:593 #, fuzzy msgid "Remove Message" msgstr "Remover Imagem" -#: src/window.py:593 src/window.py:841 +#: src/window.py:598 src/window.py:869 #, fuzzy msgid "Copy Message" msgstr "Enviar Mensagem" -#: src/window.py:598 +#: src/window.py:603 #, fuzzy msgid "Edit Message" msgstr "Enviar Mensagem" -#: src/window.py:729 +#: src/window.py:661 +msgid "Missing Image" +msgstr "" + +#: src/window.py:677 +msgid "Missing image" +msgstr "" + +#: src/window.py:757 msgid "Remove '{} ({})'" msgstr "" -#: src/window.py:882 -msgid "Message Received" -msgstr "" - -#: src/window.py:882 -msgid "New message from '{}'" -msgstr "" - -#: src/window.py:939 +#: src/window.py:969 msgid "Task Complete" msgstr "Tarefa Concluída" -#: src/window.py:939 +#: src/window.py:969 msgid "Model '{}' pulled successfully." msgstr "O modelo '{}' foi baixado com sucesso" -#: src/window.py:944 +#: src/window.py:974 msgid "Pull Model Error" msgstr "Erro ao Baixar Modelo" -#: src/window.py:944 +#: src/window.py:974 msgid "Failed to pull model '{}' due to network error." msgstr "Não foi possível baixar o modelo '{}' devido a um erro de rede." -#: src/window.py:978 +#: src/window.py:1008 msgid "Stop Pulling '{} ({})'" msgstr "" @@ -889,7 +889,7 @@ msgstr "Menu" msgid "Toggle Sidebar" msgstr "Alternar barra lateral" -#: src/window.ui:107 src/window.ui:595 +#: src/window.ui:107 src/window.ui:587 #, fuzzy msgid "Manage Models" msgstr "Gerenciar modelos" @@ -903,29 +903,29 @@ msgstr "Menu" msgid "Attach File" msgstr "" -#: src/window.ui:238 src/window.ui:1120 +#: src/window.ui:242 src/window.ui:1127 msgid "Send Message" msgstr "Enviar Mensagem" -#: src/window.ui:286 src/window.ui:965 src/window.ui:1079 +#: src/window.ui:290 src/window.ui:972 src/window.ui:1086 msgid "Preferences" msgstr "Preferências" -#: src/window.ui:289 src/window.ui:1057 +#: src/window.ui:293 src/window.ui:1064 msgid "General" msgstr "Geral" -#: src/window.ui:297 +#: src/window.ui:299 #, fuzzy msgid "Use Remote Connection to Ollama" msgstr "Gerencia uma conexão remota com Ollama" -#: src/window.ui:303 +#: src/window.ui:305 #, fuzzy msgid "URL of Remote Instance" msgstr "URL da instância remota" -#: src/window.ui:310 +#: src/window.ui:312 msgid "Bearer Token (Optional)" msgstr "" @@ -934,125 +934,126 @@ msgstr "" msgid "Run Alpaca In Background" msgstr "Executar em segundo plano" -#: src/window.ui:331 -#, fuzzy -msgid "Model" -msgstr "Parar Modelo" - -#: src/window.ui:341 +#: src/window.ui:333 #, fuzzy msgid "Temperature" msgstr "Funcionalidades" -#: src/window.ui:342 +#: src/window.ui:334 msgid "" "The temperature of the model. Increasing the temperature will make the model " "answer more creatively. (Default: 0.8)" msgstr "" -#: src/window.ui:357 +#: src/window.ui:349 msgid "Seed" msgstr "" -#: src/window.ui:358 +#: src/window.ui:350 msgid "" "Sets the random number seed to use for generation. Setting this to a " "specific number will make the model generate the same text for the same " "prompt. 
(Default: 0 (random))" msgstr "" -#: src/window.ui:372 +#: src/window.ui:364 msgid "Keep Alive Time" msgstr "" -#: src/window.ui:373 +#: src/window.ui:365 msgid "" "Controls how long the model will stay loaded into memory following the " "request in minutes (Default: 5)" msgstr "" -#: src/window.ui:389 +#: src/window.ui:381 #, fuzzy msgid "Ollama Instance" msgstr "Um cliente Ollama" -#: src/window.ui:393 +#: src/window.ui:385 #, fuzzy msgid "Ollama Overrides" msgstr "Site do Ollama" -#: src/window.ui:394 +#: src/window.ui:386 msgid "" "Manage the arguments used on Ollama, any changes on this page only applies " "to the integrated instance, the instance will restart if you make changes." msgstr "" -#: src/window.ui:477 +#: src/window.ui:469 msgid "Create" msgstr "Criar" -#: src/window.ui:490 src/window.ui:605 +#: src/window.ui:482 src/window.ui:597 #, fuzzy msgid "Create Model" msgstr "Criar" -#: src/window.ui:516 +#: src/window.ui:508 msgid "Base" msgstr "" -#: src/window.ui:534 +#: src/window.ui:526 msgid "Name" msgstr "" -#: src/window.ui:540 +#: src/window.ui:532 msgid "Context" msgstr "" -#: src/window.ui:555 +#: src/window.ui:547 #, fuzzy msgid "Template" msgstr "Funcionalidades" -#: src/window.ui:561 +#: src/window.ui:553 msgid "" "Some models require a specific template. Please visit the model's website " "for more information if you're unsure." msgstr "" -#: src/window.ui:612 +#: src/window.ui:604 #, fuzzy msgid "Search Model" msgstr "Funcionalidades" -#: src/window.ui:672 +#: src/window.ui:664 msgid "No Models Found" msgstr "" -#: src/window.ui:673 +#: src/window.ui:665 msgid "Try a different search" msgstr "" -#: src/window.ui:738 +#: src/window.ui:708 +msgid "" +"By downloading this model you accept the license agreement available on the " +"model's website." +msgstr "" + +#: src/window.ui:745 msgid "Open with Default App" msgstr "" -#: src/window.ui:790 +#: src/window.ui:797 msgid "Previous" msgstr "Anterior" -#: src/window.ui:833 +#: src/window.ui:840 msgid "Welcome to Alpaca" msgstr "Bem-vindo(a) a Alpaca" -#: src/window.ui:834 +#: src/window.ui:841 msgid "Powered by Ollama" msgstr "Com tecnologia Ollama" -#: src/window.ui:837 +#: src/window.ui:844 msgid "Ollama Website" msgstr "Site do Ollama" -#: src/window.ui:854 +#: src/window.ui:861 msgid "" "Alpaca and its developers are not liable for any damages to devices or " "software resulting from the execution of code generated by an AI model. " @@ -1063,108 +1064,112 @@ msgstr "" "por um modelo de IA. Por favor, tenha cuidado e revise o código com cuidado " "antes de executá-lo." -#: src/window.ui:865 +#: src/window.ui:872 #, fuzzy msgid "Featured Models" msgstr "Funcionalidades" -#: src/window.ui:866 +#: src/window.ui:873 msgid "" "Alpaca works locally on your device, to start chatting you'll need an AI " "model, you can either pull models from this list or the 'Manage Models' menu " "later." 
msgstr "" -#: src/window.ui:876 +#: src/window.ui:883 msgid "Built by Meta" msgstr "" -#: src/window.ui:894 +#: src/window.ui:901 msgid "Built by Google DeepMind" msgstr "" -#: src/window.ui:912 +#: src/window.ui:919 msgid "Built by Microsoft" msgstr "" -#: src/window.ui:930 +#: src/window.ui:937 msgid "Multimodal AI with image recognition" msgstr "" -#: src/window.ui:959 +#: src/window.ui:966 #, fuzzy msgid "Import Chat" msgstr "Importar conversa" -#: src/window.ui:969 +#: src/window.ui:976 msgid "Keyboard Shortcuts" msgstr "Atalhos de Teclado" -#: src/window.ui:973 +#: src/window.ui:980 msgid "About Alpaca" msgstr "Sobre Alpaca" -#: src/window.ui:980 src/window.ui:999 +#: src/window.ui:987 src/window.ui:1006 msgid "Rename Chat" msgstr "Renomear Conversa" -#: src/window.ui:984 src/window.ui:1003 +#: src/window.ui:991 src/window.ui:1010 #, fuzzy msgid "Export Chat" msgstr "Importar conversa" -#: src/window.ui:988 +#: src/window.ui:995 msgid "Clear Chat" msgstr "Limpar Conversa" -#: src/window.ui:995 +#: src/window.ui:1002 msgid "Delete Chat" msgstr "Excluir Conversa" -#: src/window.ui:1011 +#: src/window.ui:1018 msgid "From Existing Model" msgstr "" -#: src/window.ui:1015 +#: src/window.ui:1022 msgid "From GGUF File (Experimental)" msgstr "" -#: src/window.ui:1061 +#: src/window.ui:1068 msgid "Close application" msgstr "Fechar aplicativo" -#: src/window.ui:1067 +#: src/window.ui:1074 msgid "Import chat" msgstr "Importar conversa" -#: src/window.ui:1073 +#: src/window.ui:1080 msgid "Clear chat" msgstr "Limpar chat" -#: src/window.ui:1085 +#: src/window.ui:1092 msgid "New chat" msgstr "Nova conversa" -#: src/window.ui:1091 +#: src/window.ui:1098 msgid "Show shortcuts window" msgstr "Mostrar janela de atalhos" -#: src/window.ui:1098 +#: src/window.ui:1105 msgid "Editor" msgstr "Editor" -#: src/window.ui:1102 +#: src/window.ui:1109 msgid "Copy" msgstr "Copiar" -#: src/window.ui:1108 +#: src/window.ui:1115 msgid "Paste" msgstr "Colar" -#: src/window.ui:1114 +#: src/window.ui:1121 msgid "Insert new line" msgstr "" +#, fuzzy +#~ msgid "Model" +#~ msgstr "Parar Modelo" + #, fuzzy #~ msgid "Send message" #~ msgstr "Enviar Mensagem" diff --git a/po/ru.po b/po/ru.po index 38c9f40..f66f7db 100644 --- a/po/ru.po +++ b/po/ru.po @@ -7,7 +7,7 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" "Report-Msgid-Bugs-To: \n" -"POT-Creation-Date: 2024-07-07 16:04-0600\n" +"POT-Creation-Date: 2024-07-07 17:40-0600\n" "PO-Revision-Date: 2024-05-25 10:44+0800\n" "Last-Translator: (YOUR NAME) <(EMAIL OPTIONAL)>\n" "Language-Team: Russian\n" @@ -1114,6 +1114,511 @@ msgstr "Вставить" msgid "Insert new line" msgstr "Вставить новую строку" +#: src/available_models_descriptions.py:2 +msgid "Google Gemma 2 is now available in 2 sizes, 9B and 27B." +msgstr "" + +#: src/available_models_descriptions.py:3 +msgid "Meta Llama 3: The most capable openly available LLM to date" +msgstr "" + +#: src/available_models_descriptions.py:4 +msgid "Qwen2 is a new series of large language models from Alibaba group" +msgstr "" + +#: src/available_models_descriptions.py:5 +msgid "" +"An open-source Mixture-of-Experts code language model that achieves " +"performance comparable to GPT4-Turbo in code-specific tasks." +msgstr "" + +#: src/available_models_descriptions.py:6 +msgid "" +"Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art " +"open models by Microsoft." 
+msgstr "" + +#: src/available_models_descriptions.py:7 +msgid "" +"Aya 23, released by Cohere, is a new family of state-of-the-art, " +"multilingual models that support 23 languages." +msgstr "" + +#: src/available_models_descriptions.py:8 +msgid "The 7B model released by Mistral AI, updated to version 0.3." +msgstr "" + +#: src/available_models_descriptions.py:9 +msgid "" +"A set of Mixture of Experts (MoE) model with open weights by Mistral AI in " +"8x7b and 8x22b parameter sizes." +msgstr "" + +#: src/available_models_descriptions.py:10 +msgid "" +"CodeGemma is a collection of powerful, lightweight models that can perform a " +"variety of coding tasks like fill-in-the-middle code completion, code " +"generation, natural language understanding, mathematical reasoning, and " +"instruction following." +msgstr "" + +#: src/available_models_descriptions.py:11 +msgid "" +"Command R is a Large Language Model optimized for conversational interaction " +"and long context tasks." +msgstr "" + +#: src/available_models_descriptions.py:12 +msgid "" +"Command R+ is a powerful, scalable large language model purpose-built to " +"excel at real-world enterprise use cases." +msgstr "" + +#: src/available_models_descriptions.py:13 +msgid "" +"🌋 LLaVA is a novel end-to-end trained large multimodal model that combines " +"a vision encoder and Vicuna for general-purpose visual and language " +"understanding. Updated to version 1.6." +msgstr "" + +#: src/available_models_descriptions.py:14 +msgid "" +"Gemma is a family of lightweight, state-of-the-art open models built by " +"Google DeepMind. Updated to version 1.1" +msgstr "" + +#: src/available_models_descriptions.py:15 +msgid "" +"Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from " +"0.5B to 110B parameters" +msgstr "" + +#: src/available_models_descriptions.py:16 +msgid "" +"Llama 2 is a collection of foundation language models ranging from 7B to 70B " +"parameters." +msgstr "" + +#: src/available_models_descriptions.py:17 +msgid "" +"A large language model that can use text prompts to generate and discuss " +"code." +msgstr "" + +#: src/available_models_descriptions.py:18 +msgid "" +"Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of " +"experts models that excels at coding tasks. Created by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:19 +msgid "Uncensored Llama 2 model by George Sung and Jarrad Hope." +msgstr "" + +#: src/available_models_descriptions.py:20 +msgid "" +"DeepSeek Coder is a capable coding model trained on two trillion code and " +"natural language tokens." +msgstr "" + +#: src/available_models_descriptions.py:21 +msgid "" +"A high-performing open embedding model with a large token context window." +msgstr "" + +#: src/available_models_descriptions.py:22 +msgid "" +"Phi-2: a 2.7B language model by Microsoft Research that demonstrates " +"outstanding reasoning and language understanding capabilities." +msgstr "" + +#: src/available_models_descriptions.py:23 +msgid "" +"The uncensored Dolphin model based on Mistral that excels at coding tasks. " +"Updated to version 2.8." +msgstr "" + +#: src/available_models_descriptions.py:24 +msgid "" +"Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the " +"Mistral 7B model using the OpenOrca dataset." +msgstr "" + +#: src/available_models_descriptions.py:25 +msgid "" +"A general-purpose model ranging from 3 billion parameters to 70 billion, " +"suitable for entry-level hardware." 
+msgstr "" + +#: src/available_models_descriptions.py:26 +msgid "State-of-the-art large embedding model from mixedbread.ai" +msgstr "" + +#: src/available_models_descriptions.py:27 +msgid "" +"Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on " +"Llama 3 that has a variety of instruction, conversational, and coding skills." +msgstr "" + +#: src/available_models_descriptions.py:28 +msgid "" +"StarCoder2 is the next generation of transparently trained open code LLMs " +"that comes in three sizes: 3B, 7B and 15B parameters." +msgstr "" + +#: src/available_models_descriptions.py:29 +msgid "Llama 2 based model fine tuned to improve Chinese dialogue ability." +msgstr "" + +#: src/available_models_descriptions.py:30 +msgid "" +"Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models " +"that are trained to act as helpful assistants." +msgstr "" + +#: src/available_models_descriptions.py:31 +msgid "Yi 1.5 is a high-performing, bilingual language model." +msgstr "" + +#: src/available_models_descriptions.py:32 +msgid "" +"The powerful family of models by Nous Research that excels at scientific " +"discussion and coding tasks." +msgstr "" + +#: src/available_models_descriptions.py:33 +msgid "" +"General use chat model based on Llama and Llama 2 with 2K to 16K context " +"sizes." +msgstr "" + +#: src/available_models_descriptions.py:34 +msgid "" +"Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on " +"Llama 2 uncensored by Eric Hartford." +msgstr "" + +#: src/available_models_descriptions.py:35 +msgid "" +"The TinyLlama project is an open endeavor to train a compact 1.1B Llama " +"model on 3 trillion tokens." +msgstr "" + +#: src/available_models_descriptions.py:36 +msgid "" +"State of the art large language model from Microsoft AI with improved " +"performance on complex chat, multilingual, reasoning and agent use cases." +msgstr "" + +#: src/available_models_descriptions.py:37 +msgid "" +"StarCoder is a code generation model trained on 80+ programming languages." +msgstr "" + +#: src/available_models_descriptions.py:38 +msgid "" +"Codestral is Mistral AI’s first-ever code model designed for code generation " +"tasks." +msgstr "" + +#: src/available_models_descriptions.py:39 +msgid "" +"A family of open-source models trained on a wide variety of data, surpassing " +"ChatGPT on various benchmarks. Updated to version 3.5-0106." +msgstr "" + +#: src/available_models_descriptions.py:40 +msgid "" +"An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset " +"by Eric Hartford and based on TinyLlama." +msgstr "" + +#: src/available_models_descriptions.py:41 +msgid "" +"OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully " +"open datasets." +msgstr "" + +#: src/available_models_descriptions.py:42 +msgid "State-of-the-art code generation model" +msgstr "" + +#: src/available_models_descriptions.py:43 +msgid "" +"Stable Code 3B is a coding model with instruct and code completion variants " +"on par with models such as Code Llama 7B that are 2.5x larger." +msgstr "" + +#: src/available_models_descriptions.py:44 +msgid "" +"A fine-tuned model based on Mistral with good coverage of domain and " +"language." +msgstr "" + +#: src/available_models_descriptions.py:45 +msgid "Model focused on math and logic problems" +msgstr "" + +#: src/available_models_descriptions.py:46 +msgid "" +"CodeQwen1.5 is a large language model pretrained on a large amount of code " +"data." 
+msgstr "" + +#: src/available_models_descriptions.py:47 +msgid "Code generation model based on Code Llama." +msgstr "" + +#: src/available_models_descriptions.py:48 +msgid "" +"Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model " +"trained on multilingual data in English, Spanish, German, Italian, French, " +"Portuguese, and Dutch." +msgstr "" + +#: src/available_models_descriptions.py:49 +msgid "" +"A 7B and 15B uncensored variant of the Dolphin model family that excels at " +"coding, based on StarCoder2." +msgstr "" + +#: src/available_models_descriptions.py:50 +msgid "Embedding models on very large sentence level datasets." +msgstr "" + +#: src/available_models_descriptions.py:51 +msgid "General use models based on Llama and Llama 2 from Nous Research." +msgstr "" + +#: src/available_models_descriptions.py:52 +msgid "" +"Starling is a large language model trained by reinforcement learning from AI " +"feedback focused on improving chatbot helpfulness." +msgstr "" + +#: src/available_models_descriptions.py:53 +msgid "" +"SQLCoder is a code completion model fined-tuned on StarCoder for SQL " +"generation tasks" +msgstr "" + +#: src/available_models_descriptions.py:54 +msgid "" +"Orca 2 is built by Microsoft research, and are a fine-tuned version of " +"Meta's Llama 2 models. The model is designed to excel particularly in " +"reasoning." +msgstr "" + +#: src/available_models_descriptions.py:55 +msgid "" +"This model extends LLama-3 8B's context length from 8k to over 1m tokens." +msgstr "" + +#: src/available_models_descriptions.py:56 +msgid "An advanced language model crafted with 2 trillion bilingual tokens." +msgstr "" + +#: src/available_models_descriptions.py:57 +msgid "An extension of Llama 2 that supports a context of up to 128k tokens." +msgstr "" + +#: src/available_models_descriptions.py:58 +msgid "" +"A model from NVIDIA based on Llama 3 that excels at conversational question " +"answering (QA) and retrieval-augmented generation (RAG)." +msgstr "" + +#: src/available_models_descriptions.py:59 +msgid "" +"A compact, yet powerful 10.7B large language model designed for single-turn " +"conversation." +msgstr "" + +#: src/available_models_descriptions.py:60 +msgid "" +"Conversational model based on Llama 2 that performs competitively on various " +"benchmarks." +msgstr "" + +#: src/available_models_descriptions.py:61 +msgid "A family of open foundation models by IBM for Code Intelligence" +msgstr "" + +#: src/available_models_descriptions.py:62 +msgid "" +"2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language " +"model by Microsoft Research." +msgstr "" + +#: src/available_models_descriptions.py:63 +msgid "General use model based on Llama 2." +msgstr "" + +#: src/available_models_descriptions.py:64 +msgid "" +"A companion assistant trained in philosophy, psychology, and personal " +"relationships. Based on Mistral." +msgstr "" + +#: src/available_models_descriptions.py:65 +msgid "" +"Llama 2 based model fine tuned on an Orca-style dataset. Originally called " +"Free Willy." +msgstr "" + +#: src/available_models_descriptions.py:66 +msgid "" +"BakLLaVA is a multimodal model consisting of the Mistral 7B base model " +"augmented with the LLaVA architecture." +msgstr "" + +#: src/available_models_descriptions.py:67 +msgid "" +"A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several " +"benchmarks." 
+msgstr "" + +#: src/available_models_descriptions.py:68 +msgid "Uncensored version of Wizard LM model" +msgstr "" + +#: src/available_models_descriptions.py:69 +msgid "" +"Fine-tuned Llama 2 model to answer medical questions based on an open source " +"medical dataset." +msgstr "" + +#: src/available_models_descriptions.py:70 +msgid "The Nous Hermes 2 model from Nous Research, now trained over Mixtral." +msgstr "" + +#: src/available_models_descriptions.py:71 +msgid "An extension of Mistral to support context windows of 64K or 128K." +msgstr "" + +#: src/available_models_descriptions.py:72 +msgid "" +"A suite of text embedding models by Snowflake, optimized for performance." +msgstr "" + +#: src/available_models_descriptions.py:73 +msgid "" +"An expansion of Llama 2 that specializes in integrating both general " +"language understanding and domain-specific knowledge, particularly in " +"programming and mathematics." +msgstr "" + +#: src/available_models_descriptions.py:74 +msgid "Great code generation model based on Llama2." +msgstr "" + +#: src/available_models_descriptions.py:75 +msgid "" +"Open-source medical large language model adapted from Llama 2 to the medical " +"domain." +msgstr "" + +#: src/available_models_descriptions.py:76 +msgid "" +"moondream2 is a small vision language model designed to run efficiently on " +"edge devices." +msgstr "" + +#: src/available_models_descriptions.py:77 +msgid "Uncensored Llama2 based model with support for a 16K context window." +msgstr "" + +#: src/available_models_descriptions.py:78 +msgid "" +"Nexus Raven is a 13B instruction tuned model for function calling tasks." +msgstr "" + +#: src/available_models_descriptions.py:79 +msgid "" +"🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic " +"instruction data using OSS-Instruct, a novel approach to enlightening LLMs " +"with open-source code snippets." +msgstr "" + +#: src/available_models_descriptions.py:80 +msgid "A strong, economical, and efficient Mixture-of-Experts language model." +msgstr "" + +#: src/available_models_descriptions.py:81 +msgid "" +"A lightweight chat model allowing accurate, and responsive output without " +"requiring high-end hardware." +msgstr "" + +#: src/available_models_descriptions.py:82 +msgid "" +"A high-performing code instruct model created by merging two existing code " +"models." +msgstr "" + +#: src/available_models_descriptions.py:83 +msgid "A new small LLaVA model fine-tuned from Phi 3 Mini." +msgstr "" + +#: src/available_models_descriptions.py:84 +msgid "" +"MistralLite is a fine-tuned model based on Mistral with enhanced " +"capabilities of processing long contexts." +msgstr "" + +#: src/available_models_descriptions.py:85 +msgid "" +"Wizard Vicuna is a 13B parameter model based on Llama 2 trained by " +"MelodysDreamj." +msgstr "" + +#: src/available_models_descriptions.py:86 +msgid "7B parameter text-to-SQL model made by MotherDuck and Numbers Station." +msgstr "" + +#: src/available_models_descriptions.py:87 +msgid "" +"A language model created by combining two fine-tuned Llama 2 70B models into " +"one." +msgstr "" + +#: src/available_models_descriptions.py:88 +msgid "" +"MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by " +"interleaving the model with itself." +msgstr "" + +#: src/available_models_descriptions.py:89 +msgid "" +"Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. " +"Designed for chat and code generation." 
+msgstr "" + +#: src/available_models_descriptions.py:90 +msgid "" +"A top-performing mixture of experts model, fine-tuned with high-quality data." +msgstr "" + +#: src/available_models_descriptions.py:91 +msgid "A 7B chat model fine-tuned with high-quality data and based on Zephyr." +msgstr "" + +#: src/available_models_descriptions.py:92 +msgid "DBRX is an open, general-purpose LLM created by Databricks." +msgstr "" + +#: src/available_models_descriptions.py:93 +msgid "" +"Falcon2 is an 11B parameters causal decoder-only model built by TII and " +"trained over 5T tokens." +msgstr "" + +#: src/available_models_descriptions.py:94 +msgid "" +"A robust conversational model designed to be used for both chat and instruct " +"use cases." +msgstr "" + #, fuzzy #~ msgid "Model" #~ msgstr "Остановить Модель" diff --git a/po/ru.po~ b/po/ru.po~ index 79f7a52..38c9f40 100644 --- a/po/ru.po~ +++ b/po/ru.po~ @@ -6,8 +6,8 @@ msgid "" msgstr "" "Project-Id-Version: 1.0.0\n" -"Report-Msgid-Bugs-To: https://github.com/Jeffser/Alpaca\n" -"POT-Creation-Date: 2024-07-02 18:21-0600\n" +"Report-Msgid-Bugs-To: \n" +"POT-Creation-Date: 2024-07-07 16:04-0600\n" "PO-Revision-Date: 2024-05-25 10:44+0800\n" "Last-Translator: (YOUR NAME) <(EMAIL OPTIONAL)>\n" "Language-Team: Russian\n" @@ -73,7 +73,7 @@ msgstr "" msgid "Import and export chats" msgstr "Импорт чата" -#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:853 +#: data/com.jeffser.Alpaca.metainfo.xml.in:21 src/window.ui:860 msgid "Disclaimer" msgstr "Отказ от ответственности" @@ -665,7 +665,7 @@ msgstr "" msgid "This is the first public version of Alpaca" msgstr "" -#: src/window.py:58 src/window.py:1053 src/window.py:1115 src/window.ui:41 +#: src/window.py:58 src/window.py:1083 src/window.py:1145 src/window.ui:41 msgid "New Chat" msgstr "Новый Чат" @@ -760,72 +760,72 @@ msgstr "Чат успешно экспортирован" msgid "Chat imported successfully" msgstr "Чат успешно импортирован" -#: src/window.py:278 +#: src/window.py:280 msgid "Close" msgstr "" -#: src/window.py:279 src/window.ui:806 +#: src/window.py:281 src/window.ui:813 msgid "Next" msgstr "Следующий" -#: src/window.py:329 +#: src/window.py:332 #, fuzzy msgid "Pulling in the background..." msgstr "Запуск в фоновом режиме" -#: src/window.py:381 +#: src/window.py:384 msgid "Stop Creating '{}'" msgstr "" -#: src/window.py:418 +#: src/window.py:421 #, fuzzy msgid "image" msgstr "Изображение" -#: src/window.py:588 +#: src/window.py:593 #, fuzzy msgid "Remove Message" msgstr "Удалить Изображение" -#: src/window.py:593 src/window.py:841 +#: src/window.py:598 src/window.py:869 #, fuzzy msgid "Copy Message" msgstr "Отправить Сообщение" -#: src/window.py:598 +#: src/window.py:603 #, fuzzy msgid "Edit Message" msgstr "Отправить Сообщение" -#: src/window.py:729 +#: src/window.py:661 +msgid "Missing Image" +msgstr "" + +#: src/window.py:677 +msgid "Missing image" +msgstr "" + +#: src/window.py:757 msgid "Remove '{} ({})'" msgstr "" -#: src/window.py:882 -msgid "Message Received" -msgstr "" - -#: src/window.py:882 -msgid "New message from '{}'" -msgstr "" - -#: src/window.py:939 +#: src/window.py:969 msgid "Task Complete" msgstr "Задача выполнена" -#: src/window.py:939 +#: src/window.py:969 msgid "Model '{}' pulled successfully." msgstr "Модель '{}' успешно извлечена." -#: src/window.py:944 +#: src/window.py:974 msgid "Pull Model Error" msgstr "Ошибка Извлечения Модели" -#: src/window.py:944 +#: src/window.py:974 msgid "Failed to pull model '{}' due to network error." 
msgstr "Не удалось извлечь модель '{}' из-за сетевой ошибки." -#: src/window.py:978 +#: src/window.py:1008 msgid "Stop Pulling '{} ({})'" msgstr "" @@ -837,7 +837,7 @@ msgstr "Меню" msgid "Toggle Sidebar" msgstr "Переключение боковой панели" -#: src/window.ui:107 src/window.ui:595 +#: src/window.ui:107 src/window.ui:587 #, fuzzy msgid "Manage Models" msgstr "Управление моделями" @@ -851,29 +851,29 @@ msgstr "Меню" msgid "Attach File" msgstr "" -#: src/window.ui:238 src/window.ui:1120 +#: src/window.ui:242 src/window.ui:1127 msgid "Send Message" msgstr "Отправить Сообщение" -#: src/window.ui:286 src/window.ui:965 src/window.ui:1079 +#: src/window.ui:290 src/window.ui:972 src/window.ui:1086 msgid "Preferences" msgstr "Настройки" -#: src/window.ui:289 src/window.ui:1057 +#: src/window.ui:293 src/window.ui:1064 msgid "General" msgstr "Общие" -#: src/window.ui:297 +#: src/window.ui:299 #, fuzzy msgid "Use Remote Connection to Ollama" msgstr "Управление удаленным подключением к Ollama" -#: src/window.ui:303 +#: src/window.ui:305 #, fuzzy msgid "URL of Remote Instance" msgstr "URL-адрес удаленного экземпляра" -#: src/window.ui:310 +#: src/window.ui:312 msgid "Bearer Token (Optional)" msgstr "" @@ -882,124 +882,125 @@ msgstr "" msgid "Run Alpaca In Background" msgstr "Запуск в фоновом режиме" -#: src/window.ui:331 -#, fuzzy -msgid "Model" -msgstr "Остановить Модель" - -#: src/window.ui:341 +#: src/window.ui:333 #, fuzzy msgid "Temperature" msgstr "Новый Чат" -#: src/window.ui:342 +#: src/window.ui:334 msgid "" "The temperature of the model. Increasing the temperature will make the model " "answer more creatively. (Default: 0.8)" msgstr "" -#: src/window.ui:357 +#: src/window.ui:349 msgid "Seed" msgstr "" -#: src/window.ui:358 +#: src/window.ui:350 msgid "" "Sets the random number seed to use for generation. Setting this to a " "specific number will make the model generate the same text for the same " "prompt. (Default: 0 (random))" msgstr "" -#: src/window.ui:372 +#: src/window.ui:364 msgid "Keep Alive Time" msgstr "" -#: src/window.ui:373 +#: src/window.ui:365 msgid "" "Controls how long the model will stay loaded into memory following the " "request in minutes (Default: 5)" msgstr "" -#: src/window.ui:389 +#: src/window.ui:381 #, fuzzy msgid "Ollama Instance" msgstr "Веб-сайт Ollama" -#: src/window.ui:393 +#: src/window.ui:385 #, fuzzy msgid "Ollama Overrides" msgstr "Веб-сайт Ollama" -#: src/window.ui:394 +#: src/window.ui:386 msgid "" "Manage the arguments used on Ollama, any changes on this page only applies " "to the integrated instance, the instance will restart if you make changes." msgstr "" -#: src/window.ui:477 +#: src/window.ui:469 msgid "Create" msgstr "Создать" -#: src/window.ui:490 src/window.ui:605 +#: src/window.ui:482 src/window.ui:597 #, fuzzy msgid "Create Model" msgstr "Удалить Модель" -#: src/window.ui:516 +#: src/window.ui:508 msgid "Base" msgstr "" -#: src/window.ui:534 +#: src/window.ui:526 msgid "Name" msgstr "" -#: src/window.ui:540 +#: src/window.ui:532 msgid "Context" msgstr "" -#: src/window.ui:555 +#: src/window.ui:547 msgid "Template" msgstr "" -#: src/window.ui:561 +#: src/window.ui:553 msgid "" "Some models require a specific template. Please visit the model's website " "for more information if you're unsure." 
msgstr "" -#: src/window.ui:612 +#: src/window.ui:604 #, fuzzy msgid "Search Model" msgstr "Остановить Модель" -#: src/window.ui:672 +#: src/window.ui:664 msgid "No Models Found" msgstr "" -#: src/window.ui:673 +#: src/window.ui:665 msgid "Try a different search" msgstr "" -#: src/window.ui:738 +#: src/window.ui:708 +msgid "" +"By downloading this model you accept the license agreement available on the " +"model's website." +msgstr "" + +#: src/window.ui:745 msgid "Open with Default App" msgstr "" -#: src/window.ui:790 +#: src/window.ui:797 msgid "Previous" msgstr "Предыдущий" -#: src/window.ui:833 +#: src/window.ui:840 msgid "Welcome to Alpaca" msgstr "Добро пожаловать в Alpaca" -#: src/window.ui:834 +#: src/window.ui:841 msgid "Powered by Ollama" msgstr "При поддержке Ollama" -#: src/window.ui:837 +#: src/window.ui:844 msgid "Ollama Website" msgstr "Веб-сайт Ollama" -#: src/window.ui:854 +#: src/window.ui:861 msgid "" "Alpaca and its developers are not liable for any damages to devices or " "software resulting from the execution of code generated by an AI model. " @@ -1011,108 +1012,112 @@ msgstr "" "Пожалуйста, будьте осторожны и внимательно ознакомьтесь с кодом перед его " "запуском." -#: src/window.ui:865 +#: src/window.ui:872 #, fuzzy msgid "Featured Models" msgstr "Удалить Модель" -#: src/window.ui:866 +#: src/window.ui:873 msgid "" "Alpaca works locally on your device, to start chatting you'll need an AI " "model, you can either pull models from this list or the 'Manage Models' menu " "later." msgstr "" -#: src/window.ui:876 +#: src/window.ui:883 msgid "Built by Meta" msgstr "" -#: src/window.ui:894 +#: src/window.ui:901 msgid "Built by Google DeepMind" msgstr "" -#: src/window.ui:912 +#: src/window.ui:919 msgid "Built by Microsoft" msgstr "" -#: src/window.ui:930 +#: src/window.ui:937 msgid "Multimodal AI with image recognition" msgstr "" -#: src/window.ui:959 +#: src/window.ui:966 #, fuzzy msgid "Import Chat" msgstr "Импорт чата" -#: src/window.ui:969 +#: src/window.ui:976 msgid "Keyboard Shortcuts" msgstr "Комбинации Клавиш" -#: src/window.ui:973 +#: src/window.ui:980 msgid "About Alpaca" msgstr "О Программе" -#: src/window.ui:980 src/window.ui:999 +#: src/window.ui:987 src/window.ui:1006 msgid "Rename Chat" msgstr "Переименовать Чат" -#: src/window.ui:984 src/window.ui:1003 +#: src/window.ui:991 src/window.ui:1010 #, fuzzy msgid "Export Chat" msgstr "Экспорт чата" -#: src/window.ui:988 +#: src/window.ui:995 msgid "Clear Chat" msgstr "Очистить Чат" -#: src/window.ui:995 +#: src/window.ui:1002 msgid "Delete Chat" msgstr "Удалить Чат" -#: src/window.ui:1011 +#: src/window.ui:1018 msgid "From Existing Model" msgstr "" -#: src/window.ui:1015 +#: src/window.ui:1022 msgid "From GGUF File (Experimental)" msgstr "" -#: src/window.ui:1061 +#: src/window.ui:1068 msgid "Close application" msgstr "Закрыть приложение" -#: src/window.ui:1067 +#: src/window.ui:1074 msgid "Import chat" msgstr "Импорт чата" -#: src/window.ui:1073 +#: src/window.ui:1080 msgid "Clear chat" msgstr "Очистить чат" -#: src/window.ui:1085 +#: src/window.ui:1092 msgid "New chat" msgstr "Новый чат" -#: src/window.ui:1091 +#: src/window.ui:1098 msgid "Show shortcuts window" msgstr "Показать окно комбинаций клавиш" -#: src/window.ui:1098 +#: src/window.ui:1105 msgid "Editor" msgstr "Редактор" -#: src/window.ui:1102 +#: src/window.ui:1109 msgid "Copy" msgstr "Копировать" -#: src/window.ui:1108 +#: src/window.ui:1115 msgid "Paste" msgstr "Вставить" -#: src/window.ui:1114 +#: src/window.ui:1121 msgid "Insert new line" msgstr 
"Вставить новую строку" +#, fuzzy +#~ msgid "Model" +#~ msgstr "Остановить Модель" + #~ msgctxt "shortcut window" #~ msgid "General" #~ msgstr "Общие" diff --git a/src/available_models_descriptions.py b/src/available_models_descriptions.py new file mode 100644 index 0000000..4652a63 --- /dev/null +++ b/src/available_models_descriptions.py @@ -0,0 +1,95 @@ +descriptions = { + 'gemma2': _("Google Gemma 2 is now available in 2 sizes, 9B and 27B."), + 'llama3': _("Meta Llama 3: The most capable openly available LLM to date"), + 'qwen2': _("Qwen2 is a new series of large language models from Alibaba group"), + 'deepseek-coder-v2': _("An open-source Mixture-of-Experts code language model that achieves performance comparable to GPT4-Turbo in code-specific tasks."), + 'phi3': _("Phi-3 is a family of lightweight 3B (Mini) and 14B (Medium) state-of-the-art open models by Microsoft."), + 'aya': _("Aya 23, released by Cohere, is a new family of state-of-the-art, multilingual models that support 23 languages."), + 'mistral': _("The 7B model released by Mistral AI, updated to version 0.3."), + 'mixtral': _("A set of Mixture of Experts (MoE) model with open weights by Mistral AI in 8x7b and 8x22b parameter sizes."), + 'codegemma': _("CodeGemma is a collection of powerful, lightweight models that can perform a variety of coding tasks like fill-in-the-middle code completion, code generation, natural language understanding, mathematical reasoning, and instruction following."), + 'command-r': _("Command R is a Large Language Model optimized for conversational interaction and long context tasks."), + 'command-r-plus': _("Command R+ is a powerful, scalable large language model purpose-built to excel at real-world enterprise use cases."), + 'llava': _("🌋 LLaVA is a novel end-to-end trained large multimodal model that combines a vision encoder and Vicuna for general-purpose visual and language understanding. Updated to version 1.6."), + 'gemma': _("Gemma is a family of lightweight, state-of-the-art open models built by Google DeepMind. Updated to version 1.1"), + 'qwen': _("Qwen 1.5 is a series of large language models by Alibaba Cloud spanning from 0.5B to 110B parameters"), + 'llama2': _("Llama 2 is a collection of foundation language models ranging from 7B to 70B parameters."), + 'codellama': _("A large language model that can use text prompts to generate and discuss code."), + 'dolphin-mixtral': _("Uncensored, 8x7b and 8x22b fine-tuned models based on the Mixtral mixture of experts models that excels at coding tasks. Created by Eric Hartford."), + 'llama2-uncensored': _("Uncensored Llama 2 model by George Sung and Jarrad Hope."), + 'deepseek-coder': _("DeepSeek Coder is a capable coding model trained on two trillion code and natural language tokens."), + 'nomic-embed-text': _("A high-performing open embedding model with a large token context window."), + 'phi': _("Phi-2: a 2.7B language model by Microsoft Research that demonstrates outstanding reasoning and language understanding capabilities."), + 'dolphin-mistral': _("The uncensored Dolphin model based on Mistral that excels at coding tasks. 
Updated to version 2.8."), + 'mistral-openorca': _("Mistral OpenOrca is a 7 billion parameter model, fine-tuned on top of the Mistral 7B model using the OpenOrca dataset."), + 'orca-mini': _("A general-purpose model ranging from 3 billion parameters to 70 billion, suitable for entry-level hardware."), + 'mxbai-embed-large': _("State-of-the-art large embedding model from mixedbread.ai"), + 'dolphin-llama3': _("Dolphin 2.9 is a new model with 8B and 70B sizes by Eric Hartford based on Llama 3 that has a variety of instruction, conversational, and coding skills."), + 'starcoder2': _("StarCoder2 is the next generation of transparently trained open code LLMs that comes in three sizes: 3B, 7B and 15B parameters."), + 'llama2-chinese': _("Llama 2 based model fine tuned to improve Chinese dialogue ability."), + 'zephyr': _("Zephyr is a series of fine-tuned versions of the Mistral and Mixtral models that are trained to act as helpful assistants."), + 'yi': _("Yi 1.5 is a high-performing, bilingual language model."), + 'nous-hermes2': _("The powerful family of models by Nous Research that excels at scientific discussion and coding tasks."), + 'vicuna': _("General use chat model based on Llama and Llama 2 with 2K to 16K context sizes."), + 'wizard-vicuna-uncensored': _("Wizard Vicuna Uncensored is a 7B, 13B, and 30B parameter model based on Llama 2 uncensored by Eric Hartford."), + 'tinyllama': _("The TinyLlama project is an open endeavor to train a compact 1.1B Llama model on 3 trillion tokens."), + 'wizardlm2': _("State of the art large language model from Microsoft AI with improved performance on complex chat, multilingual, reasoning and agent use cases."), + 'starcoder': _("StarCoder is a code generation model trained on 80+ programming languages."), + 'codestral': _("Codestral is Mistral AI’s first-ever code model designed for code generation tasks."), + 'openchat': _("A family of open-source models trained on a wide variety of data, surpassing ChatGPT on various benchmarks. 
Updated to version 3.5-0106."), + 'tinydolphin': _("An experimental 1.1B parameter model trained on the new Dolphin 2.8 dataset by Eric Hartford and based on TinyLlama."), + 'openhermes': _("OpenHermes 2.5 is a 7B model fine-tuned by Teknium on Mistral with fully open datasets."), + 'wizardcoder': _("State-of-the-art code generation model"), + 'stable-code': _("Stable Code 3B is a coding model with instruct and code completion variants on par with models such as Code Llama 7B that are 2.5x larger."), + 'neural-chat': _("A fine-tuned model based on Mistral with good coverage of domain and language."), + 'wizard-math': _("Model focused on math and logic problems"), + 'codeqwen': _("CodeQwen1.5 is a large language model pretrained on a large amount of code data."), + 'phind-codellama': _("Code generation model based on Code Llama."), + 'stablelm2': _("Stable LM 2 is a state-of-the-art 1.6B and 12B parameter language model trained on multilingual data in English, Spanish, German, Italian, French, Portuguese, and Dutch."), + 'dolphincoder': _("A 7B and 15B uncensored variant of the Dolphin model family that excels at coding, based on StarCoder2."), + 'all-minilm': _("Embedding models on very large sentence level datasets."), + 'nous-hermes': _("General use models based on Llama and Llama 2 from Nous Research."), + 'starling-lm': _("Starling is a large language model trained by reinforcement learning from AI feedback focused on improving chatbot helpfulness."), + 'sqlcoder': _("SQLCoder is a code completion model fined-tuned on StarCoder for SQL generation tasks"), + 'orca2': _("Orca 2 is built by Microsoft research, and are a fine-tuned version of Meta's Llama 2 models. The model is designed to excel particularly in reasoning."), + 'llama3-gradient': _("This model extends LLama-3 8B's context length from 8k to over 1m tokens."), + 'deepseek-llm': _("An advanced language model crafted with 2 trillion bilingual tokens."), + 'yarn-llama2': _("An extension of Llama 2 that supports a context of up to 128k tokens."), + 'llama3-chatqa': _("A model from NVIDIA based on Llama 3 that excels at conversational question answering (QA) and retrieval-augmented generation (RAG)."), + 'solar': _("A compact, yet powerful 10.7B large language model designed for single-turn conversation."), + 'xwinlm': _("Conversational model based on Llama 2 that performs competitively on various benchmarks."), + 'granite-code': _("A family of open foundation models by IBM for Code Intelligence"), + 'dolphin-phi': _("2.7B uncensored Dolphin model by Eric Hartford, based on the Phi language model by Microsoft Research."), + 'wizardlm': _("General use model based on Llama 2."), + 'samantha-mistral': _("A companion assistant trained in philosophy, psychology, and personal relationships. Based on Mistral."), + 'stable-beluga': _("Llama 2 based model fine tuned on an Orca-style dataset. 
Originally called Free Willy."), + 'bakllava': _("BakLLaVA is a multimodal model consisting of the Mistral 7B base model augmented with the LLaVA architecture."), + 'llava-llama3': _("A LLaVA model fine-tuned from Llama 3 Instruct with better scores in several benchmarks."), + 'wizardlm-uncensored': _("Uncensored version of Wizard LM model"), + 'medllama2': _("Fine-tuned Llama 2 model to answer medical questions based on an open source medical dataset."), + 'nous-hermes2-mixtral': _("The Nous Hermes 2 model from Nous Research, now trained over Mixtral."), + 'yarn-mistral': _("An extension of Mistral to support context windows of 64K or 128K."), + 'snowflake-arctic-embed': _("A suite of text embedding models by Snowflake, optimized for performance."), + 'llama-pro': _("An expansion of Llama 2 that specializes in integrating both general language understanding and domain-specific knowledge, particularly in programming and mathematics."), + 'codeup': _("Great code generation model based on Llama2."), + 'meditron': _("Open-source medical large language model adapted from Llama 2 to the medical domain."), + 'moondream': _("moondream2 is a small vision language model designed to run efficiently on edge devices."), + 'everythinglm': _("Uncensored Llama2 based model with support for a 16K context window."), + 'nexusraven': _("Nexus Raven is a 13B instruction tuned model for function calling tasks."), + 'magicoder': _("🎩 Magicoder is a family of 7B parameter models trained on 75K synthetic instruction data using OSS-Instruct, a novel approach to enlightening LLMs with open-source code snippets."), + 'deepseek-v2': _("A strong, economical, and efficient Mixture-of-Experts language model."), + 'stablelm-zephyr': _("A lightweight chat model allowing accurate, and responsive output without requiring high-end hardware."), + 'codebooga': _("A high-performing code instruct model created by merging two existing code models."), + 'llava-phi3': _("A new small LLaVA model fine-tuned from Phi 3 Mini."), + 'mistrallite': _("MistralLite is a fine-tuned model based on Mistral with enhanced capabilities of processing long contexts."), + 'wizard-vicuna': _("Wizard Vicuna is a 13B parameter model based on Llama 2 trained by MelodysDreamj."), + 'duckdb-nsql': _("7B parameter text-to-SQL model made by MotherDuck and Numbers Station."), + 'goliath': _("A language model created by combining two fine-tuned Llama 2 70B models into one."), + 'megadolphin': _("MegaDolphin-2.2-120b is a transformation of Dolphin-2.2-70b created by interleaving the model with itself."), + 'open-orca-platypus2': _("Merge of the Open Orca OpenChat model and the Garage-bAInd Platypus 2 model. 
Designed for chat and code generation."), + 'notux': _("A top-performing mixture of experts model, fine-tuned with high-quality data."), + 'notus': _("A 7B chat model fine-tuned with high-quality data and based on Zephyr."), + 'dbrx': _("DBRX is an open, general-purpose LLM created by Databricks."), + 'falcon2': _("Falcon2 is an 11B parameters causal decoder-only model built by TII and trained over 5T tokens."), + 'alfred': _("A robust conversational model designed to be used for both chat and instruct use cases."), +} \ No newline at end of file diff --git a/src/meson.build b/src/meson.build index 8fc3649..75adf37 100644 --- a/src/meson.build +++ b/src/meson.build @@ -43,7 +43,8 @@ alpaca_sources = [ 'dialogs.py', 'local_instance.py', 'update_history.py', - 'available_models.json' + 'available_models.json', + 'available_models_descriptions.py' ] install_data(alpaca_sources, install_dir: moduledir) diff --git a/src/window.py b/src/window.py index 93d3610..8f1bf1c 100644 --- a/src/window.py +++ b/src/window.py @@ -27,7 +27,7 @@ from io import BytesIO from PIL import Image from pypdf import PdfReader from datetime import datetime -from . import dialogs, local_instance, connection_handler, update_history +from . import dialogs, local_instance, connection_handler, update_history, available_models_descriptions @Gtk.Template(resource_path='/com/jeffser/Alpaca/window.ui') class AlpacaWindow(Adw.ApplicationWindow): @@ -1045,7 +1045,7 @@ Generate a title following these rules: for name, model_info in self.available_models.items(): model = Adw.ActionRow( title = "{}{} by {}".format('🖼 ' if model_info["image"] else '', name.replace("-", " ").title(), model_info['author']), - subtitle = model_info["description"], # + ("\n\n Image Recognition " if model_info["image"] else ""), + subtitle = available_models_descriptions.descriptions[name], # + ("\n\n Image Recognition " if model_info["image"] else ""), #("Image recognition capable\n" if model_info["image"] else "") + #title = f"{name.capitalize()} by {model_info['author']}", #subtitle = f"" + (_("(Image recognition capable)\n") if model_info["image"] else "") + f"{model_info['description']}", diff --git a/update_available_models_description.py b/update_available_models_description.py new file mode 100644 index 0000000..87cb456 --- /dev/null +++ b/update_available_models_description.py @@ -0,0 +1,9 @@ +import json +with open('src/available_models.json', 'r') as f: + data = json.load(f) +results = 'descriptions = {\n' +for key, value in data.items(): + results += f" '{key}': _(\"{value['description']}\"),\n" +results += '}' +with open('src/available_models_descriptions.py', 'w+') as f: + f.write(results) \ No newline at end of file
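
Note on the generator script at the end of this patch: update_available_models_description.py rebuilds src/available_models_descriptions.py from src/available_models.json, wrapping every description in _(), presumably so the strings can be extracted for translation like the .po entries above. The sketch below restates that mechanism in a self-contained form; the quote escaping, the four-space indentation, the explicit UTF-8 handling, and the trailing newline are illustrative assumptions added here, not claims about the committed script.

#!/usr/bin/env python3
# Sketch: regenerate src/available_models_descriptions.py from available_models.json.
# Assumptions (not in the committed script): descriptions may contain double quotes,
# so they are escaped, and the generated module ends with a newline.
import json


def generate(json_path='src/available_models.json',
             out_path='src/available_models_descriptions.py'):
    with open(json_path, 'r', encoding='utf-8') as f:
        data = json.load(f)

    lines = ['descriptions = {']
    for name, info in data.items():
        # Escape backslashes and double quotes so the generated string literal stays valid.
        text = info['description'].replace('\\', '\\\\').replace('"', '\\"')
        # _() marks the string for gettext; window.py looks it up via descriptions[name].
        lines.append(f'    \'{name}\': _("{text}"),')
    lines.append('}')

    with open(out_path, 'w', encoding='utf-8') as f:
        f.write('\n'.join(lines) + '\n')


if __name__ == '__main__':
    generate()

Since the generated module calls _() at import time, window.py can only import it after gettext has installed _ into the builtins (presumably during application start-up); the descriptions are then looked up as available_models_descriptions.descriptions[name] when the model list is built.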