From 61f9e187bde1fca821870fd30801470c5aea979f Mon Sep 17 00:00:00 2001
From: jeffser
Date: Mon, 14 Oct 2024 16:28:13 -0600
Subject: [PATCH] Added link to AMD Support label

---
 src/connection_handler.py | 2 +-
 src/window.ui             | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/connection_handler.py b/src/connection_handler.py
index 07cb459..db2b071 100644
--- a/src/connection_handler.py
+++ b/src/connection_handler.py
@@ -23,7 +23,7 @@ def log_output(pipe):
         window.show_toast(_("Model request too large for system"), window.main_overlay)
     elif 'msg="amdgpu detected, but no compatible rocm library found.' in line:
         if bool(os.getenv("FLATPAK_ID")):
-            window.ollama_information_label.set_label(_("AMD GPU detected but the extension is missing, Ollama will use CPU"))
+            window.ollama_information_label.set_label(_("AMD GPU detected but the extension is missing, Ollama will use CPU"))
         else:
             window.ollama_information_label.set_label(_("AMD GPU detected but ROCm is missing, Ollama will use CPU"))
         window.ollama_information_label.set_css_classes(['dim-label', 'error'])
diff --git a/src/window.ui b/src/window.ui
index dce099d..11fdeca 100644
--- a/src/window.ui
+++ b/src/window.ui
@@ -475,6 +475,7 @@
         true
+        true
         Integrated Ollama instance is not running
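
Note: the removed and added Python lines above read identically, and the window.ui hunk shows only bare "true" values, because the Pango/XML markup did not survive extraction. Going by the subject line, the new label presumably embeds an <a href> link to an AMD Support page, and the added window.ui property presumably enables markup rendering on the label. A minimal sketch of that pattern, assuming GTK 4 and a hypothetical URL (neither the URL nor the exact property name is confirmed by the patch text):

    # Sketch only: the URL and label wording are assumptions, not taken from
    # the patch. GtkLabel renders <a href="..."> anchors only when use-markup
    # is enabled, which is what the window.ui hunk presumably turns on
    # (<property name="use-markup">true</property>).
    import gi
    gi.require_version('Gtk', '4.0')
    from gi.repository import Gtk

    label = Gtk.Label()
    label.set_use_markup(True)  # same effect as the window.ui property
    label.set_label(
        'AMD GPU detected but the extension is missing, Ollama will use CPU '
        '(<a href="https://example.com/amd-support">AMD Support</a>)'  # hypothetical URL
    )

With use-markup off, the raw <a href> text would be shown literally instead of as a clickable link, which is why the one-line window.ui change has to accompany the label change.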