From 27126736a43a337fbfbb5de8558d59d821d55e13 Mon Sep 17 00:00:00 2001
From: jeffser
Date: Mon, 14 Oct 2024 15:59:05 -0600
Subject: [PATCH] Fixed reconnection dialog not selecting 'use local instance'

---
 src/connection_handler.py | 5 ++++-
 src/window.py             | 2 +-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/src/connection_handler.py b/src/connection_handler.py
index 5dbae1d..07cb459 100644
--- a/src/connection_handler.py
+++ b/src/connection_handler.py
@@ -22,7 +22,10 @@ def log_output(pipe):
                     if 'msg="model request too large for system"' in line:
                         window.show_toast(_("Model request too large for system"), window.main_overlay)
                     elif 'msg="amdgpu detected, but no compatible rocm library found.' in line:
-                        window.ollama_information_label.set_label(_("AMD GPU detected but the extension is missing, Ollama will use CPU"))
+                        if bool(os.getenv("FLATPAK_ID")):
+                            window.ollama_information_label.set_label(_("AMD GPU detected but the extension is missing, Ollama will use CPU"))
+                        else:
+                            window.ollama_information_label.set_label(_("AMD GPU detected but ROCm is missing, Ollama will use CPU"))
                         window.ollama_information_label.set_css_classes(['dim-label', 'error'])
                     elif 'msg="amdgpu is supported"' in line:
                         window.ollama_information_label.set_label(_("Using AMD GPU type '{}'").format(line.split('=')[-1]))
diff --git a/src/window.py b/src/window.py
index d10dd78..437b0cc 100644
--- a/src/window.py
+++ b/src/window.py
@@ -631,7 +631,7 @@ Generate a title following these rules:
         if self.ollama_instance.remote:
             options = {
                 _("Close Alpaca"): {"callback": lambda *_: self.get_application().quit(), "appearance": "destructive"},
-                _("Use Local Instance"): {"callback": lambda *_: window.remote_connection_switch.set_active(False)},
+                _("Use Local Instance"): {"callback": lambda *_: self.remote_connection_switch.set_active(False)},
                 _("Connect"): {"callback": lambda url, bearer: generic_actions.connect_remote(url,bearer), "appearance": "suggested"}
             }
             entries = [
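
Note (not part of the patch): the window.py hunk swaps a reference to the name `window`, which per the commit subject was not resolving in that scope (clicking "Use Local Instance" never toggled the switch), for `self`, the enclosing window object; a lambda that references an undefined name only fails when the callback actually runs. The connection_handler.py hunk uses the FLATPAK_ID environment variable, which Flatpak sets inside the sandbox, to pick the right warning: the Flatpak build gets ROCm via a separate extension, while a native install needs the ROCm libraries themselves. Below is a minimal, hypothetical sketch of both behaviors; names such as running_under_flatpak and DemoWindow are illustrative only and are not Alpaca's API.

import os

def running_under_flatpak() -> bool:
    # Flatpak exports FLATPAK_ID (the application ID) inside the sandbox,
    # so its presence is a simple way to detect a Flatpak environment.
    return os.getenv("FLATPAK_ID") is not None

def rocm_warning() -> str:
    # Mirrors the connection_handler.py change: tell the user what is
    # actually missing for their install type.
    if running_under_flatpak():
        return "AMD GPU detected but the extension is missing, Ollama will use CPU"
    return "AMD GPU detected but ROCm is missing, Ollama will use CPU"

class DemoWindow:
    # Hypothetical reduction of the window.py bug: the dialog built fine,
    # and the error only surfaced when the callback was invoked.
    def __init__(self):
        self.remote = True

    def make_callback(self):
        # Broken (pre-patch shape):  lambda *_: window.use_local()
        # Fixed (post-patch shape):  the lambda closes over 'self'.
        return lambda *_: self.use_local()

    def use_local(self):
        self.remote = False

if __name__ == "__main__":
    print(rocm_warning())
    w = DemoWindow()
    w.make_callback()()   # the fixed callback toggles the flag as intended
    assert w.remote is False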