Fixed reconnection dialog not selecting 'use local instance'

jeffser 2024-10-14 15:59:05 -06:00
parent c9cf2bfefc
commit 27126736a4
2 changed files with 5 additions and 2 deletions


@@ -22,7 +22,10 @@ def log_output(pipe):
         if 'msg="model request too large for system"' in line:
             window.show_toast(_("Model request too large for system"), window.main_overlay)
         elif 'msg="amdgpu detected, but no compatible rocm library found.' in line:
-            window.ollama_information_label.set_label(_("AMD GPU detected but the extension is missing, Ollama will use CPU"))
+            if bool(os.getenv("FLATPAK_ID")):
+                window.ollama_information_label.set_label(_("AMD GPU detected but <a href='appstream://com.jeffser.Alpaca.Plugins.AMD'>the extension</a> is missing, Ollama will use CPU"))
+            else:
+                window.ollama_information_label.set_label(_("AMD GPU detected but ROCm is missing, Ollama will use CPU"))
             window.ollama_information_label.set_css_classes(['dim-label', 'error'])
         elif 'msg="amdgpu is supported"' in line:
             window.ollama_information_label.set_label(_("Using AMD GPU type '{}'").format(line.split('=')[-1]))

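Note on this hunk: the wording of the ROCm warning now depends on how Alpaca is packaged. The Flatpak runtime exports the FLATPAK_ID environment variable inside the sandbox, so its presence distinguishes the Flatpak build (which can install the com.jeffser.Alpaca.Plugins.AMD extension) from a native install (which needs system ROCm). A minimal sketch of that check, with a hypothetical function name:

import os

def rocm_missing_message() -> str:
    # FLATPAK_ID is set by the Flatpak runtime for sandboxed apps;
    # os.getenv() returns None elsewhere, which bool() maps to False.
    if bool(os.getenv("FLATPAK_ID")):
        # Flatpak build: the ROCm runtime ships as a separate extension.
        return "AMD GPU detected but the extension is missing, Ollama will use CPU"
    # Native install: the user has to install ROCm itself.
    return "AMD GPU detected but ROCm is missing, Ollama will use CPU"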

@@ -631,7 +631,7 @@ Generate a title following these rules:
         if self.ollama_instance.remote:
             options = {
                 _("Close Alpaca"): {"callback": lambda *_: self.get_application().quit(), "appearance": "destructive"},
-                _("Use Local Instance"): {"callback": lambda *_: window.remote_connection_switch.set_active(False)},
+                _("Use Local Instance"): {"callback": lambda *_: self.remote_connection_switch.set_active(False)},
                 _("Connect"): {"callback": lambda url, bearer: generic_actions.connect_remote(url,bearer), "appearance": "suggested"}
             }
             entries = [
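
Note on this hunk (the fix named in the commit title): the dialog is built inside a method of the window class, where presumably no name `window` is in scope, so the old callback failed when "Use Local Instance" was clicked and the switch never toggled. Routing the call through `self`, which the lambda closes over, fixes it. A minimal standalone sketch of the scoping difference, with hypothetical class and attribute names:

class Switch:
    def __init__(self):
        self.active = True

    def set_active(self, value: bool):
        self.active = value

class Window:
    def __init__(self):
        self.remote_connection_switch = Switch()

    def build_callback(self):
        # Fixed form: the lambda captures `self`, so it always toggles the
        # switch belonging to this window instance. (The broken form
        # referenced a free variable `window`, which Python resolves as a
        # global only when the lambda is called, not when it is defined.)
        return lambda *_: self.remote_connection_switch.set_active(False)

w = Window()
w.build_callback()()
assert w.remote_connection_switch.active is False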