# Compare commits

9 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 83db9fd9d4 | |
| | 6cb49cfc98 | |
| | a928d2c074 | |
| | 4d35cea229 | |
| | 51d2326dee | |
| | 80dcae194b | |
| | 50759adb8e | |
| | f46d16d257 | |
| | 5a9eeefaa7 | |
**README.md**

@@ -8,6 +8,9 @@ Alpaca is an [Ollama](https://github.com/ollama/ollama) client where you can man
 
+> [!NOTE]
+> Please checkout [this discussion](https://github.com/Jeffser/Alpaca/discussions/292), I want to start developing a new app alongside Alpaca but I need some suggestions, thanks!
+
 > [!WARNING]
 > This project is not affiliated at all with Ollama, I'm not responsible for any damages to your device or software caused by running code given by any AI models.
 
**Flatpak manifest**

@@ -8,14 +8,13 @@
         "--share=network",
         "--share=ipc",
         "--socket=fallback-x11",
-        "--device=dri",
+        "--device=all",
         "--socket=wayland",
         "--filesystem=/sys/module/amdgpu:ro",
         "--env=LD_LIBRARY_PATH=/app/lib:/usr/lib/x86_64-linux-gnu/GL/default/lib:/usr/lib/x86_64-linux-gnu/openh264/extra:/usr/lib/x86_64-linux-gnu/openh264/extra:/usr/lib/sdk/llvm15/lib:/usr/lib/x86_64-linux-gnu/GL/default/lib:/usr/lib/ollama:/app/plugins/AMD/lib/ollama"
     ],
     "add-extensions": {
         "com.jeffser.Alpaca.Plugins": {
-            "version": "1.0",
             "add-ld-path": "/app/plugins/AMD/lib/ollama",
             "directory": "plugins",
             "no-autodownload": true,

@@ -151,6 +150,22 @@
             }
         ]
     },
+    {
+        "name": "libnuma",
+        "buildsystem": "autotools",
+        "build-commands": [
+            "autoreconf -i",
+            "make",
+            "make install"
+        ],
+        "sources": [
+            {
+                "type": "archive",
+                "url": "https://github.com/numactl/numactl/releases/download/v2.0.18/numactl-2.0.18.tar.gz",
+                "sha256": "b4fc0956317680579992d7815bc43d0538960dc73aa1dd8ca7e3806e30bc1274"
+            }
+        ]
+    },
     {
         "name" : "alpaca",
         "builddir" : true,
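Taken together, these manifest edits widen GPU device access (presumably so /dev/kfd, which AMD's compute stack uses, is visible inside the sandbox) and vendor libnuma, which ROCm's runtime links against. A minimal sanity-check sketch, assuming the manifest is named com.jeffser.Alpaca.json (a hypothetical path) and uses the standard finish-args/modules layout:

```python
import json

# Hypothetical path: adjust to the repo's actual manifest filename.
MANIFEST = "com.jeffser.Alpaca.json"

with open(MANIFEST) as f:
    manifest = json.load(f)

# The broader device permission replaces "--device=dri".
assert "--device=all" in manifest["finish-args"]

# The new libnuma module should be present among the build modules.
names = [m.get("name") for m in manifest["modules"] if isinstance(m, dict)]
assert "libnuma" in names

print("manifest checks passed")
```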
**AppStream metainfo**

@@ -78,6 +78,17 @@
     <url type="contribute">https://github.com/Jeffser/Alpaca/discussions/154</url>
     <url type="vcs-browser">https://github.com/Jeffser/Alpaca</url>
     <releases>
+        <release version="2.0.1" date="2024-09-11">
+            <url type="details">https://github.com/Jeffser/Alpaca/releases/tag/2.0.1</url>
+            <description>
+                <p>Fixes</p>
+                <ul>
+                    <li>Fixed 'clear chat' option</li>
+                    <li>Fixed welcome dialog causing the local instance to not launch</li>
+                    <li>Fixed support for AMD GPUs</li>
+                </ul>
+            </description>
+        </release>
         <release version="2.0.0" date="2024-09-01">
             <url type="details">https://github.com/Jeffser/Alpaca/releases/tag/2.0.0</url>
             <description>
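AppStream metainfo lists releases newest-first, so tools read the first `<release>` element as the current version. A short sketch of pulling the new 2.0.1 entry back out with the standard library; the metainfo filename is an assumption, adjust to the repo's actual path:

```python
import xml.etree.ElementTree as ET

# Hypothetical filename; AppStream files are conventionally <app-id>.metainfo.xml.
tree = ET.parse("com.jeffser.Alpaca.metainfo.xml")
latest = tree.getroot().find("releases/release")  # first entry = newest release

print(latest.get("version"), latest.get("date"))
for li in latest.iter("li"):
    print("-", li.text)
```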
**meson.build**

@@ -1,5 +1,5 @@
 project('Alpaca', 'c',
-        version: '2.0.0',
+        version: '2.0.1',
         meson_version: '>= 0.62.0',
         default_options: [ 'warning_level=2', 'werror=false', ],
 )
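A companion to the metainfo sketch above: after a release bump, the meson project version should agree with the newest `<release>` entry. A small extraction sketch (file path assumed):

```python
import re

# Anchor at line start so the meson_version line is not matched by accident.
src = open("meson.build").read()
version = re.search(r"^\s*version: '([^']+)'", src, re.M).group(1)
print(version)  # expected here: 2.0.1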
**Ollama instance handler (Python)**

@@ -107,8 +107,11 @@ class instance():
         logger.info("Starting Alpaca's Ollama instance...")
         logger.debug(params)
         logger.info("Started Alpaca's Ollama instance")
-        v_str = subprocess.check_output("ollama -v", shell=True).decode('utf-8')
-        logger.info('Ollama version: {}'.format(v_str.split('client version is ')[1].strip()))
+        try:
+            v_str = subprocess.check_output("ollama -v", shell=True).decode('utf-8')
+            logger.info(v_str.split('\n')[1].strip('Warning: ').strip())
+        except Exception as e:
+            logger.error(e)
         self.instance = instance
         if not self.idle_timer:
             self.start_timer()
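The old code assumed `ollama -v` always prints `client version is ...`; if the binary is missing or the output format changes, the `split(...)[1]` raises and takes the whole start-up path down with it. The patch isolates the version probe in a try/except. A standalone sketch of the same parsing, run against an assumed sample of the two-line output `ollama -v` prints when no server is reachable (the sample text is not taken from the diff). One subtlety worth a comment: `str.strip('Warning: ')` strips a *set* of characters from both ends, not the literal prefix, which happens to be harmless for a version string ending in a digit:

```python
import logging
import subprocess

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Assumed sample output; the real text may differ between Ollama versions.
SAMPLE = ("Warning: could not connect to a running Ollama instance\n"
          "Warning: client version is 0.3.9\n")

def log_ollama_version(v_str: str) -> None:
    # Same shape as the patched code: take the second line and trim the
    # "Warning: " characters off the ends. str.strip() removes any of the
    # listed characters, not the literal prefix string.
    logger.info(v_str.split('\n')[1].strip('Warning: ').strip())

try:
    out = subprocess.check_output("ollama -v", shell=True).decode('utf-8')
except Exception as e:  # ollama missing entirely: fall back to the sample
    logger.error(e)
    out = SAMPLE
log_ollama_version(out)
```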
**Chat widget (Python)**

@@ -81,7 +81,7 @@ class chat(Gtk.ScrolledWindow):
     def clear_chat(self):
         if self.busy:
             self.stop_message()
-        self.message = {}
+        self.messages = {}
         self.stop_message()
         for widget in list(self.container):
             self.container.remove(widget)
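This one-character change is the "Fixed 'clear chat' option" item from the release notes: assigning to `self.message` silently creates a new attribute and leaves the real `self.messages` dict untouched, so cleared chats kept their old messages in memory. A minimal reproduction of the bug class, with hypothetical names and no GTK dependency:

```python
class Chat:
    def __init__(self):
        self.messages = {"id1": "hello"}

    def clear_chat_buggy(self):
        self.message = {}   # typo: new attribute, self.messages is untouched

    def clear_chat_fixed(self):
        self.messages = {}

chat = Chat()
chat.clear_chat_buggy()
print(chat.messages)  # {'id1': 'hello'} (still there)
chat.clear_chat_fixed()
print(chat.messages)  # {}
```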
**Dialog helpers (Python)**

@@ -14,10 +14,11 @@ logger = logging.getLogger(__name__)
 
 def clear_chat_response(self, dialog, task):
     if dialog.choose_finish(task) == "clear":
-        self.chat_list_box.get_current_chat().clear_chat()
+        self.chat_list_box.get_current_chat().show_welcome_screen(len(self.model_manager.get_model_list()) > 0)
+        self.save_history(self.chat_list_box.get_current_chat())
 
 def clear_chat(self):
-    if self.bot_message is not None:
+    if self.chat_list_box.get_current_chat().busy:
         self.show_toast(_("Chat cannot be cleared while receiving a message"), self.main_overlay)
         return
     dialog = Adw.AlertDialog(
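For context, this is libadwaita's async alert-dialog pattern: `choose()` takes a callback, and `choose_finish()` resolves the async result to the chosen response id (`"clear"` above). A minimal sketch of the same pattern, assuming libadwaita 1.5+ where `Adw.AlertDialog` is available; the function names are hypothetical and the dialog needs a live GTK window as `parent` to actually run:

```python
import gi
gi.require_version("Gtk", "4.0")
gi.require_version("Adw", "1")
from gi.repository import Adw, Gtk

def on_response(dialog, task):
    # choose_finish() turns the async result into the response id string.
    if dialog.choose_finish(task) == "clear":
        print("user confirmed clear")

def show_clear_dialog(parent: Gtk.Widget):
    dialog = Adw.AlertDialog(
        heading="Clear Chat?",
        body="Are you sure you want to clear the chat?",
    )
    dialog.add_response("cancel", "Cancel")
    dialog.add_response("clear", "Clear")
    dialog.set_response_appearance("clear", Adw.ResponseAppearance.DESTRUCTIVE)
    dialog.choose(parent, None, on_response)
```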
**Main window (Python)**

@@ -193,11 +193,9 @@ class AlpacaWindow(Adw.ApplicationWindow):
         if index == carousel.get_n_pages()-1:
             self.welcome_next_button.set_label(_("Close"))
             self.welcome_next_button.set_tooltip_text(_("Close"))
-            self.welcome_next_button.set_sensitive(self.ready)
         else:
             self.welcome_next_button.set_label(_("Next"))
             self.welcome_next_button.set_tooltip_text(_("Next"))
-            self.welcome_next_button.set_sensitive(True)
 
     @Gtk.Template.Callback()
     def welcome_previous_button_activate(self, button):
@@ -209,6 +207,8 @@ class AlpacaWindow(Adw.ApplicationWindow):
             self.welcome_carousel.scroll_to(self.welcome_carousel.get_nth_page(self.welcome_carousel.get_position()+1), True)
         else:
             self.welcome_dialog.force_close()
+            if not self.ready:
+                self.launch_dialog.present(self)
 
     @Gtk.Template.Callback()
     def change_remote_connection(self, switcher, *_):
@@ -562,7 +562,7 @@ Generate a title following these rules:
 
 
     def save_history(self, chat:chat_widget.chat=None):
-        logger.debug("Saving history")
+        logger.info("Saving history")
         history = None
         if chat and os.path.exists(os.path.join(data_dir, "chats", "chats.json")):
             history = {'chats': {chat.get_name(): {'messages': chat.messages_to_dict()}}}
@@ -831,16 +831,14 @@ Generate a title following these rules:
         GLib.idle_add(self.load_history)
         self.launch_level_bar.set_value(5)
 
-        if self.ollama_instance.remote:
-            time.sleep(.5) #This is to prevent errors with gtk creating the launch dialog and closing it too quickly
-        #Close launch dialog
-        if show_launch_dialog:
-            GLib.idle_add(self.launch_dialog.force_close)
         #Save preferences
         if save:
             self.save_server_config()
-        GLib.idle_add(self.welcome_next_button.set_sensitive, True)
+        time.sleep(.5) #This is to prevent errors with gtk creating the launch dialog and closing it too quickly
         self.ready = True
+        #Close launch dialog
+        GLib.idle_add(self.launch_dialog.force_close)
 
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
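The recurring `GLib.idle_add(...)` calls in this start-up path exist because it runs off the main thread: GTK widgets must only be touched from the main loop, so the worker queues its UI work (closing the launch dialog, loading history) as idle callbacks, and the `time.sleep(.5)` pads out the race between creating the launch dialog and destroying it. A minimal, self-contained sketch of that hand-off pattern; the window and function names are hypothetical, not Alpaca's:

```python
import threading
import time

import gi
gi.require_version("Gtk", "4.0")
from gi.repository import GLib, Gtk

def start_instance(window):
    # Runs in a worker thread: do the slow work here, never touch widgets.
    time.sleep(1)  # stand-in for launching and probing the Ollama instance
    window.ready = True
    # Queue the UI update onto the GTK main loop instead of calling it here.
    GLib.idle_add(window.close)  # e.g. force_close() on a launch dialog

def on_activate(app):
    window = Gtk.ApplicationWindow(application=app, title="launching...")
    window.ready = False
    window.present()
    threading.Thread(target=start_instance, args=(window,), daemon=True).start()

app = Gtk.Application(application_id="com.example.IdleAddDemo")
app.connect("activate", on_activate)
app.run(None)
```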