24 Commits
2.0.0 ... 2.0.2

Author SHA1 Message Date
jeffser
295429acdf Changed version 2024-09-11 23:46:59 -06:00
jeffser
a842258e9e Preparing for 2.0.2 2024-09-11 23:28:08 -06:00
jeffser
053efabfc8 Removed pkgbuild 2024-09-11 23:16:28 -06:00
Jeffry Samuel
a12083bfe9 Update README.md 2024-09-11 23:16:00 -06:00
jeffser
672b8098bd package name should be lowercase 2024-09-11 23:11:39 -06:00
jeffser
db03cce49f Made it use the stable version instead of git 2024-09-11 22:56:02 -06:00
jeffser
e8b0733c32 Changed package name to com.jeffser.Alpaca 2024-09-11 22:49:25 -06:00
jeffser
68d970716f Fixes for system installation 2024-09-11 22:44:07 -06:00
jeffser
a0338bcccb Fixed launch error when displaying welcome dialog 2024-09-11 22:33:11 -06:00
jeffser
eb92126e4b Merge branch 'main' of github.com-jeffser:Jeffser/Alpaca 2024-09-11 22:31:25 -06:00
jeffser
d26caea5f0 Changed behavior of Welcome dialog (again yeah) 2024-09-11 22:31:18 -06:00
Jeffry Samuel
6d339aad5e Added system installation instructions 2024-09-11 22:09:56 -06:00
jeffser
e7b6da4f62 Merge branch 'main' of github.com-jeffser:Jeffser/Alpaca 2024-09-11 22:04:39 -06:00
jeffser
37e36add45 Added makepkg 2024-09-11 22:04:30 -06:00
Jeffry Samuel
ed2501adf4 Update README.md 2024-09-11 17:36:20 -06:00
jeffser
83db9fd9d4 Preparing version 2.0.1 2024-09-11 16:34:07 -06:00
jeffser
6cb49cfc98 Removed version from extensions 2024-09-11 15:35:18 -06:00
jeffser
a928d2c074 Added device=all because of AMD GPUs 2024-09-11 15:10:01 -06:00
jeffser
4d35cea229 Added libnuma to dependencies 2024-09-11 13:06:30 -06:00
jeffser
51d2326dee Fixed clear chat 2024-09-11 10:51:04 -06:00
jeffser
80dcae194b Fixed Ollama instance sometimes failing to give version 2024-09-11 10:36:07 -06:00
jeffser
50759adb8e Merge branch 'main' of github.com-jeffser:Jeffser/Alpaca 2024-09-11 10:24:22 -06:00
jeffser
f46d16d257 Changed launch / welcome dialog behavior 2024-09-11 10:24:12 -06:00
Jeffry Samuel
5a9eeefaa7 Update README.md 2024-09-10 20:18:57 -06:00
8 changed files with 84 additions and 31 deletions

View File

@@ -8,6 +8,9 @@ Alpaca is an [Ollama](https://github.com/ollama/ollama) client where you can man
---
> [!NOTE]
> Please check out [this discussion](https://github.com/Jeffser/Alpaca/discussions/292), I want to start developing a new app alongside Alpaca but I need some suggestions, thanks!
> [!WARNING]
> This project is not affiliated at all with Ollama; I'm not responsible for any damages to your device or software caused by running code given by any AI models.
@@ -83,3 +86,15 @@ Want to add a language? Visit [this discussion](https://github.com/Jeffser/Alpac
- [Aleksana](https://github.com/Aleksanaa) for her help with better handling of directories
- Sponsors for giving me enough money to be able to take a ride to my campus every time I need to <3
- Everyone that has shared kind words of encouragement!
---
## Dependencies
- [Requests](https://github.com/psf/requests)
- [Pillow](https://github.com/python-pillow/Pillow)
- [Pypdf](https://github.com/py-pdf/pypdf)
- [Pytube](https://github.com/pytube/pytube)
- [Html2Text](https://github.com/aaronsw/html2text)
- [Ollama](https://github.com/ollama/ollama)
- [Numactl](https://github.com/numactl/numactl)

View File

@@ -8,14 +8,13 @@
"--share=network",
"--share=ipc",
"--socket=fallback-x11",
"--device=dri",
"--device=all",
"--socket=wayland",
"--filesystem=/sys/module/amdgpu:ro",
"--env=LD_LIBRARY_PATH=/app/lib:/usr/lib/x86_64-linux-gnu/GL/default/lib:/usr/lib/x86_64-linux-gnu/openh264/extra:/usr/lib/x86_64-linux-gnu/openh264/extra:/usr/lib/sdk/llvm15/lib:/usr/lib/x86_64-linux-gnu/GL/default/lib:/usr/lib/ollama:/app/plugins/AMD/lib/ollama"
],
"add-extensions": {
"com.jeffser.Alpaca.Plugins": {
"version": "1.0",
"add-ld-path": "/app/plugins/AMD/lib/ollama",
"directory": "plugins",
"no-autodownload": true,
@@ -151,6 +150,22 @@
}
]
},
{
"name": "libnuma",
"buildsystem": "autotools",
"build-commands": [
"autoreconf -i",
"make",
"make install"
],
"sources": [
{
"type": "archive",
"url": "https://github.com/numactl/numactl/releases/download/v2.0.18/numactl-2.0.18.tar.gz",
"sha256": "b4fc0956317680579992d7815bc43d0538960dc73aa1dd8ca7e3806e30bc1274"
}
]
},
{
"name" : "alpaca",
"builddir" : true,

View File

@@ -78,6 +78,27 @@
<url type="contribute">https://github.com/Jeffser/Alpaca/discussions/154</url>
<url type="vcs-browser">https://github.com/Jeffser/Alpaca</url>
<releases>
<release version="2.0.2" date="2024-09-11">
<url type="details">https://github.com/Jeffser/Alpaca/releases/tag/2.0.2</url>
<description>
<p>Fixes</p>
<ul>
<li>Fixed error on first run (welcome dialog)</li>
<li>Fixed checker for Ollama instance (used on system packages)</li>
</ul>
</description>
</release>
<release version="2.0.1" date="2024-09-11">
<url type="details">https://github.com/Jeffser/Alpaca/releases/tag/2.0.1</url>
<description>
<p>Fixes</p>
<ul>
<li>Fixed 'clear chat' option</li>
<li>Fixed welcome dialog causing the local instance to not launch</li>
<li>Fixed support for AMD GPUs</li>
</ul>
</description>
</release>
<release version="2.0.0" date="2024-09-01">
<url type="details">https://github.com/Jeffser/Alpaca/releases/tag/2.0.0</url>
<description>

View File

@@ -1,5 +1,5 @@
project('Alpaca', 'c',
version: '2.0.0',
version: '2.0.2',
meson_version: '>= 0.62.0',
default_options: [ 'warning_level=2', 'werror=false', ],
)

View File

@@ -107,8 +107,11 @@ class instance():
logger.info("Starting Alpaca's Ollama instance...")
logger.debug(params)
logger.info("Started Alpaca's Ollama instance")
v_str = subprocess.check_output("ollama -v", shell=True).decode('utf-8')
logger.info('Ollama version: {}'.format(v_str.split('client version is ')[1].strip()))
try:
v_str = subprocess.check_output("ollama -v", shell=True).decode('utf-8')
logger.info(v_str.split('\n')[1].strip('Warning: ').strip())
except Exception as e:
logger.error(e)
self.instance = instance
if not self.idle_timer:
self.start_timer()

View File

@@ -81,7 +81,7 @@ class chat(Gtk.ScrolledWindow):
def clear_chat(self):
if self.busy:
self.stop_message()
self.message = {}
self.messages = {}
self.stop_message()
for widget in list(self.container):
self.container.remove(widget)

View File

@@ -14,10 +14,11 @@ logger = logging.getLogger(__name__)
def clear_chat_response(self, dialog, task):
if dialog.choose_finish(task) == "clear":
self.chat_list_box.get_current_chat().clear_chat()
self.chat_list_box.get_current_chat().show_welcome_screen(len(self.model_manager.get_model_list()) > 0)
self.save_history(self.chat_list_box.get_current_chat())
def clear_chat(self):
if self.bot_message is not None:
if self.chat_list_box.get_current_chat().busy:
self.show_toast(_("Chat cannot be cleared while receiving a message"), self.main_overlay)
return
dialog = Adw.AlertDialog(

View File

@@ -50,7 +50,6 @@ class AlpacaWindow(Adw.ApplicationWindow):
_ = gettext.gettext
#Variables
ready = False #Used with welcome dialog
attachments = {}
header_bar = Gtk.Template.Child()
@@ -193,11 +192,9 @@ class AlpacaWindow(Adw.ApplicationWindow):
if index == carousel.get_n_pages()-1:
self.welcome_next_button.set_label(_("Close"))
self.welcome_next_button.set_tooltip_text(_("Close"))
self.welcome_next_button.set_sensitive(self.ready)
else:
self.welcome_next_button.set_label(_("Next"))
self.welcome_next_button.set_tooltip_text(_("Next"))
self.welcome_next_button.set_sensitive(True)
@Gtk.Template.Callback()
def welcome_previous_button_activate(self, button):
@@ -209,6 +206,11 @@ class AlpacaWindow(Adw.ApplicationWindow):
self.welcome_carousel.scroll_to(self.welcome_carousel.get_nth_page(self.welcome_carousel.get_position()+1), True)
else:
self.welcome_dialog.force_close()
if shutil.which('ollama'):
threading.Thread(target=self.prepare_alpaca, args=(11435, '', False, {'temperature': 0.7, 'seed': 0, 'keep_alive': 5}, {}, '', 0, True, True)).start()
else:
threading.Thread(target=self.prepare_alpaca, args=(11435, 'http://0.0.0.0:11434', True, {'temperature': 0.7, 'seed': 0, 'keep_alive': 5}, {}, '', 0, True, False)).start()
self.powersaver_warning_switch.set_active(True)
@Gtk.Template.Callback()
def change_remote_connection(self, switcher, *_):
@@ -329,16 +331,17 @@ class AlpacaWindow(Adw.ApplicationWindow):
@Gtk.Template.Callback()
def model_search_changed(self, entry):
results = 0
for model in list(self.model_manager.available_list):
model.set_visible(re.search(entry.get_text(), '{} {} {} {} {}'.format(model.get_name(), model.model_title, model.model_author, model.model_description, (_('image') if model.image_recognition else '')), re.IGNORECASE))
if model.get_visible():
results += 1
if entry.get_text() and results == 0:
self.no_results_page.set_visible(True)
self.model_scroller.set_visible(False)
else:
self.model_scroller.set_visible(True)
self.no_results_page.set_visible(False)
if self.model_manager:
for model in list(self.model_manager.available_list):
model.set_visible(re.search(entry.get_text(), '{} {} {} {} {}'.format(model.get_name(), model.model_title, model.model_author, model.model_description, (_('image') if model.image_recognition else '')), re.IGNORECASE))
if model.get_visible():
results += 1
if entry.get_text() and results == 0:
self.no_results_page.set_visible(True)
self.model_scroller.set_visible(False)
else:
self.model_scroller.set_visible(True)
self.no_results_page.set_visible(False)
@Gtk.Template.Callback()
def on_clipboard_paste(self, textview):
@@ -562,7 +565,7 @@ Generate a title following these rules:
def save_history(self, chat:chat_widget.chat=None):
logger.debug("Saving history")
logger.info("Saving history")
history = None
if chat and os.path.exists(os.path.join(data_dir, "chats", "chats.json")):
history = {'chats': {chat.get_name(): {'messages': chat.messages_to_dict()}}}
@@ -831,16 +834,13 @@ Generate a title following these rules:
GLib.idle_add(self.load_history)
self.launch_level_bar.set_value(5)
if self.ollama_instance.remote:
time.sleep(.5) #This is to prevent errors with gtk creating the launch dialog and closing it too quickly
#Close launch dialog
if show_launch_dialog:
GLib.idle_add(self.launch_dialog.force_close)
#Save preferences
if save:
self.save_server_config()
GLib.idle_add(self.welcome_next_button.set_sensitive, True)
self.ready = True
time.sleep(.5) #This is to prevent errors with gtk creating the launch dialog and closing it too quickly
#Close launch dialog
GLib.idle_add(self.launch_dialog.force_close)
def __init__(self, **kwargs):
super().__init__(**kwargs)
@@ -908,8 +908,6 @@ Generate a title following these rules:
threading.Thread(target=self.prepare_alpaca, args=(11435, '', False, {'temperature': 0.7, 'seed': 0, 'keep_alive': 5}, {}, '', 0, True, True)).start()
self.powersaver_warning_switch.set_active(True)
else:
threading.Thread(target=self.prepare_alpaca, args=(11435, '', False, {'temperature': 0.7, 'seed': 0, 'keep_alive': 5}, {}, '', 0, True, False)).start()
self.powersaver_warning_switch.set_active(True)
self.welcome_dialog.present(self)
if self.powersaver_warning_switch.get_active():