hueso 2025-03-06 20:19:14 -03:00
parent 1576952d8f
commit 193aef1d0b
2 changed files with 38 additions and 25 deletions

View File

@@ -8,6 +8,7 @@ pytest
 pytest-cov
 pytest-mypy-plugins
 result
-slixmpp
-slixmpp_omemo
+slixmpp==1.8.6
+slixmpp_omemo==0.9.1
 twine
+ollama

View File

@@ -2,7 +2,7 @@ from enum import Enum
 from typing import Dict, Optional
 import re
-import ollama
+from ollama import Client
 from slixmpp import ClientXMPP, JID
 from slixmpp.exceptions import IqTimeout, IqError
 from slixmpp.stanza import Message
@@ -26,8 +26,8 @@ class LEVELS(Enum):
 class LLMS(Enum):
-    LLAMA3 = "llama3"
-    MISTRAL = "mistral"
+    LLAMA3 = "based"
+    MISTRAL = "shreyshah/satoshi-7b-q4_k_m"
 
 class OllamaBot(ClientXMPP):
@@ -37,7 +37,7 @@ class OllamaBot(ClientXMPP):
     def __init__(self, jid: JidStr, password: str):
         ClientXMPP.__init__(self, jid, password)
-        self.model: LLMS = LLMS.LLAMA3
+        self.model: LLMS = LLMS.MISTRAL
         self.prefix_re: re.Pattern = re.compile(r"^%s" % self.cmd_prefix)
         self.cmd_re: re.Pattern = re.compile(
             r"^%s(?P<command>\w+)(?:\s+(?P<args>.*))?" % self.cmd_prefix
@@ -82,35 +82,35 @@ class OllamaBot(ClientXMPP):
     async def cmd_help(self, mto: JID, mtype: Optional[MessageTypes]) -> None:
         body = (
-            "Hello, I am the ollama_slixmpp_omemo_bot!\n\n"
-            "The following commands are available:\n\n"
-            f"{self.cmd_prefix}verbose - Send message or reply with log messages.\n\n"
-            f"{self.cmd_prefix}error -Send message or reply only on error.\n\n"
-            f"{self.cmd_prefix}llama3 - Enable the llama3 model.\n\n"
-            f"{self.cmd_prefix}mistral - Enable the mistral model.\n\n"
-            f"Typing anything else will be sent to {self.model.value}!\n\n"
+            "Hello, I am Satoshi Nakamoto!\n\n"
+            # "The following commands are available:\n\n"
+            # f"{self.cmd_prefix}verbose - Send message or reply with log messages.\n\n"
+            # f"{self.cmd_prefix}error -Send message or reply only on error.\n\n"
+            # f"{self.cmd_prefix}llama3 - Enable the llama3 model.\n\n"
+            # f"{self.cmd_prefix}mistral - Enable the mistral model.\n\n"
+            # f"Typing anything else will be sent to {self.model.value}!\n\n"
         )
-        return await self.encrypted_reply(mto, mtype, body)
+        return await self.plain_reply(mto, mtype, body)
 
     async def cmd_set_llama3(self, mto: JID, mtype: Optional[MessageTypes]) -> None:
         self.model = LLMS.LLAMA3
         body: str = f"""Model set to {LLMS.LLAMA3.value}"""
-        return await self.encrypted_reply(mto, mtype, body)
+        return await self.plain_reply(mto, mtype, body)
 
     async def cmd_set_mistral(self, mto: JID, mtype: Optional[MessageTypes]) -> None:
         self.model = LLMS.MISTRAL
         body: str = f"""Model set to {LLMS.MISTRAL.value}"""
-        return await self.encrypted_reply(mto, mtype, body)
+        return await self.plain_reply(mto, mtype, body)
 
     async def cmd_verbose(self, mto: JID, mtype: Optional[MessageTypes]) -> None:
         self.debug_level = LEVELS.DEBUG
         body: str = """Debug level set to 'verbose'."""
-        return await self.encrypted_reply(mto, mtype, body)
+        return await self.plain_reply(mto, mtype, body)
 
     async def cmd_error(self, mto: JID, mtype: Optional[MessageTypes]) -> None:
         self.debug_level = LEVELS.ERROR
         body: str = """Debug level set to 'error'."""
-        return await self.encrypted_reply(mto, mtype, body)
+        return await self.plain_reply(mto, mtype, body)
 
     async def message_handler(
         self, msg: Message, allow_untrusted: bool = False
@@ -120,11 +120,16 @@ class OllamaBot(ClientXMPP):
         mtype: Optional[MessageTypes] = msg["type"]
         if mtype not in ("chat", "normal"):
             return None
+        if not msg["body"]:
+            return None
+        log = open("log.txt", "a", 1)
         if not self["xep_0384"].is_encrypted(msg):
-            if self.debug_level == LEVELS.DEBUG:
-                await self.plain_reply(
-                    mto, mtype, f"Echo unencrypted message: {msg['body']}"
-                )
+            log.write(f"{mfrom}: {msg['body']}\n")
+            ollama_server_response: Optional[str] = self.message_to_ollama_server(
+                msg["body"]
+            )
+            await self.plain_reply(mto, mtype, f"{ollama_server_response or ''}")
             return None
         try:
             encrypted = msg["omemo_encrypted"]
@@ -133,13 +138,14 @@
             )
             if body is not None:
                 decoded: str = body.decode("utf8")
+                log.write(f"{mfrom}: {decoded}\n")
                 if self.is_command(decoded):
                     await self.handle_command(mto, mtype, decoded)
                 elif self.debug_level == LEVELS.DEBUG:
                     ollama_server_response: Optional[str] = (
                         self.message_to_ollama_server(decoded)
                     )
-                    await self.encrypted_reply(
+                    await self.plain_reply(
                         mto, mtype, f"{ollama_server_response or ''}"
                     )
         except MissingOwnKey:
@@ -149,7 +155,7 @@
                 "Error: Message not encrypted for me.",
             )
         except NoAvailableSession:
-            await self.encrypted_reply(
+            await self.plain_reply(
                 mto,
                 mtype,
                 "Error: Message uses an encrypted session I don't know about.",
@@ -178,6 +184,8 @@
         return None
 
     async def plain_reply(self, mto: JID, mtype: Optional[MessageTypes], body):
+        log = open("log.txt", "a", 1)
+        log.write(f"SATOSHI: {body}\n")
         msg = self.make_message(mto=mto, mtype=mtype)
         msg["body"] = body
         return msg.send()
@@ -227,7 +235,11 @@
     def message_to_ollama_server(self, msg: Optional[str]) -> Optional[str]:
         if msg is not None:
-            response = ollama.chat(
+            client = Client(
+                host="https://solarpunk.land/ollama/",
+                headers={"Authorization": "Bearer xxx"},
+            )
+            response = client.chat(
                 model=self.model.value,
                 messages=[{"role": "user", "content": f"{msg}"}],
            )
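
For context, a minimal standalone sketch of the ollama Client call this commit switches to. The host URL and model tag are taken from the diff; the bearer token is a placeholder (the committed value is redacted as "xxx"), and the final response["message"]["content"] access is an assumption, since the code that extracts the reply lies outside the changed lines.

# Sketch only, not part of the commit: talking to a self-hosted Ollama
# endpoint through ollama.Client, as the new message_to_ollama_server does.
from ollama import Client

client = Client(
    host="https://solarpunk.land/ollama/",            # endpoint used in the commit
    headers={"Authorization": "Bearer <token>"},      # placeholder; real token not shown
)
response = client.chat(
    model="shreyshah/satoshi-7b-q4_k_m",              # LLMS.MISTRAL.value after this commit
    messages=[{"role": "user", "content": "hello"}],
)
print(response["message"]["content"])                 # assumed reply extraction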