♻️ working poc

Laureηt 2023-05-22 21:19:11 +02:00
parent d1b3b55ee3
commit c7a6c050d4
Signed by: Laurent
SSH key fingerprint: SHA256:kZEpW8cMJ54PDeCvOhzreNr4FSh6R13CMGH/POoO8DI
2 changed files with 138 additions and 42 deletions

@@ -2,8 +2,10 @@
 import asyncio
 import logging
+import time
 from textwrap import dedent

+import click
 from llama_cpp import Llama
 from nio import AsyncClient, MatrixRoom, RoomMessageText
@@ -15,17 +17,24 @@ class LLMClient(AsyncClient):
     def __init__(
         self,
-        user: str,
+        username: str,
         homeserver: str,
         device_id: str,
+        preprompt: str,
+        room: str,
     ):
         """Create a new LLMClient instance."""
         super().__init__(
-            user=user,
+            user=f"@{username}:{homeserver.removeprefix('https://')}",
             homeserver=homeserver,
             device_id=device_id,
         )
+        self.spawn_time = time.time() * 1000
+        self.username = username
+        self.preprompt = preprompt
+        self.room = room

         # create the Llama instance
         self.llm = Llama(
             model_path="../../../llama.cpp/models/sv13B/stable-vicuna-13B.ggml.q5_1.bin",
@@ -37,67 +46,126 @@ class LLMClient(AsyncClient):
     async def message_callback(self, room: MatrixRoom, event: RoomMessageText):
         """Process new messages as they come in."""
-        # ignore messages sent in other rooms
-        if room.room_id != ROOM:
+        logger.debug(f"Received new message in room {room.room_id}.")
+        logger.debug(f"Message body: {event.body}")

         # ignore our own messages
         if event.sender == self.user:
+            logger.debug("Ignoring our own message.")
             return

-        if f"<{USERNAME}>" in event.body:
-            logging.debug("Received message including our identifier")
+        # ignore messages pre-spawn
+        if event.server_timestamp < self.spawn_time:
+            logger.debug("Ignoring message pre-spawn.")
+            return

-        prompt = dedent(
-            f"""
-            {PREPROMPT}
-            <{event.sender}>: {event.body}
-            <{USERNAME}>:
-            """,
-        ).strip()
+        # ignore messages sent in other rooms
+        if room.room_id != self.room:
+            logger.debug("Ignoring message in different room.")
+            return

-        # enable typing indicator
-        await self.room_typing(ROOM, typing_state=True)
+        if self.username not in event.body:
+            logger.debug("Ignoring message not directed at us.")
+            return

-        output = self.llm(
-            prompt,
-            max_tokens=100,
-            stop=["<{event.sender}>:", "\n"],
-            echo=True,
-        )
+        prompt = dedent(
+            f"""
+            {self.preprompt}
+            <{event.sender}>: {event.body}
+            <pipobot>:
+            """,
+        ).strip()

-        # retreive the response
-        output = output["choices"][0]["text"]  # type: ignore
-        output = output.removeprefix(prompt).strip()
+        logger.debug(f"Prompt: {prompt}")

-        # disable typing indicator
-        await self.room_typing(ROOM, typing_state=False)
+        # enable typing indicator
+        await self.room_typing(
+            self.room,
+            typing_state=True,
+            timeout=100000000,
+        )

-        # send the response
-        await self.room_send(
-            room_id=ROOM,
-            message_type="m.room.message",
-            content={
-                "msgtype": "m.text",
-                "body": output,
-            },
-        )
+        output = self.llm(
+            prompt,
+            max_tokens=100,
+            stop=[f"<{event.sender}>"],
+            echo=True,
+        )

+        # retrieve the response
+        output = output["choices"][0]["text"]  # type: ignore
+        output = output.removeprefix(prompt).strip()

+        # disable typing indicator
+        await self.room_typing(self.room, typing_state=False)

+        # send the response
+        await self.room_send(
+            room_id=self.room,
+            message_type="m.room.message",
+            content={
+                "msgtype": "m.text",
+                "body": output,
+            },
+        )

-async def main() -> None:
+@click.command()
+@click.option("--homeserver", "-h", help="The homeserver to connect to.", required=True)
+@click.option("--device-id", "-d", help="The device ID to use.", required=True)
+@click.option("--username", "-u", help="The username to log in as.", required=True)
+@click.option("--password", "-p", help="The password to log in with.", required=True)
+@click.option("--room", "-r", help="The room to join.", required=True)
+@click.option("--preprompt", "-t", help="The preprompt to use.", required=True)
+def main(
+    homeserver: str,
+    device_id: str,
+    username: str,
+    password: str,
+    room: str,
+    preprompt,
+) -> None:
+    asyncio.get_event_loop().run_until_complete(
+        _main(
+            homeserver=homeserver,
+            device_id=device_id,
+            username=username,
+            password=password,
+            preprompt=preprompt,
+            room=room,
+        )
+    )

+async def _main(
+    homeserver: str,
+    device_id: str,
+    username: str,
+    password: str,
+    room: str,
+    preprompt,
+) -> None:
     """Run the main program."""
     # create the client
     client = LLMClient(
-        homeserver=HOMESERVER,
-        device_id=DEVICE_ID,
-        user=USERNAME,
+        homeserver=homeserver,
+        device_id=device_id,
+        username=username,
+        room=room,
+        preprompt=preprompt,
     )

     # Login to the homeserver
-    print(await client.login(PASSWORD))
+    print(await client.login(password))

     # Join the room, if not already joined
-    print(await client.join(ROOM))
+    print(await client.join(room))

     # Sync with the server forever
     await client.sync_forever(timeout=30000)

 if __name__ == "__main__":
-    asyncio.get_event_loop().run_until_complete(main())
+    logging.basicConfig(level=logging.DEBUG)
+    main(auto_envvar_prefix="NIOLLM")

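Two details of the new entry point are easy to miss because they sit outside the hunks shown above. First, since main() is invoked with auto_envvar_prefix="NIOLLM", click also reads every required option from the environment (NIOLLM_HOMESERVER, NIOLLM_DEVICE_ID, NIOLLM_USERNAME, NIOLLM_PASSWORD, NIOLLM_ROOM, NIOLLM_PREPROMPT), so the bot can be configured without command-line flags. Second, the diff never shows where message_callback is attached to the client; with matrix-nio that is normally done through AsyncClient.add_event_callback. A minimal sketch of how _main could wire it up before syncing (the registration line is an assumption, not something this commit shows; the rest mirrors the committed _main):

from nio import RoomMessageText

async def _main(
    homeserver: str,
    device_id: str,
    username: str,
    password: str,
    room: str,
    preprompt: str,
) -> None:
    """Run the bot (sketch, assuming the LLMClient class from this commit)."""
    client = LLMClient(
        homeserver=homeserver,
        device_id=device_id,
        username=username,
        room=room,
        preprompt=preprompt,
    )

    # not shown in the diff: route incoming text events to the LLM callback
    client.add_event_callback(client.message_callback, RoomMessageText)

    print(await client.login(password))
    print(await client.join(room))
    await client.sync_forever(timeout=30000)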
nio-llm/test.py (new file, 28 additions)

@@ -0,0 +1,28 @@
+from textwrap import dedent
+
+from llama_cpp import Llama
+
+llm = Llama(model_path="../../../llama.cpp/models/sv13B/stable-vicuna-13B.ggml.q5_1.bin", n_threads=12)
+
+msg = dedent(
+    """
+    You are pipobot, an arrogant assistant. Answer as concisely as possible.
+    <@fainsil:inpt.fr>: Qu'est ce qu'une intégrale de Lebesgue ?
+    <@pipobot:inpt.fr>:
+    """,
+).strip()
+
+print(msg)
+print(repr(msg))
+
+output = llm(
+    msg,
+    max_tokens=100,
+    stop=["<@fainsil:inpt.fr>:", "\n"],
+    echo=True,
+)
+
+print(output)
+res = output["choices"][0]["text"]
+print(res)
+print(res.removeprefix(msg).strip())
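A note on the design of this test script: echo=True makes llama-cpp-python return the prompt together with the completion, which is why the last line strips msg back off with removeprefix, and the stop list keeps the model from speaking as the human user or running past a single line. The same result can be obtained by leaving echo at its default of False, in which case only the generated text is returned; a small sketch of that variant, reusing the llm and msg objects defined above:

output = llm(
    msg,
    max_tokens=100,
    stop=["<@fainsil:inpt.fr>:", "\n"],
)

# with echo=False (the default), the returned text contains only the completion
print(output["choices"][0]["text"].strip())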