|
|
|
"""The main program for nio-llm."""
|
|
|
|
|
|
|
|
import asyncio
|
|
|
|
import logging
|
|
|
|
from pathlib import Path
|
|
|
|
|
|
|
|
from huggingface_hub import hf_hub_download
|
|
|
|
from jsonargparse import CLI
|
|
|
|
from rich.logging import RichHandler
|
|
|
|
|
|
|
|
from nio_llm.client import LLMClient
|
|
|
|
|
|
|
|
logger = logging.getLogger("nio-llm.main")
|
|
|
|
|
|
|
|
|
|
|
|
def main(
|
|
|
|
room: str,
|
2023-05-24 20:29:09 +00:00
|
|
|
password: str,
|
|
|
|
username: str,
|
2023-05-24 19:57:48 +00:00
|
|
|
preprompt: str,
|
2023-06-12 19:12:21 +00:00
|
|
|
device_id: str = "nio-llm",
|
|
|
|
homeserver: str = "https://matrix.org",
|
2023-06-12 17:48:28 +00:00
|
|
|
ggml_repoid: str = "TheBloke/stable-vicuna-13B-GGML",
|
|
|
|
ggml_filename: str = "stable-vicuna-13B.ggmlv3.q5_1.bin",
|
|
|
|
sync_timeout: int = 30000,
|
2023-05-24 19:57:48 +00:00
|
|
|
) -> None:
|
2023-06-12 17:48:28 +00:00
|
|
|
"""Download llama model from HuggingFace and start the client.
|
2023-05-24 19:57:48 +00:00
|
|
|
|
2023-06-12 17:48:28 +00:00
|
|
|
Args:
|
|
|
|
room (`str`):
|
|
|
|
The room to join.
|
|
|
|
password (`str`):
|
|
|
|
The password to log in with.
|
|
|
|
username (`str`):
|
|
|
|
The username to log in as.
|
|
|
|
device_id (`str`):
|
|
|
|
The device ID to use.
|
|
|
|
preprompt (`str`):
|
|
|
|
The preprompt to use.
|
|
|
|
ggml_repoid (`str`, default `"TheBloke/stable-vicuna-13B-GGML"`):
|
|
|
|
The HuggingFace Hub repo ID to download the model from.
|
|
|
|
ggml_filename (`str`, default `"stable-vicuna-13B.ggmlv3.q5_1.bin"`):
|
|
|
|
The HuggingFace Hub filename to download the model from.
|
|
|
|
homeserver (`str`, default `"matrix.org"`):
|
|
|
|
The homeserver to connect to.
|
|
|
|
sync_timeout (`int`, default `30000`):
|
|
|
|
The timeout to use when syncing with the homeserver.
|
2023-05-24 19:57:48 +00:00
|
|
|
"""
|
|
|
|
# download the model
|
|
|
|
ggml_path = Path(
|
|
|
|
hf_hub_download(
|
2023-05-29 16:06:16 +00:00
|
|
|
repo_id=ggml_repoid,
|
|
|
|
filename=ggml_filename,
|
2023-05-24 19:57:48 +00:00
|
|
|
),
|
|
|
|
)
|
|
|
|
|
|
|
|
# create the client
|
|
|
|
client = LLMClient(
|
|
|
|
room=room,
|
2023-05-24 20:29:09 +00:00
|
|
|
username=username,
|
|
|
|
device_id=device_id,
|
2023-05-24 19:57:48 +00:00
|
|
|
ggml_path=ggml_path,
|
2023-05-24 20:29:09 +00:00
|
|
|
preprompt=preprompt,
|
|
|
|
homeserver=homeserver,
|
2023-05-24 19:57:48 +00:00
|
|
|
)
|
|
|
|
|
2023-05-29 16:26:19 +00:00
|
|
|
# start the client
|
|
|
|
asyncio.get_event_loop().run_until_complete(
|
|
|
|
client.start(
|
|
|
|
password=password,
|
|
|
|
sync_timeout=sync_timeout,
|
|
|
|
),
|
|
|
|
)
|
2023-05-24 19:57:48 +00:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
|
|
|
|
# set up logging
|
|
|
|
logging.captureWarnings(True)
|
|
|
|
logging.basicConfig(
|
|
|
|
level="DEBUG",
|
|
|
|
format="%(name)s: %(message)s",
|
|
|
|
handlers=[RichHandler(markup=True)],
|
|
|
|
)
|
|
|
|
|
|
|
|
# run the main program (with environment variables)
|
2023-06-12 19:12:21 +00:00
|
|
|
CLI(
|
|
|
|
components=main,
|
|
|
|
as_positional=False,
|
|
|
|
env_prefix="NIO_LLM",
|
|
|
|
default_env=True,
|
|
|
|
)
|