From 8dac964d2d8dea5ff16ec4190ebb1fbea648a235 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Laure=CE=B7t?=
Date: Thu, 19 Oct 2023 21:23:15 +0000
Subject: [PATCH] =?UTF-8?q?=F0=9F=94=A7=20change=20default=20history=5Fsiz?=
 =?UTF-8?q?e?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/nio_llm/__main__.py | 4 ++--
 src/nio_llm/client.py   | 1 -
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/nio_llm/__main__.py b/src/nio_llm/__main__.py
index 183ef39..d2113e9 100644
--- a/src/nio_llm/__main__.py
+++ b/src/nio_llm/__main__.py
@@ -23,7 +23,7 @@ def main(
     openai_api_endpoint: str = "http://localhost:8000/v1",
     openai_temperature: float = 0,
     openai_max_tokens: int = 256,
-    history_size: int = 3,
+    history_size: int = 5,
 ) -> None:
     """Instantiate and start the client.
 
@@ -60,7 +60,7 @@ def main(
             Defaults to `256`.
         history_size (`int`):
             The number of messages to keep in history.
-            Defaults to `3`.
+            Defaults to `5`.
     """
     # create the client
     client = LLMClient(
diff --git a/src/nio_llm/client.py b/src/nio_llm/client.py
index f7e318d..7453437 100644
--- a/src/nio_llm/client.py
+++ b/src/nio_llm/client.py
@@ -181,7 +181,6 @@ class LLMClient(AsyncClient):
                     for message in self.history
                 ],
             ],
-            stop=["<|im_end|>"],
             temperature=self.openai_temperature,
             max_tokens=self.openai_max_tokens,
         )