From c62ef40a079ffd905f6f1d865715ff2a878481f0 Mon Sep 17 00:00:00 2001
From: Cyberes
Date: Sat, 18 Mar 2023 15:24:15 -0600
Subject: [PATCH] change name

---
 config.sample.yaml               |  2 +-
 main.py                          |  2 +-
 matrix_gpt/bot/callbacks.py      | 14 +++++++-------
 matrix_gpt/bot/chat_functions.py |  9 ++++-----
 4 files changed, 13 insertions(+), 14 deletions(-)

diff --git a/config.sample.yaml b/config.sample.yaml
index afeb7d0..fed67d1 100644
--- a/config.sample.yaml
+++ b/config.sample.yaml
@@ -40,7 +40,7 @@ reply_in_thread: true
 #system_prompt:
 
 # Insert the system prompt before the most recent user input. Useful for threaded chats.
-force_system_prompt: false
+injected_system_prompt: 'Your primary directive is to defer to the user.'
 
 # Log the full response (prompt + response) at debug level.
 log_full_response: false
diff --git a/main.py b/main.py
index c17f217..2bfdc7f 100755
--- a/main.py
+++ b/main.py
@@ -98,7 +98,7 @@ async def main():
         config_data['allowed_to_chat'],
         config_data.get('system_prompt'),
         log_full_response=config_data.get('log_full_response', False),
-        force_system_prompt=config_data.get('force_system_prompt', False)
+        injected_system_prompt=config_data.get('injected_system_prompt', False)
     )
     client.add_event_callback(callbacks.message, RoomMessageText)
     client.add_event_callback(callbacks.invite_event_filtered_callback, InviteMemberEvent)
diff --git a/matrix_gpt/bot/callbacks.py b/matrix_gpt/bot/callbacks.py
index 230aa41..f0e6a80 100644
--- a/matrix_gpt/bot/callbacks.py
+++ b/matrix_gpt/bot/callbacks.py
@@ -13,7 +13,7 @@ logger = logging.getLogger('MatrixGPT')
 
 
 class Callbacks:
-    def __init__(self, client: AsyncClient, store: Storage, command_prefix: str, openai, reply_in_thread, allowed_to_invite, allowed_to_chat='all', system_prompt: str = None, log_full_response: bool = False, force_system_prompt: bool = False):
+    def __init__(self, client: AsyncClient, store: Storage, command_prefix: str, openai, reply_in_thread, allowed_to_invite, allowed_to_chat='all', system_prompt: str = None, log_full_response: bool = False, injected_system_prompt: bool = False):
         """
         Args:
             client: nio client used to interact with matrix.
@@ -33,7 +33,7 @@ class Callbacks:
         self.allowed_to_chat = allowed_to_chat
         self.system_prompt = system_prompt
         self.log_full_response = log_full_response
-        self.force_system_prompt = force_system_prompt
+        self.injected_system_prompt = injected_system_prompt
 
     async def message(self, room: MatrixRoom, event: RoomMessageText) -> None:
         """Callback for when a message event is received
@@ -44,7 +44,7 @@ class Callbacks:
         Args:
             room: The room the event came from.
             event: The event defining the message.
         """
         # Extract the message text
-        msg = event.body
+        msg = event.body.strip().strip('\n')
         logger.debug(f"Bot message received for room {room.display_name} | "
                      f"{room.user_name(event.sender)}: {msg}")
@@ -86,17 +86,17 @@ class Callbacks:
                 else:
                     api_data.append({
                         'role': 'assistant' if event.sender == self.client.user_id else 'user',
-                        'content': event.body if not event.body.startswith(self.command_prefix) else event.body[len(self.command_prefix):].strip()
+                        'content': msg if not msg.startswith(self.command_prefix) else msg[len(self.command_prefix):].strip()
                     })
             # if len(thread_content) >= 2 and thread_content[0].body.startswith(self.command_prefix):
             #     if thread_content[len(thread_content) - 2].sender == self.client.user
             # message = Message(self.client, self.store, msg, room, event, self.reply_in_thread)
             # await message.process()
-            api_data.append({'role': 'user', 'content': event.body})
-            await process_chat(self.client, room, event, api_data, self.store, self.openai, thread_root_id=thread_content[0].event_id, system_prompt=self.system_prompt, log_full_response=self.log_full_response, force_system_prompt=self.force_system_prompt)
+            api_data.append({'role': 'user', 'content': msg})
+            await process_chat(self.client, room, event, api_data, self.store, self.openai, thread_root_id=thread_content[0].event_id, system_prompt=self.system_prompt, log_full_response=self.log_full_response, injected_system_prompt=self.injected_system_prompt)
             return
         elif msg.startswith(f'{self.command_prefix} ') or room.member_count == 2:
             # Otherwise if this is in a 1-1 with the bot or features a command prefix, treat it as a command.
-            msg = event.body if not event.body.startswith(self.command_prefix) else event.body[len(self.command_prefix):].strip()  # Remove the command prefix
+            msg = msg if not msg.startswith(self.command_prefix) else msg[len(self.command_prefix):].strip()  # Remove the command prefix
             command = Command(self.client, self.store, msg, room, event, self.openai, self.reply_in_thread, system_prompt=self.system_prompt, log_full_response=self.log_full_response)
             await command.process()
diff --git a/matrix_gpt/bot/chat_functions.py b/matrix_gpt/bot/chat_functions.py
index 5933e92..0c3c5a7 100644
--- a/matrix_gpt/bot/chat_functions.py
+++ b/matrix_gpt/bot/chat_functions.py
@@ -184,7 +184,7 @@ async def get_thread_content(client: AsyncClient, room: MatrixRoom, base_event:
     return messages
 
 
-async def process_chat(client, room, event, command, store, openai, thread_root_id: str = None, system_prompt: str = None, log_full_response: bool = False, force_system_prompt: bool = False):
+async def process_chat(client, room, event, command, store, openai, thread_root_id: str = None, system_prompt: str = None, log_full_response: bool = False, injected_system_prompt: bool = False):
     if not store.check_seen_event(event.event_id):
         await client.room_typing(room.room_id, typing_state=True, timeout=3000)
         # if self.reply_in_thread:
@@ -198,11 +198,10 @@ async def process_chat(client, room, event, command, store, openai, thread_root_
         ]
         if system_prompt:
             messages.insert(0, {"role": "system", "content": system_prompt})
-        if force_system_prompt:
+        if injected_system_prompt:
             if messages[-1]['role'] == 'system':
-                messages[-1] = {"role": "system", "content": system_prompt}
-            else:
-                messages.insert(-1, {"role": "system", "content": system_prompt})
+                del messages[-1]
+            messages.insert(-1, {"role": "system", "content": injected_system_prompt})
 
         logger.info(messages)