Cyberes 2023-03-18 14:58:30 -06:00
parent e3e14181ac
commit f5ad27d434
5 changed files with 20 additions and 11 deletions

View File

@@ -39,4 +39,7 @@ reply_in_thread: true
# For example, you can instruct the assistant with "You are a helpful assistant."
#system_prompt:
# Log the full response (prompt + response).
log_full_response: false
logout_other_devices: false

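The new option defaults to false, and main() reads it with config_data.get('log_full_response', False), so configs written before this commit keep their old behavior. A minimal sketch of that read pattern, assuming a PyYAML-loaded file named config.yaml (both the filename and the loader are assumptions, not shown in this commit):

import yaml

# 'config.yaml' is an assumed filename for this sketch; the real bot resolves
# its config path in its own startup code.
with open('config.yaml') as f:
    config_data = yaml.safe_load(f)

# Same pattern as main(): a missing 'log_full_response' key falls back to False,
# so older configs behave exactly as before.
log_full_response = config_data.get('log_full_response', False)
print('log_full_response =', log_full_response)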
View File

@@ -89,7 +89,7 @@ async def main():
storage = Storage(Path(config_data['data_storage'], 'matrixgpt.db'))
# Set up event callbacks
callbacks = Callbacks(client, storage, config_data['command_prefix'], openai_config, config_data.get('reply_in_thread', False), config_data['allowed_to_invite'], config_data['allowed_to_chat'], config_data.get('system_prompt'))
callbacks = Callbacks(client, storage, config_data['command_prefix'], openai_config, config_data.get('reply_in_thread', False), config_data['allowed_to_invite'], config_data['allowed_to_chat'], config_data.get('system_prompt'), log_full_response=config_data.get('log_full_response', False))
client.add_event_callback(callbacks.message, RoomMessageText)
client.add_event_callback(callbacks.invite_event_filtered_callback, InviteMemberEvent)
client.add_event_callback(callbacks.decryption_failure, MegolmEvent)

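log_full_response enters Callbacks as a keyword argument with a False default, so the previous constructor call would still work and the new one simply opts in. A stripped-down sketch of that pattern (CallbacksSketch is a stand-in, not the real class, which also takes the nio client, storage, and the other options shown above):

class CallbacksSketch:
    # Stand-in carrying only the flag relevant here; the real __init__ also
    # receives client, store, command_prefix, openai, reply_in_thread,
    # allowed_to_invite, allowed_to_chat and system_prompt.
    def __init__(self, command_prefix: str, log_full_response: bool = False):
        self.command_prefix = command_prefix
        self.log_full_response = log_full_response

# Old-style call: still valid, the flag defaults to False.
cb_old = CallbacksSketch('!c')
# New-style call mirroring main(): explicit opt-in from the config.
cb_new = CallbacksSketch('!c', log_full_response=True)
print(cb_old.log_full_response, cb_new.log_full_response)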
View File

@@ -21,6 +21,7 @@ class Command:
openai,
reply_in_thread,
system_prompt: str = None,
log_full_response: bool = False
):
"""A command made by a user.
@@ -47,6 +48,7 @@ class Command:
self.openai = openai
self.reply_in_thread = reply_in_thread
self.system_prompt = system_prompt
self.log_full_response = log_full_response
async def process(self):
"""Process the command"""
@@ -62,7 +64,7 @@ class Command:
await self._process_chat()
async def _process_chat(self):
await process_chat(self.client, self.room, self.event, self.command, self.store, self.openai, system_prompt=self.system_prompt)
await process_chat(self.client, self.room, self.event, self.command, self.store, self.openai, system_prompt=self.system_prompt, log_full_response=self.log_full_response)
async def _show_help(self):
"""Show the help text"""

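Command only carries the flag: it is stored in __init__ and forwarded untouched to process_chat() as a keyword argument. A minimal runnable sketch of that pass-through (the names ending in Sketch are made up for illustration; the real calls also pass client, room, event, store and the OpenAI handle):

import asyncio

async def process_chat_sketch(command, *, system_prompt=None, log_full_response=False):
    # Stand-in for the real process_chat(); only the new flag matters here.
    return f'log_full_response={log_full_response}'

class CommandSketch:
    # Simplified Command: store the flag in __init__, forward it unchanged.
    def __init__(self, command, system_prompt=None, log_full_response: bool = False):
        self.command = command
        self.system_prompt = system_prompt
        self.log_full_response = log_full_response

    async def _process_chat(self):
        return await process_chat_sketch(self.command,
                                         system_prompt=self.system_prompt,
                                         log_full_response=self.log_full_response)

print(asyncio.run(CommandSketch('hello', log_full_response=True)._process_chat()))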
View File

@@ -13,7 +13,7 @@ logger = logging.getLogger('MatrixGPT')
class Callbacks:
def __init__(self, client: AsyncClient, store: Storage, command_prefix: str, openai, reply_in_thread, allowed_to_invite, allowed_to_chat='all', system_prompt: str = None, ):
def __init__(self, client: AsyncClient, store: Storage, command_prefix: str, openai, reply_in_thread, allowed_to_invite, allowed_to_chat='all', system_prompt: str = None, log_full_response:bool=False):
"""
Args:
client: nio client used to interact with matrix.
@@ -32,6 +32,7 @@ class Callbacks:
self.allowed_to_invite = allowed_to_invite if allowed_to_invite else []
self.allowed_to_chat = allowed_to_chat
self.system_prompt = system_prompt
self.log_full_response = log_full_response
async def message(self, room: MatrixRoom, event: RoomMessageText) -> None:
"""Callback for when a message event is received
@@ -90,12 +91,12 @@ class Callbacks:
# message = Message(self.client, self.store, msg, room, event, self.reply_in_thread)
# await message.process()
api_data.append({'role': 'user', 'content': event.body})
await process_chat(self.client, room, event, api_data, self.store, self.openai, thread_root_id=thread_content[0].event_id, system_prompt=self.system_prompt)
await process_chat(self.client, room, event, api_data, self.store, self.openai, thread_root_id=thread_content[0].event_id, system_prompt=self.system_prompt, log_full_response=self.log_full_response)
return
elif msg.startswith(f'{self.command_prefix} ') or room.member_count == 2:
# Otherwise if this is in a 1-1 with the bot or features a command prefix, treat it as a command.
msg = event.body if not event.body.startswith(self.command_prefix) else event.body[len(self.command_prefix):].strip() # Remove the command prefix
command = Command(self.client, self.store, msg, room, event, self.openai, self.reply_in_thread, system_prompt=self.system_prompt)
command = Command(self.client, self.store, msg, room, event, self.openai, self.reply_in_thread, system_prompt=self.system_prompt, log_full_response=self.log_full_response)
await command.process()
async def invite(self, room: MatrixRoom, event: InviteMemberEvent) -> None:

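The callback hands the flag to both of its existing paths: thread replies go straight to process_chat(), while prefixed messages and 1-1 rooms are wrapped in a Command. A rough sketch of that routing decision with the Matrix objects replaced by plain values (route_message and its parameters are illustrative, not part of the bot's API, and the thread check is inferred from the context above this hunk):

def route_message(body: str, member_count: int, in_thread: bool, command_prefix: str) -> str:
    # Mirrors the shape of Callbacks.message(): thread replies are answered
    # directly, otherwise a prefixed message or a 1-1 room becomes a Command.
    if in_thread:
        return 'process_chat'   # called with log_full_response=self.log_full_response
    elif body.startswith(f'{command_prefix} ') or member_count == 2:
        return 'Command'        # constructed with log_full_response=self.log_full_response
    return 'ignored'

print(route_message('!c hello', 5, False, '!c'))   # -> Command
print(route_message('hello', 2, False, '!c'))      # -> Command (1-1 room with the bot)
print(route_message('hi again', 5, True, '!c'))    # -> process_chat (thread reply)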
View File

@@ -184,7 +184,7 @@ async def get_thread_content(client: AsyncClient, room: MatrixRoom, base_event:
return messages
async def process_chat(client, room, event, command, store, openai, thread_root_id: str = None, system_prompt: str = None):
async def process_chat(client, room, event, command, store, openai, thread_root_id: str = None, system_prompt: str = None, log_full_response: bool = False):
if not store.check_seen_event(event.event_id):
await client.room_typing(room.room_id, typing_state=True, timeout=3000)
# if self.reply_in_thread:
@@ -206,12 +206,15 @@ async def process_chat(client, room, event, command, store, openai, thread_root_
temperature=0,
)
text_response = response["choices"][0]["message"]["content"].strip().strip('\n')
z = text_response.replace("\n", "\\n")
if log_full_response:
if isinstance(command, str):
x = command.replace("\n", "\\n")
else:
x = command
z = text_response.replace("\n", "\\n")
logger.info(f'Reply to {event.event_id} --> "{x}" and bot responded with "{z}"')
else:
logger.info(f'Reply to {event.event_id} --> "{z}"')
resp = await send_text_to_room(client, room.room_id, text_response, reply_to_event_id=event.event_id, thread=True, thread_root_id=thread_root_id if thread_root_id else event.event_id)
await client.room_typing(room.room_id, typing_state=False, timeout=3000)
store.add_event_id(event.event_id)
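The behavioral change is confined to logging: newlines are escaped so each reply stays on one log line, and the prompt is included only when the flag is set. A self-contained approximation of the new block, runnable without Matrix or OpenAI (log_reply and the '$example' event id are made up for the example):

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger('MatrixGPT')

def log_reply(event_id, command, text_response: str, log_full_response: bool = False):
    # Same shape as the new code in process_chat(): escape newlines, and only
    # include the prompt in the log line when log_full_response is enabled.
    z = text_response.replace('\n', '\\n')
    if log_full_response:
        x = command.replace('\n', '\\n') if isinstance(command, str) else command
        logger.info(f'Reply to {event_id} --> "{x}" and bot responded with "{z}"')
    else:
        logger.info(f'Reply to {event_id} --> "{z}"')

# '$example' stands in for a real Matrix event id.
log_reply('$example', 'What is Matrix?', 'A federated\nchat protocol.', log_full_response=True)
log_reply('$example', 'What is Matrix?', 'A federated\nchat protocol.')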