force system prompt

This commit is contained in:
Cyberes 2023-03-18 15:18:22 -06:00
parent 9f5475db75
commit e08518f866
4 changed files with 27 additions and 6 deletions

View File

@@ -39,6 +39,9 @@ reply_in_thread: true
# For example, you can instruct the assistant with "You are a helpful assistant."
#system_prompt:
# Insert the system prompt before the most recent user input. Useful for threaded chats.
force_system_prompt: false
# Log the full response (prompt + response) at debug level.
log_full_response: false

14
main.py
View File

@@ -54,7 +54,6 @@ check_config_value_exists(config_data, 'openai_api_key')
check_config_value_exists(config_data, 'openai_model')
check_config_value_exists(config_data, 'data_storage')
# check_config_value_exists(config_data, 'autojoin_rooms')
def retry(msg=None):
@@ -89,7 +88,18 @@ async def main():
storage = Storage(Path(config_data['data_storage'], 'matrixgpt.db'))
# Set up event callbacks
callbacks = Callbacks(client, storage, config_data['command_prefix'], openai_config, config_data.get('reply_in_thread', False), config_data['allowed_to_invite'], config_data['allowed_to_chat'], config_data.get('system_prompt'), log_full_response=config_data.get('log_full_response', False))
callbacks = Callbacks(
client,
storage,
config_data['command_prefix'],
openai_config,
config_data.get('reply_in_thread', False),
config_data['allowed_to_invite'],
config_data['allowed_to_chat'],
config_data.get('system_prompt'),
log_full_response=config_data.get('log_full_response', False),
force_system_prompt=config_data.get('force_system_prompt', False)
)
client.add_event_callback(callbacks.message, RoomMessageText)
client.add_event_callback(callbacks.invite_event_filtered_callback, InviteMemberEvent)
client.add_event_callback(callbacks.decryption_failure, MegolmEvent)

View File

@@ -13,7 +13,7 @@ logger = logging.getLogger('MatrixGPT')
class Callbacks:
def __init__(self, client: AsyncClient, store: Storage, command_prefix: str, openai, reply_in_thread, allowed_to_invite, allowed_to_chat='all', system_prompt: str = None, log_full_response: bool = False):
def __init__(self, client: AsyncClient, store: Storage, command_prefix: str, openai, reply_in_thread, allowed_to_invite, allowed_to_chat='all', system_prompt: str = None, log_full_response: bool = False, force_system_prompt: bool = False):
"""
Args:
client: nio client used to interact with matrix.
@@ -33,6 +33,7 @@ class Callbacks:
self.allowed_to_chat = allowed_to_chat
self.system_prompt = system_prompt
self.log_full_response = log_full_response
self.force_system_prompt = force_system_prompt
async def message(self, room: MatrixRoom, event: RoomMessageText) -> None:
"""Callback for when a message event is received
@@ -91,7 +92,7 @@ class Callbacks:
# message = Message(self.client, self.store, msg, room, event, self.reply_in_thread)
# await message.process()
api_data.append({'role': 'user', 'content': event.body})
await process_chat(self.client, room, event, api_data, self.store, self.openai, thread_root_id=thread_content[0].event_id, system_prompt=self.system_prompt, log_full_response=self.log_full_response)
await process_chat(self.client, room, event, api_data, self.store, self.openai, thread_root_id=thread_content[0].event_id, system_prompt=self.system_prompt, log_full_response=self.log_full_response, force_system_prompt=self.force_system_prompt)
return
elif msg.startswith(f'{self.command_prefix} ') or room.member_count == 2:
# Otherwise if this is in a 1-1 with the bot or features a command prefix, treat it as a command.

View File

@@ -184,7 +184,7 @@ async def get_thread_content(client: AsyncClient, room: MatrixRoom, base_event:
return messages
async def process_chat(client, room, event, command, store, openai, thread_root_id: str = None, system_prompt: str = None, log_full_response: bool = False):
async def process_chat(client, room, event, command, store, openai, thread_root_id: str = None, system_prompt: str = None, log_full_response: bool = False, force_system_prompt: bool = False):
if not store.check_seen_event(event.event_id):
await client.room_typing(room.room_id, typing_state=True, timeout=3000)
# if self.reply_in_thread:
@@ -197,7 +197,14 @@ async def process_chat(client, room, event, command, store, openai, thread_root_
{'role': 'user', 'content': command},
]
if system_prompt:
messages.insert(0, {"role": "system", "content": system_prompt}, )
messages.insert(0, {"role": "system", "content": system_prompt})
if force_system_prompt:
if messages[-1]['role'] == 'system':
messages[-1] = {"role": "system", "content": system_prompt}
else:
messages.insert(-1, {"role": "system", "content": system_prompt})
logger.info(messages)
response = openai['openai'].ChatCompletion.create(
model=openai['model'],