From 5f4fa8648077d69dbdeba70ac40933c20477bed2 Mon Sep 17 00:00:00 2001
From: Cyberes
Date: Tue, 9 Apr 2024 19:34:05 -0600
Subject: [PATCH] add vision to help text

---
 README.md                    | 5 +++++
 matrix_gpt/config.py         | 1 +
 matrix_gpt/handle_actions.py | 5 +++--
 3 files changed, 9 insertions(+), 2 deletions(-)

diff --git a/README.md b/README.md
index a41e4c9..4caa214 100644
--- a/README.md
+++ b/README.md
@@ -45,3 +45,8 @@ The bot can give helpful reactions:
 - 🕒 means the API timed out.
 - ❌ means the bot encountered an exception.
 - ❌ 🔐 means there was a decryption failure.
+
+## TODO
+
+- [ ] Dalle bot
+- [ ] Fix the typing indicator being removed when two responses are generating
\ No newline at end of file
diff --git a/matrix_gpt/config.py b/matrix_gpt/config.py
index dc8b52e..aedef31 100644
--- a/matrix_gpt/config.py
+++ b/matrix_gpt/config.py
@@ -57,6 +57,7 @@ DEFAULT_LISTS = {
         'system_prompt': None,
         'injected_system_prompt': None,
         'api_base': None,
+        'vision': False,
         'help': None,
     }
 }
diff --git a/matrix_gpt/handle_actions.py b/matrix_gpt/handle_actions.py
index 6828490..ded5fbb 100644
--- a/matrix_gpt/handle_actions.py
+++ b/matrix_gpt/handle_actions.py
@@ -117,11 +117,12 @@ async def sound_off(room: MatrixRoom, event: RoomMessageText, client_helper: Mat
 
 `!matrixgpt` - show this help message.\n\n"""
     for command in global_config['command']:
-        max_tokens = command['max_tokens'] if command['max_tokens'] > 0 else 'max'
+        max_tokens = f' Max tokens: {command["max_tokens"]}.' if command['max_tokens'] > 0 else ''
         system_prompt_text = f" System prompt: yes." if command['system_prompt'] else ''
         injected_system_prompt_text = f" Injected system prompt: yes." if command['injected_system_prompt'] else ''
         help_text = f" ***{command['help'].strip('.')}.***" if command['help'] else ''
-        text_response = text_response + f"`{command['trigger']}` - Model: {command['model']}. Temperature: {command['temperature']}. Max tokens: {max_tokens}.{system_prompt_text}{injected_system_prompt_text}{help_text}\n\n"
+        vision_text = ' Vision: yes.' if command['vision'] else ''
+        text_response = text_response + f"`{command['trigger']}` - Model: {command['model']}. Temperature: {command['temperature']}.{max_tokens}{vision_text}{system_prompt_text}{injected_system_prompt_text}{help_text}\n\n"
     return await client_helper.send_text_to_room(
         room.room_id,
         text_response,