add a bit more logging and docs, update readme

This commit is contained in:
Cyberes 2024-04-10 18:16:36 -06:00
parent 391679055d
commit 7c911d1235
4 changed files with 30 additions and 5 deletions

View File

@ -3,8 +3,7 @@
_Chatbots for Matrix._
This bot supports OpenAI, Anthropic, and locally hosted models that use an OpenAI-compatible endpoint. It can run
multiple different models using
different triggers, such as `!c4` for GPT4 and `!ca` for Anthropic, all through the same bot.
multiple different models using different triggers, such as `!c4` for GPT4 and `!ca` for Anthropic, all through the same bot.
**Supported Services**
@ -42,7 +41,7 @@ Use `!matrixgpt` to view the bot's help. The bot also responds to `!bots`.
<br>
- Don't try to use two bots in the same thread.
- You can DM a bot for a private chat.
- You can DM the bot for a private chat.
- The bot will move its read marker whenever a message is sent in the room.
<br>
@ -57,4 +56,8 @@ The bot can give helpful reactions:
## TODO
- [ ] Dalle bot
- [ ] Fix the typing indicator being removed when two responses are generating
- [ ] Add our own context mechanism to Copilot?
- [ ] Improve error messages sent with reactions to narrow down where the issue occurred.
- [ ] Allow replying to an image post which will give a vision model an image + text on the first message.
- [ ] Fix the typing indicator being removed when two responses are generating.
- [ ] ~~Add vision to Copilot~~ (not doing, API too unstable).

View File

@ -65,6 +65,10 @@ async def main(args):
logger.debug(f'Command Prefixes: {[k for k, v in global_config.command_prefixes.items()]}')
logger.info(f"OpenAI API key: {'yes' if global_config['openai'].get('api_key') else 'no'}")
logger.info(f"Anthropic API key: {'yes' if global_config['anthropic'].get('api_key') else 'no'}")
logger.info(f"Copilot API key: {'yes' if global_config['copilot'].get('api_key') else 'no'}")
client_helper = MatrixClientHelper(
user_id=global_config['auth']['username'],
passwd=global_config['auth']['password'],

View File

@ -31,6 +31,17 @@ async def generate_ai_response(
await client.room_typing(room.room_id, typing_state=True, timeout=global_config['response_timeout'] * 1000)
api_client = api_client_helper.get_client(command_info.api_type, client_helper)
if not api_client:
# If this was None then we were missing an API key for this client type. Error has already been logged.
await client_helper.react_to_event(
room.room_id,
event.event_id,
'',
extra_error=f'No API key for model {command_info.model}' if global_config['send_extra_messages'] else None
)
await client.room_typing(room.room_id, typing_state=False, timeout=1000)
return
messages = api_client.assemble_context(msg, system_prompt=command_info.system_prompt, injected_system_prompt=command_info.injected_system_prompt)
if api_client.check_ignore_request():

View File

@ -18,6 +18,13 @@ _REGEX_ATTR_LINK_RE_STR = [r'\[\^\d*\^]\[', r']']
_REGEX_ATTR_LINK_RE = re.compile(r'\d*'.join(_REGEX_ATTR_LINK_RE_STR))
"""
To implement context, could we maybe pickle the `sydney` object and track state via requester event ID?
Probably best not to store it in memory, but maybe a sqlite database in /tmp?
But might have to store it in memory because of async issues and not being able to restore the state of an old async loop.
"""
class CopilotClient(ApiClient):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)