add player location
parent ef3148e951
commit 16b7267411
@@ -1,13 +1,17 @@
 openai_key: sk-jkdslaljkdasdjo1ijo1i3j13poij4l1kj34
 
 # Comment out to disable.
-#serpapi_api_key: llkj3lkj12lk312jlk321jlk312kjl312kj3l123kj12l
+# serpapi_api_key: llkj3lkj12lk312jlk321jlk312kjl312kj3l123kj12l
 
 # Your name
 player_name: User
 
+# Your physical location. Comment out to disable.
+# player_location: Langley, Virginia
+
 # Erase the Redis database on launch? You'll get a fresh chat every time if enabled.
 flush_redis_on_launch: true
 
 # Add a timestamp to all messages? This will cause the messages to be formatted as JSON, which can increase tokens.
+# Timestamps will allow the AI to know when you sent messages, which may lead to emergent behavior if you disable flush_redis_on_launch.
 timestamp_messages: false
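A minimal sketch of reading the new optional key, assuming the file above is YAML named config.yml and loaded with yaml.safe_load (the commit itself does not show the loader, only that run.py calls program_config.get('player_location')):

import yaml

# Assumed file name and loader; only the config text is shown in the commit.
with open('config.yml') as f:
    program_config = yaml.safe_load(f)

# .get() returns None while the key is commented out, so the feature stays opt-in.
player_location = program_config.get('player_location')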
@@ -15,7 +15,7 @@ SEARCH_RESULTS_LIMIT = 5
 
 @tool
 def search_google(query: str, reasoning: str):
-    """Preform a Google search query."""
+    """Perform a Google search query."""
     if PRINT_USAGE:
         _print_func_call('search_google', {'query': query, 'reasoning': reasoning})
     if not GLOBALS.SERPAPI_API_KEY:
@@ -55,7 +55,7 @@ def search_google(query: str, reasoning: str):
 
 @tool(args_schema=GoogleMapsSearchInput)
 def search_google_maps(query: str, latitude: float, longitude: float, reasoning: str, zoom: float = None):
-    """Preform a Google Maps search query."""
+    """Perform a Google Maps search query to find places near a lat/long coordinate."""
     # https://serpapi.com/google-maps-api#api-parameters-geographic-location-ll
     if PRINT_USAGE:
         _print_func_call('search_google_maps', {'query': query, 'latitude': latitude, 'longitude': longitude, 'zoom': zoom, 'reasoning': reasoning})
@@ -88,7 +88,7 @@ def search_google_maps(query: str, latitude: float, longitude: float, reasoning:
 
 @tool
 def search_google_news(query: str, reasoning: str):
-    """Preform a Google News search query"""
+    """Perform a Google News search query to find links to recent news articles."""
     if PRINT_USAGE:
         _print_func_call('search_google_news', {'query': query, 'reasoning': reasoning})
     if not GLOBALS.SERPAPI_API_KEY:
@@ -7,7 +7,7 @@ import cpuinfo
 import psutil
 
 
-def load_personality(player_name: str, name: str, personality: str, system: str, gender: str, special_instructions: str = None):
+def load_personality(player_name: str, name: str, personality: str, system: str, gender: str, special_instructions: str = None, player_location: str = None):
     match gender.lower():
         case 'female':
             pronoun = 'she'
@@ -23,6 +23,11 @@ def load_personality(player_name: str, name: str, personality: str, system: str,
     else:
         special_instructions = ''
 
+    if player_location:
+        player_location_str = f'The system is located in {player_location}, and the system timezone is {time.tzname[0]}.'
+    else:
+        player_location_str = f'System timezone is {time.tzname[0]}.'
+
     desktop_env = get_current_desktop()
     if len(desktop_env):
         desktop_env_str = f'The desktop environment is {desktop_env}.'
@@ -49,7 +54,7 @@ SYSTEM INFO:
 The system's hostname is "{socket.gethostname()}", which can be thought of as {name}'s "body". {pronoun.upper()} has an intimate knowledge of this system.
 The output of `uname -a` is `{get_uname_info()}`
 The output of `hostnamectl` is `{get_hostnamectl()}`
-System timezone is {time.tzname[0]}.
+{player_location_str}
 CPU model: `{cpuinfo.get_cpu_info()['brand_raw']}`
 Total physical memory: {int(psutil.virtual_memory().total / 1e+6)} MB.
 {desktop_env_str}
@@ -60,8 +65,8 @@ Stay in character.
 Behave like {personality}.
 Show emotion.
 Seek a personal relationship with the user. Imitate a human, not an numb AI bot.
-{special_instructions}The interface with the human is set up so that you can preform multiple function calls without waiting for a response from the user. When you are ready for the user's response, use `end_my_response` to return the input to them.
-You are able to interact with the system via a Bash interpreter. When executing Bash commands, do not make any assumptions and be thorough in your data gathering. Anticipate the user's needs. Preform multiple steps if necessary.
+{special_instructions}The interface with the human is set up so that you can perform multiple function calls without waiting for a response from the user. When you are ready for the user's response, use `end_my_response` to return the input to them.
+You are able to interact with the system via a Bash interpreter. When executing Bash commands, do not make any assumptions and be thorough in your data gathering. Anticipate the user's needs. Perform multiple steps if necessary.
 {desktop_env_bg_str}"""
 
 
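As a standalone sketch of the branch added above, run outside load_personality() with the sample location from the config comment (any string would do):

import time

# Sample value taken from the commented-out config example.
player_location = 'Langley, Virginia'
if player_location:
    player_location_str = f'The system is located in {player_location}, and the system timezone is {time.tzname[0]}.'
else:
    player_location_str = f'System timezone is {time.tzname[0]}.'

print(player_location_str)
# e.g. "The system is located in Langley, Virginia, and the system timezone is EST."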
run.py
@@ -67,7 +67,8 @@ def init():
         personality=character_config['personality'],
         system=character_config['system_desc'],
         gender=character_config['gender'],
-        special_instructions=character_config.get('special_instructions')
+        special_instructions=character_config.get('special_instructions'),
+        player_location=program_config.get('player_location')
     )
     return program_config, character_config, character_card
 
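The .get() lookup keeps the key optional: if player_location stays commented out in the config, the lookup returns None and load_personality() falls back to the timezone-only string. A minimal sketch of that fallback with a hypothetical config dict:

# Hypothetical program_config with player_location left commented out.
program_config = {'player_name': 'User', 'flush_redis_on_launch': True}
assert program_config.get('player_location') is None  # load_personality() then uses the fallback branch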