2023-09-14 17:38:20 -06:00
|
|
|
import json
|
|
|
|
import pickle
|
2023-08-23 21:33:52 -06:00
|
|
|
import time
|
2023-09-14 17:38:20 -06:00
|
|
|
from uuid import uuid4
|
|
|
|
|
|
|
|
from redis import Redis
|
2023-08-23 20:12:38 -06:00
|
|
|
|
2023-09-30 19:41:50 -06:00
|
|
|
from llm_server.custom_redis import RedisCustom, redis
|
2023-10-02 02:05:15 -06:00
|
|
|
from llm_server.database.database import get_token_ratelimit
|
2023-08-29 13:46:41 -06:00
|
|
|
|
|
|
|
|
2023-09-28 03:44:30 -06:00
|
|
|
def increment_ip_count(client_ip: str, redis_key):
    """Add one to the per-IP counter stored in the Redis hash `redis_key`."""
    redis.hincrby(redis_key, client_ip, 1)
|
2023-08-29 13:46:41 -06:00
|
|
|
|
|
|
|
|
2023-09-28 03:44:30 -06:00
|
|
|
def decrement_ip_count(client_ip: str, redis_key):
    """Subtract one from the per-IP counter; drop the hash field once it
    reaches zero so stale IPs don't accumulate in the hash."""
    remaining = redis.hincrby(redis_key, client_ip, -1)
    if remaining <= 0:
        redis.hdel(redis_key, client_ip)
|
2023-08-27 23:48:10 -06:00
|
|
|
|
2023-08-23 20:12:38 -06:00
|
|
|
|
2023-09-14 17:38:20 -06:00
|
|
|
class RedisPriorityQueue:
    """Cross-process priority queue backed by a Redis sorted set.

    Entries are JSON-encoded tuples ``(item, event_id, selected_model)``
    stored in the 'queue' sorted set, scored by *negated* priority so that
    higher-priority requests pop first via ``zpopmin``. A companion hash
    ('queued_ip_count') tracks how many requests each client IP currently
    has queued, enforcing a per-IP simultaneous-request limit.
    """

    def __init__(self, name: str = 'priority_queue', db: int = 12):
        self.redis = RedisCustom(name, db=db)
        self.pubsub = self.redis.pubsub()
        self.pubsub.subscribe('events')

    def put(self, item, priority, selected_model):
        """Queue `item`; return a DataEvent to wait on, or None if rejected.

        `item[1]` is the client IP and `item[2]` the API token used to look
        up the per-IP simultaneous-request limit. Priority 0 bypasses the
        limit check.
        """
        # Check if the IP is already at its simultaneous-request limit.
        queued = self.redis.hget('queued_ip_count', item[1])
        queued = int(queued) if queued else 0
        _, simultaneous_ip = get_token_ratelimit(item[2])
        # Preserve original semantics: an IP with zero queued requests is
        # never rejected, even if its limit is zero or negative.
        if queued and queued >= simultaneous_ip and priority != 0:
            print(f'Rejecting request from {item[1]} - {queued} requests in progress.')
            return None  # reject the request

        event = DataEvent()
        self.redis.zadd('queue', {json.dumps((item, event.event_id, selected_model)): -priority})
        self.increment_ip_count(item[1], 'queued_ip_count')
        return event

    def get(self):
        """Block until an entry is available, then pop and return it.

        Returns the decoded ``(item, event_id, selected_model)`` tuple and
        decrements the owning IP's queued counter. Polls every 100 ms.
        (Leftover debug hget/print round-trips from the original were
        removed here.)
        """
        while True:
            data = self.redis.zpopmin('queue')
            if data:
                entry = json.loads(data[0][0])
                client_ip = entry[0][1]
                self.decrement_ip_count(client_ip, 'queued_ip_count')
                return entry
            time.sleep(0.1)  # wait for something to be added to the queue

    def increment_ip_count(self, client_ip: str, redis_key):
        """Add one to `client_ip`'s counter in the hash `redis_key`."""
        self.redis.hincrby(redis_key, client_ip, 1)

    def decrement_ip_count(self, client_ip: str, redis_key):
        """Subtract one; remove the field once it reaches zero or below."""
        new_count = self.redis.hincrby(redis_key, client_ip, -1)
        if new_count <= 0:
            # NOTE(review): hdel is called with a list here, while the
            # module-level helper passes a bare string — confirm which
            # signature RedisCustom.hdel actually expects.
            self.redis.hdel(redis_key, [client_ip])

    def __len__(self):
        """Total number of queued requests across all models."""
        return self.redis.zcard('queue')

    def len(self, model_name):
        """Number of queued requests destined for `model_name`.

        Bug fix: the original fetched the queue with zrange but never
        counted or returned anything (it always returned None).
        """
        return sum(
            1 for entry in self.redis.zrange('queue', 0, -1)
            if json.loads(entry)[2] == model_name
        )

    def get_queued_ip_count(self, client_ip: str):
        """Current queued-request count for `client_ip` (0 when absent).

        Bug fix: the original returned 0 in both branches, discarding the
        value it had just read from Redis.
        """
        q = self.redis.hget('queued_ip_count', client_ip)
        return int(q) if q else 0

    def flush(self):
        """Delete all state held by this queue's RedisCustom namespace."""
        self.redis.flush()
|
|
|
|
|
2023-08-23 20:12:38 -06:00
|
|
|
|
2023-09-14 17:38:20 -06:00
|
|
|
class DataEvent:
    """One-shot cross-process event carried over Redis pub/sub.

    A producer publishes a pickled payload on the channel named by
    ``event_id``; a consumer that subscribed to the same id blocks in
    :meth:`wait` until that payload arrives.
    """

    def __init__(self, event_id=None):
        # Generate a fresh channel id unless attaching to an existing event.
        self.event_id = event_id or str(uuid4())
        self.redis = Redis(host='localhost', port=6379, db=14)
        self.pubsub = self.redis.pubsub()
        self.pubsub.subscribe(self.event_id)

    def set(self, data):
        """Publish `data` (pickled) to every listener on this event."""
        self.redis.publish(self.event_id, pickle.dumps(data))

    def wait(self):
        """Block until a payload is published, then return it unpickled.

        NOTE(review): pickle.loads is only safe here because both ends of
        the channel are this same trusted application.
        """
        for message in self.pubsub.listen():
            if message['type'] == 'message':
                return pickle.loads(message['data'])
|
|
|
|
|
|
|
|
|
|
|
|
priority_queue = RedisPriorityQueue()
|
2023-09-28 08:47:39 -06:00
|
|
|
|
|
|
|
|
2023-10-02 20:53:08 -06:00
|
|
|
def update_active_workers(key: str, operation: str):
    """Adjust the active-worker gauge for `key`.

    `operation` is 'incr' or 'decr'; any other value is a no-op. After a
    decrement, a negative gauge is clamped back up to zero.
    """
    counter = f'active_gen_workers:{key}'
    if operation == 'incr':
        redis.incr(counter)
    elif operation == 'decr':
        redis.decr(counter)
        # Guard against unmatched decrements driving the gauge negative.
        if redis.get(counter, default=0, dtype=int) < 0:
            redis.set(counter, 0)
|
|
|
|
|
|
|
|
|
2023-09-30 19:41:50 -06:00
|
|
|
def incr_active_workers(selected_model: str, backend_url: str):
    """Bump the active-worker gauges for both the model and its backend."""
    for gauge_key in (selected_model, backend_url):
        update_active_workers(gauge_key, 'incr')
|
2023-09-30 19:41:50 -06:00
|
|
|
|
2023-09-28 08:47:39 -06:00
|
|
|
|
2023-09-30 19:41:50 -06:00
|
|
|
def decr_active_workers(selected_model: str, backend_url: str):
    """Drop the active-worker gauges for both the model and its backend."""
    for gauge_key in (selected_model, backend_url):
        update_active_workers(gauge_key, 'decr')
|