hf_text-generation-inference/server/text_generation/cache.py


from typing import Dict, Optional

from text_generation.models.types import Batch


class Cache:
    """In-memory store of active Batch objects, keyed by batch_id."""

    def __init__(self):
        self.cache: Dict[int, Batch] = {}

    def pop(self, batch_id: int) -> Optional[Batch]:
        # Remove and return the batch, or None if the id is not cached
        return self.cache.pop(batch_id, None)

    def set(self, entry: Batch):
        # Ignore None entries so callers can pass optional batches through
        if entry is not None:
            self.cache[entry.batch_id] = entry

    def delete(self, batch_id: int):
        # Raises KeyError if the id is not present
        del self.cache[batch_id]

    def clear(self):
        self.cache.clear()

    def __len__(self):
        return len(self.cache)
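
A minimal usage sketch, assuming only that Batch exposes a batch_id attribute; FakeBatch below is a hypothetical stand-in for illustration and is not part of the library.

# Hypothetical usage sketch (not part of the original file).
if __name__ == "__main__":
    from dataclasses import dataclass

    @dataclass
    class FakeBatch:  # illustrative stand-in for Batch
        batch_id: int

    cache = Cache()
    cache.set(FakeBatch(batch_id=0))  # store under its batch_id
    assert len(cache) == 1
    assert cache.pop(0) is not None   # removes and returns the batch
    assert cache.pop(0) is None       # missing ids return None instead of raising
    cache.set(FakeBatch(batch_id=1))
    cache.delete(1)                   # would raise KeyError for an unknown id
    cache.clear()
    assert len(cache) == 0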