increase tokenization chunk size

This commit is contained in:
Cyberes 2023-10-16 17:59:21 -06:00
parent f421436048
commit 19a193b792
1 changed file with 2 additions and 2 deletions

View File

@ -22,8 +22,8 @@ def tokenize(prompt: str, backend_url: str) -> int:
tokenizer = tiktoken.get_encoding("cl100k_base")
-    # Split the prompt into 300 character chunks
-    chunk_size = 300
+    # Split the prompt into 1000 character chunks
+    chunk_size = 1000
chunks = [prompt[i:i + chunk_size] for i in range(0, len(prompt), chunk_size)]
# Define a function to send a chunk to the server