increase tokenization chunk size
This commit is contained in:
parent f421436048
commit 19a193b792
@@ -22,8 +22,8 @@ def tokenize(prompt: str, backend_url: str) -> int:
     tokenizer = tiktoken.get_encoding("cl100k_base")

-    # Split the prompt into 300 character chunks
-    chunk_size = 300
+    # Split the prompt into 1000 character chunks
+    chunk_size = 1000
     chunks = [prompt[i:i + chunk_size] for i in range(0, len(prompt), chunk_size)]

     # Define a function to send a chunk to the server
Reference in New Issue