2000 chunk size
parent 19a193b792
commit 20047fa0e4
@@ -22,8 +22,8 @@ def tokenize(prompt: str, backend_url: str) -> int:
     tokenizer = tiktoken.get_encoding("cl100k_base")
 
-    # Split the prompt into 1000 character chunks
-    chunk_size = 1000
+    # Split the prompt into 2000 character chunks
+    chunk_size = 2000
     chunks = [prompt[i:i + chunk_size] for i in range(0, len(prompt), chunk_size)]
 
     # Define a function to send a chunk to the server
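For context, a minimal sketch of how the surrounding tokenize function might fit together, reconstructed only from the lines visible in this hunk. The chunking logic, the cl100k_base encoder, and the "send a chunk to the server" comment come from the diff; the /tokenize endpoint path, the JSON request/response shape, and the local fallback are illustrative assumptions, not confirmed by this commit.

import requests
import tiktoken

def tokenize(prompt: str, backend_url: str) -> int:
    tokenizer = tiktoken.get_encoding("cl100k_base")

    # Split the prompt into 2000 character chunks (the value changed by this commit)
    chunk_size = 2000
    chunks = [prompt[i:i + chunk_size] for i in range(0, len(prompt), chunk_size)]

    # Define a function to send a chunk to the server
    def count_chunk(chunk: str) -> int:
        try:
            # Hypothetical endpoint and payload shape -- assumptions for illustration only.
            resp = requests.post(f"{backend_url}/tokenize", json={"text": chunk}, timeout=10)
            resp.raise_for_status()
            return int(resp.json()["count"])
        except Exception:
            # Assumed fallback: count locally with the cl100k_base encoding.
            return len(tokenizer.encode(chunk))

    return sum(count_chunk(chunk) for chunk in chunks)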