Fixing the update to outlines.

Nicolas Patry 2024-09-14 15:13:00 +02:00
parent c4bbe06bf1
commit 68f7d75f23
No known key found for this signature in database
GPG Key ID: 64AF4752B2967863
3 changed files with 9 additions and 11 deletions

.gitignore

@@ -22,3 +22,4 @@ server/fbgemmm
 .direnv/
 .venv/
+out/

@@ -1,23 +1,23 @@
 {
   "choices": [
     {
-      "finish_reason": "eos_token",
+      "finish_reason": "stop",
       "index": 0,
       "logprobs": null,
       "message": {
-        "content": "{\n \"temperature\": [\n 35,\n 34,\n 36\n ],\n \"unit\": \"°c\"\n}",
+        "content": "{ \"temperature\": [ 26, 30, 33, 29 ] ,\"unit\": \"Fahrenheit\" }",
         "role": "assistant"
       }
     }
   ],
-  "created": 1718044128,
+  "created": 1726319532,
   "id": "",
   "model": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
-  "object": "text_completion",
-  "system_fingerprint": "2.0.5-dev0-native",
+  "object": "chat.completion",
+  "system_fingerprint": "2.2.1-dev0-native",
   "usage": {
-    "completion_tokens": 39,
+    "completion_tokens": 36,
     "prompt_tokens": 136,
-    "total_tokens": 175
+    "total_tokens": 172
   }
 }

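For reference, the "content" field in the updated snapshot above is itself a JSON-encoded string; the point of the grammar-constrained generation being tested is that it decodes cleanly. A minimal standalone check of that property (plain Python, not part of the repository):

import json

# The "content" string from the updated snapshot above.
content = '{ "temperature": [ 26, 30, 33, 29 ] ,"unit": "Fahrenheit" }'

# Grammar-constrained output should decode as valid JSON with the expected keys,
# even though the spacing inside the string is irregular.
data = json.loads(content)
assert set(data) == {"temperature", "unit"}
assert data["unit"] == "Fahrenheit"
assert data["temperature"] == [26, 30, 33, 29]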

@@ -56,10 +56,7 @@ async def test_grammar_response_format_llama_json(llama_grammar, response_snapshot):
     called = chat_completion["choices"][0]["message"]["content"]
     assert response.status_code == 200
-    assert (
-        called
-        == '{\n "temperature": [\n 35,\n 34,\n 36\n ],\n "unit": "°c"\n}'
-    )
+    assert called == '{ "temperature": [ 26, 30, 33, 29 ] ,"unit": "Fahrenheit" }'
     assert chat_completion == response_snapshot
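The hunk above only shows the updated assertion; the request itself is built earlier in the test. As a rough, self-contained sketch of the kind of call being exercised, assuming a TGI server on localhost:8080, its OpenAI-compatible /v1/chat/completions route, and a hypothetical weather schema and prompt (the real test's payload shape, schema, and prompt may differ):

import requests

# Hypothetical schema for the weather reply; the real test defines its own.
weather_schema = {
    "type": "object",
    "properties": {
        "unit": {"type": "string"},
        "temperature": {"type": "array", "items": {"type": "integer"}},
    },
    "required": ["unit", "temperature"],
}

# Assumed endpoint and response_format payload for grammar-constrained chat
# completions; check the docs of the TGI version you run before relying on it.
response = requests.post(
    "http://localhost:8080/v1/chat/completions",
    json={
        "model": "tgi",
        "messages": [
            {"role": "user", "content": "What will the temperatures be this week?"}
        ],
        "response_format": {"type": "json_object", "value": weather_schema},
        "max_tokens": 100,
        "seed": 42,
    },
    timeout=60,
)
chat_completion = response.json()
called = chat_completion["choices"][0]["message"]["content"]

assert response.status_code == 200
# "called" should be a JSON string matching the schema, as in the snapshot above.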