From 68f7d75f231d837be3116c838394f217611196a9 Mon Sep 17 00:00:00 2001
From: Nicolas Patry
Date: Sat, 14 Sep 2024 15:13:00 +0200
Subject: [PATCH] Fixing the update to outlines.

---
 .gitignore                                            |  1 +
 .../test_grammar_response_format_llama_json.json      | 14 +++++++-------
 .../models/test_grammar_response_format_llama.py      |  5 +----
 3 files changed, 9 insertions(+), 11 deletions(-)

diff --git a/.gitignore b/.gitignore
index edcc2f89..5fb69a7d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,3 +22,4 @@ server/fbgemmm
 
 .direnv/
 .venv/
+out/

diff --git a/integration-tests/models/__snapshots__/test_grammar_response_format_llama/test_grammar_response_format_llama_json.json b/integration-tests/models/__snapshots__/test_grammar_response_format_llama/test_grammar_response_format_llama_json.json
index 83390832..4f5c50da 100644
--- a/integration-tests/models/__snapshots__/test_grammar_response_format_llama/test_grammar_response_format_llama_json.json
+++ b/integration-tests/models/__snapshots__/test_grammar_response_format_llama/test_grammar_response_format_llama_json.json
@@ -1,23 +1,23 @@
 {
   "choices": [
     {
-      "finish_reason": "eos_token",
+      "finish_reason": "stop",
       "index": 0,
       "logprobs": null,
       "message": {
-        "content": "{\n \"temperature\": [\n 35,\n 34,\n 36\n ],\n \"unit\": \"°c\"\n}",
+        "content": "{ \"temperature\": [ 26, 30, 33, 29 ] ,\"unit\": \"Fahrenheit\" }",
         "role": "assistant"
       }
     }
   ],
-  "created": 1718044128,
+  "created": 1726319532,
   "id": "",
   "model": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
-  "object": "text_completion",
-  "system_fingerprint": "2.0.5-dev0-native",
+  "object": "chat.completion",
+  "system_fingerprint": "2.2.1-dev0-native",
   "usage": {
-    "completion_tokens": 39,
+    "completion_tokens": 36,
     "prompt_tokens": 136,
-    "total_tokens": 175
+    "total_tokens": 172
   }
 }

diff --git a/integration-tests/models/test_grammar_response_format_llama.py b/integration-tests/models/test_grammar_response_format_llama.py
index 25bf9d98..6fa3d8bd 100644
--- a/integration-tests/models/test_grammar_response_format_llama.py
+++ b/integration-tests/models/test_grammar_response_format_llama.py
@@ -56,10 +56,7 @@ async def test_grammar_response_format_llama_json(llama_grammar, response_snapsh
     called = chat_completion["choices"][0]["message"]["content"]
 
     assert response.status_code == 200
-    assert (
-        called
-        == '{\n "temperature": [\n 35,\n 34,\n 36\n ],\n "unit": "°c"\n}'
-    )
+    assert called == '{ "temperature": [ 26, 30, 33, 29 ] ,"unit": "Fahrenheit" }'
     assert chat_completion == response_snapshot
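
For context, a minimal sketch of the kind of request the updated test exercises against the OpenAI-compatible /v1/chat/completions route. Only the response fields asserted above (choices[0].message.content, plus the snapshot's "object": "chat.completion" and "finish_reason": "stop") are confirmed by this diff; the server URL, prompt, seed, schema, and the exact response_format payload below are illustrative assumptions, not taken from the patch.

    import requests

    # Hypothetical local TGI instance serving TinyLlama/TinyLlama-1.1B-Chat-v1.0.
    base_url = "http://localhost:8080"

    response = requests.post(
        f"{base_url}/v1/chat/completions",
        json={
            "model": "tgi",
            "messages": [
                {
                    "role": "user",
                    "content": "What is the weather forecast for the next few days?",
                }
            ],
            "seed": 42,
            "max_tokens": 500,
            # Assumed shape for schema-guided JSON decoding; check the TGI docs
            # for the exact response_format contract of your server version.
            "response_format": {
                "type": "json_object",
                "value": {
                    "type": "object",
                    "properties": {
                        "unit": {"type": "string"},
                        "temperature": {"type": "array", "items": {"type": "integer"}},
                    },
                    "required": ["unit", "temperature"],
                },
            },
        },
    )

    chat_completion = response.json()
    # These fields match the updated snapshot: "object" is "chat.completion"
    # and "finish_reason" is "stop", with the guided JSON in message.content.
    print(chat_completion["choices"][0]["message"]["content"])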