2023-01-20 04:24:39 -07:00
|
|
|
import pytest
|
|
|
|
|
2023-03-07 10:52:22 -07:00
|
|
|
from text_generation_server.pb import generate_pb2
|
|
|
|
from text_generation_server.models.causal_lm import CausalLMBatch
|
|
|
|
from text_generation_server.models.santacoder import SantaCoder
|
2023-01-20 04:24:39 -07:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture(scope="session")
def default_santacoder():
    """Session-scoped SantaCoder model loaded once from the bigcode checkpoint."""
    model = SantaCoder("bigcode/santacoder")
    return model
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def default_pb_request(default_pb_parameters, default_pb_stop_parameters):
    """Protobuf Request for a plain ``def`` prompt with shared default parameters."""
    request_fields = {
        "id": 0,
        "inputs": "def",
        "truncate": 100,
        "parameters": default_pb_parameters,
        "stopping_parameters": default_pb_stop_parameters,
    }
    return generate_pb2.Request(**request_fields)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def default_pb_batch(default_pb_request):
    """Size-one protobuf Batch wrapping the default request."""
    batch = generate_pb2.Batch(id=0, requests=[default_pb_request], size=1)
    return batch
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def default_fim_pb_request(default_pb_parameters, default_pb_stop_parameters):
    """Protobuf Request using SantaCoder's fill-in-the-middle (FIM) prompt format."""
    request_fields = {
        "id": 0,
        "inputs": "<fim-prefix>def<fim-suffix>world<fim-middle>",
        "truncate": 100,
        "parameters": default_pb_parameters,
        "stopping_parameters": default_pb_stop_parameters,
    }
    return generate_pb2.Request(**request_fields)
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.fixture
def default_fim_pb_batch(default_fim_pb_request):
    """Size-one protobuf Batch wrapping the FIM request."""
    batch = generate_pb2.Batch(id=0, requests=[default_fim_pb_request], size=1)
    return batch
|
|
|
|
|
|
|
|
|
2023-01-20 07:35:22 -07:00
|
|
|
@pytest.mark.skip
def test_santacoder_generate_token_completion(default_santacoder, default_pb_batch):
    """Run greedy generation on the plain prompt until the stopping criteria fire.

    Checks that the batch survives every intermediate step, is dropped on the
    final step, and that the single completed generation matches the reference
    text and token count.
    """
    batch = CausalLMBatch.from_pb(
        default_pb_batch, default_santacoder.tokenizer, default_santacoder.device
    )
    next_batch = batch
    max_new_tokens = batch.stopping_criterias[0].max_new_tokens

    # Every step except the last keeps the request in flight.
    for _ in range(max_new_tokens - 1):
        generations, next_batch = default_santacoder.generate_token(next_batch)
        assert len(generations) == len(next_batch)

    # The final step exhausts max_new_tokens and retires the batch.
    generations, next_batch = default_santacoder.generate_token(next_batch)
    assert next_batch is None

    assert len(generations) == 1
    completed = generations[0]
    assert completed.generated_text.text == " test_get_all_users_with_"
    assert completed.request_id == batch.requests[0].id
    assert completed.generated_text.generated_tokens == max_new_tokens
|
|
|
|
|
|
|
|
|
2023-01-20 07:35:22 -07:00
|
|
|
@pytest.mark.skip
def test_fim_santacoder_generate_token_completion(
    default_santacoder, default_fim_pb_batch
):
    """Run greedy generation on the fill-in-the-middle prompt to completion.

    Mirrors the plain-prompt test: the batch persists through intermediate
    steps, is dropped after the last token, and the finished generation matches
    the reference FIM completion and token count.
    """
    batch = CausalLMBatch.from_pb(
        default_fim_pb_batch, default_santacoder.tokenizer, default_santacoder.device
    )
    next_batch = batch
    max_new_tokens = batch.stopping_criterias[0].max_new_tokens

    # Every step except the last keeps the request in flight.
    for _ in range(max_new_tokens - 1):
        generations, next_batch = default_santacoder.generate_token(next_batch)
        assert len(generations) == len(next_batch)

    # The final step exhausts max_new_tokens and retires the batch.
    generations, next_batch = default_santacoder.generate_token(next_batch)
    assert next_batch is None

    assert len(generations) == 1
    completed = generations[0]
    assert (
        completed.generated_text.text
        == """ineProperty(exports, "__esModule", { value"""
    )
    assert completed.request_id == batch.requests[0].id
    assert completed.generated_text.generated_tokens == max_new_tokens
|