from text_generation.models.model import Model
from text_generation.models.causal_lm import CausalLM
from text_generation.models.bloom import BLOOMSharded
from text_generation.models.seq2seq_lm import Seq2SeqLM

__all__ = ["Model", "BLOOMSharded", "CausalLM", "Seq2SeqLM"]


def get_model(model_name: str, sharded: bool, quantize: bool) -> Model:
    """Instantiate the model implementation that matches `model_name`."""
    if model_name.startswith("bigscience/bloom"):
        # BLOOM checkpoints have a dedicated implementation that can be
        # sharded across GPUs.
        if sharded:
            return BLOOMSharded(model_name, quantize=quantize)
        else:
            return CausalLM(model_name, quantize=quantize)
    else:
        if sharded:
            raise ValueError("sharded is not supported for AutoModel")
        # Generic fallback: try loading the checkpoint as a decoder-only
        # model first; if that fails, assume it is an encoder-decoder model.
        try:
            return CausalLM(model_name, quantize=quantize)
        except Exception:
            return Seq2SeqLM(model_name, quantize=quantize)
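

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original module: exercises the
    # dispatch logic in get_model above. The checkpoint names are illustrative
    # assumptions, and constructing a model downloads weights from the
    # Hugging Face Hub, so this requires network access or a local cache.
    model = get_model("bigscience/bloom-560m", sharded=False, quantize=False)
    print(type(model).__name__)  # unsharded "bigscience/bloom*" path -> CausalLM

    model = get_model("t5-small", sharded=False, quantize=False)
    print(type(model).__name__)  # causal load fails, falls back -> Seq2SeqLM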