From 49257b4abfc039b9d0b9a2cadfe2b82b3dcf5dc5 Mon Sep 17 00:00:00 2001
From: Patrick von Platen
Date: Thu, 9 Jun 2022 12:36:37 +0200
Subject: [PATCH] finish transformers removal

---
 setup.py                             | 51 ++++++++-------------
 src/diffusers/configuration_utils.py | 67 ++++++++++++++--------------
 src/diffusers/modeling_utils.py      | 16 +++----
 src/diffusers/pipeline_utils.py      |  5 ++-
 src/diffusers/utils/__init__.py      |  1 +
 tests/test_modeling_utils.py         | 40 +++++++++++++++++
 6 files changed, 104 insertions(+), 76 deletions(-)

diff --git a/setup.py b/setup.py
index 17a5dc36..96d1f309 100644
--- a/setup.py
+++ b/setup.py
@@ -1,4 +1,4 @@
-# Copyright 2021 The HuggingFace Team. All rights reserved.
+# Copyright 2022 The HuggingFace Team. All rights reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -52,11 +52,11 @@ To create the package for pypi.
    twine upload dist/* -r pypitest --repository-url=https://test.pypi.org/legacy/
 
    Check that you can install it in a virtualenv by running:
-   pip install -i https://testpypi.python.org/pypi transformers
+   pip install -i https://testpypi.python.org/pypi diffusers
 
    Check you can run the following commands:
-   python -c "from transformers import pipeline; classifier = pipeline('text-classification'); print(classifier('What a nice release'))"
-   python -c "from transformers import *"
+   python -c "from diffusers import __version__; print(__version__)"
+   python -c "from diffusers import *"
 
 9. Upload the final version to actual pypi:
    twine upload dist/* -r pypi
@@ -77,36 +77,21 @@ from setuptools import find_packages, setup
 # 2. once modified, run: `make deps_table_update` to update src/diffusers/dependency_versions_table.py
 _deps = [
     "Pillow",
-    "accelerate>=0.9.0",
     "black~=22.0,>=22.3",
-    "codecarbon==1.2.0",
-    "dataclasses",
-    "datasets",
-    "GitPython<3.1.19",
-    "hf-doc-builder>=0.3.0",
-    "huggingface-hub>=0.1.0,<1.0",
-    "importlib_metadata",
+    "filelock",
+    "flake8>=3.8.3",
+    "huggingface-hub",
     "isort>=5.5.4",
-    "numpy>=1.17",
+    "numpy",
     "pytest",
-    "pytest-timeout",
-    "pytest-xdist",
-    "python>=3.7.0",
-    "regex!=2019.12.17",
     "requests",
-    "sagemaker>=2.31.0",
-    "tokenizers>=0.11.1,!=0.11.3,<0.13",
     "torch>=1.4",
-    "torchaudio",
-    "tqdm>=4.27",
-    "unidic>=1.0.2",
-    "unidic_lite>=1.0.7",
-    "uvicorn",
+    "torchvision",
 ]
 
 # this is a lookup table with items like:
 #
-# tokenizers: "tokenizers==0.9.4"
+# huggingface-hub: "huggingface-hub==0.8.0"
 # packaging: "packaging"
 #
 # some of the values are versioned whereas others aren't.
@@ -176,15 +161,17 @@ extras["quality"] = ["black ~= 22.0", "isort >= 5.5.4", "flake8 >= 3.8.3"]
 extras["docs"] = []
 extras["test"] = [
     "pytest",
-    "pytest-xdist",
-    "pytest-subtests",
-    "datasets",
-    "transformers",
 ]
 extras["dev"] = extras["quality"] + extras["test"]
 
-extras["sagemaker"] = [
-    "sagemaker",  # boto3 is a required package in sagemaker
+install_requires = [
+    deps["filelock"],
+    deps["huggingface-hub"],
+    deps["numpy"],
+    deps["requests"],
+    deps["torch"],
+    deps["torchvision"],
+    deps["Pillow"],
 ]
 
 setup(
@@ -201,7 +188,7 @@ setup(
     package_dir={"": "src"},
     packages=find_packages("src"),
     python_requires=">=3.6.0",
-    install_requires=["numpy>=1.17", "packaging>=20.0", "pyyaml", "torch>=1.4.0"],
+    install_requires=install_requires,
     extras_require=extras,
     classifiers=[
         "Development Status :: 5 - Production/Stable",
diff --git a/src/diffusers/configuration_utils.py b/src/diffusers/configuration_utils.py
index 1dfc5aba..bd1550c1 100644
--- a/src/diffusers/configuration_utils.py
+++ b/src/diffusers/configuration_utils.py
@@ -57,6 +57,8 @@ class ConfigMixin:
         if self.config_name is None:
             raise NotImplementedError(f"Make sure that {self.__class__} has defined a class name `config_name`")
         kwargs["_class_name"] = self.__class__.__name__
+        kwargs["_diffusers_version"] = __version__
+
         for key, value in kwargs.items():
             try:
                 setattr(self, key, value)
@@ -91,6 +93,21 @@ class ConfigMixin:
         self.to_json_file(output_config_file)
         logger.info(f"ConfigMixinuration saved in {output_config_file}")
 
+    @classmethod
+    def from_config(cls, pretrained_model_name_or_path: Union[str, os.PathLike], return_unused_kwargs=False, **kwargs):
+        config_dict = cls.get_config_dict(
+            pretrained_model_name_or_path=pretrained_model_name_or_path, **kwargs
+        )
+
+        init_dict, unused_kwargs = cls.extract_init_dict(config_dict, **kwargs)
+
+        model = cls(**init_dict)
+
+        if return_unused_kwargs:
+            return model, unused_kwargs
+        else:
+            return model
+
     @classmethod
     def get_config_dict(
         cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs
@@ -107,6 +124,12 @@ class ConfigMixin:
 
         pretrained_model_name_or_path = str(pretrained_model_name_or_path)
 
+        if cls.config_name is None:
+            raise ValueError(
+                "`self.config_name` is not defined. Note that one should not load a config from "
+                "`ConfigMixin`. Please make sure to define `config_name` in a class inheriting from `ConfigMixin`"
+            )
+
         if os.path.isfile(pretrained_model_name_or_path):
             config_file = pretrained_model_name_or_path
         elif os.path.isdir(pretrained_model_name_or_path):
@@ -168,13 +191,13 @@ class ConfigMixin:
                     f"containing a {cls.config_name} file"
                 )
 
-            try:
-                # Load config dict
-                config_dict = cls._dict_from_json_file(config_file)
-            except (json.JSONDecodeError, UnicodeDecodeError):
-                raise EnvironmentError(
-                    f"It looks like the config file at '{config_file}' is not a valid JSON file."
-                )
+        try:
+            # Load config dict
+            config_dict = cls._dict_from_json_file(config_file)
+        except (json.JSONDecodeError, UnicodeDecodeError):
+            raise EnvironmentError(
+                f"It looks like the config file at '{config_file}' is not a valid JSON file."
+            )
 
         return config_dict
 
@@ -200,21 +223,6 @@ class ConfigMixin:
 
         return init_dict, unused_kwargs
 
-    @classmethod
-    def from_config(cls, pretrained_model_name_or_path: Union[str, os.PathLike], return_unused_kwargs=False, **kwargs):
-        config_dict = cls.get_config_dict(
-            pretrained_model_name_or_path=pretrained_model_name_or_path, **kwargs
-        )
-
-        init_dict, unused_kwargs = cls.extract_init_dict(config_dict, **kwargs)
-
-        model = cls(**init_dict)
-
-        if return_unused_kwargs:
-            return model, unused_kwargs
-        else:
-            return model
-
     @classmethod
     def _dict_from_json_file(cls, json_file: Union[str, os.PathLike]):
         with open(json_file, "r", encoding="utf-8") as reader:
@@ -227,18 +235,9 @@ class ConfigMixin:
     def __repr__(self):
         return f"{self.__class__.__name__} {self.to_json_string()}"
 
-    def to_dict(self) -> Dict[str, Any]:
-        """
-        Serializes this instance to a Python dictionary.
-
-        Returns:
-            `Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance.
-        """
-        output = copy.deepcopy(self.__dict__)
-
-        # Diffusion version when serializing the model
-        output["diffusers_version"] = __version__
-
+    @property
+    def config(self) -> Dict[str, Any]:
+        output = copy.deepcopy(self._dict_to_save)
         return output
 
     def to_json_string(self) -> str:
diff --git a/src/diffusers/modeling_utils.py b/src/diffusers/modeling_utils.py
index 47f2175a..20870e34 100644
--- a/src/diffusers/modeling_utils.py
+++ b/src/diffusers/modeling_utils.py
@@ -401,14 +401,14 @@ class ModelMixin(torch.nn.Module):
             )
 
         # restore default dtype
-            state_dict = load_state_dict(model_file)
-            model, missing_keys, unexpected_keys, mismatched_keys, error_msgs = cls._load_pretrained_model(
-                model,
-                state_dict,
-                model_file,
-                pretrained_model_name_or_path,
-                ignore_mismatched_sizes=ignore_mismatched_sizes,
-            )
+        state_dict = load_state_dict(model_file)
+        model, missing_keys, unexpected_keys, mismatched_keys, error_msgs = cls._load_pretrained_model(
+            model,
+            state_dict,
+            model_file,
+            pretrained_model_name_or_path,
+            ignore_mismatched_sizes=ignore_mismatched_sizes,
+        )
 
         # Set model in evaluation mode to deactivate DropOut modules by default
         model.eval()
diff --git a/src/diffusers/pipeline_utils.py b/src/diffusers/pipeline_utils.py
index fb72e2c6..c7637925 100644
--- a/src/diffusers/pipeline_utils.py
+++ b/src/diffusers/pipeline_utils.py
@@ -67,11 +67,12 @@ class DiffusionPipeline(ConfigMixin):
     def save_pretrained(self, save_directory: Union[str, os.PathLike]):
         self.save_config(save_directory)
 
-        model_index_dict = self._dict_to_save
+        model_index_dict = self.config
         model_index_dict.pop("_class_name")
+        model_index_dict.pop("_diffusers_version")
         model_index_dict.pop("_module")
 
-        for name, (library_name, class_name) in self._dict_to_save.items():
+        for name, (library_name, class_name) in model_index_dict.items():
             importable_classes = LOADABLE_CLASSES[library_name]
 
             # TODO: Suraj
diff --git a/src/diffusers/utils/__init__.py b/src/diffusers/utils/__init__.py
index 0f4c874e..45b9f64e 100644
--- a/src/diffusers/utils/__init__.py
+++ b/src/diffusers/utils/__init__.py
@@ -19,6 +19,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from requests.exceptions import HTTPError
+import os
 
 hf_cache_home = os.path.expanduser(
     os.getenv("HF_HOME", os.path.join(os.getenv("XDG_CACHE_HOME", "~/.cache"), "huggingface"))
diff --git a/tests/test_modeling_utils.py b/tests/test_modeling_utils.py
index 0b7dece2..2e19cd26 100755
--- a/tests/test_modeling_utils.py
+++ b/tests/test_modeling_utils.py
@@ -24,6 +24,7 @@ import torch
 
 from diffusers import GaussianDDPMScheduler, UNetModel
 from diffusers.pipeline_utils import DiffusionPipeline
+from diffusers.configuration_utils import ConfigMixin
 from models.vision.ddpm.modeling_ddpm import DDPM
 
 
@@ -77,6 +78,45 @@ def floats_tensor(shape, scale=1.0, rng=None, name=None):
     return torch.tensor(data=values, dtype=torch.float).view(shape).contiguous()
 
 
+class ConfigTester(unittest.TestCase):
+    def test_load_not_from_mixin(self):
+        with self.assertRaises(ValueError):
+            ConfigMixin.from_config("dummy_path")
+
+    def test_save_load(self):
+
+        class SampleObject(ConfigMixin):
+            config_name = "config.json"
+
+            def __init__(
+                self,
+                a=2,
+                b=5,
+                c=(2, 5),
+                d="for diffusion",
+                e=[1, 3],
+            ):
+                self.register(a=a, b=b, c=c, d=d, e=e)
+
+        obj = SampleObject()
+        config = obj.config
+
+        assert config["a"] == 2
+        assert config["b"] == 5
+        assert config["c"] == (2, 5)
+        assert config["d"] == "for diffusion"
+        assert config["e"] == [1, 3]
+
+        with tempfile.TemporaryDirectory() as tmpdirname:
+            obj.save_config(tmpdirname)
+            new_obj = SampleObject.from_config(tmpdirname)
+            new_config = new_obj.config
+
+        assert config.pop("c") == (2, 5)  # instantiated as tuple
+        assert new_config.pop("c") == [2, 5]  # saved & loaded as list because of json
+        assert config == new_config
+
+
 class ModelTesterMixin(unittest.TestCase):
     @property
     def dummy_input(self):
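
A minimal usage sketch of the configuration API this patch settles on. The `MyScheduler` class, its `scheduler_config.json` file name, and its arguments are illustrative stand-ins modeled on the `SampleObject` test above; `config_name`, `register`, `config`, `save_config`, and `from_config` come from `ConfigMixin` as changed in this patch:

    import tempfile

    from diffusers.configuration_utils import ConfigMixin


    class MyScheduler(ConfigMixin):
        # `config_name` is the JSON file name used by save_config()/from_config().
        config_name = "scheduler_config.json"

        def __init__(self, timesteps=1000, beta_start=0.0001):
            # Record the init arguments; they show up in `self.config` and in the
            # saved JSON alongside `_class_name` and `_diffusers_version`.
            self.register(timesteps=timesteps, beta_start=beta_start)


    scheduler = MyScheduler(timesteps=50)
    assert scheduler.config["timesteps"] == 50

    with tempfile.TemporaryDirectory() as tmpdir:
        scheduler.save_config(tmpdir)               # writes scheduler_config.json
        reloaded = MyScheduler.from_config(tmpdir)  # rebuilds the object from the JSON
        assert reloaded.config["timesteps"] == 50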
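
The new `install_requires` block in setup.py indexes a `deps` lookup table that is derived from `_deps` but not shown in this diff. A sketch of that derivation, following the pattern the setup.py comment describes (the exact regex and helper code in the real file may differ):

    import re

    _deps = ["Pillow", "huggingface-hub", "numpy", "requests", "torch>=1.4", "torchvision"]

    # Map a bare package name to its full requirement string,
    # e.g. {"torch": "torch>=1.4", "numpy": "numpy"}.
    deps = {b: a for a, b in (re.findall(r"^(([^!=<>~]+)(?:[!=<>~].*)?$)", x)[0] for x in _deps)}

    install_requires = [deps["torch"], deps["numpy"], deps["Pillow"]]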