# coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import os
import re


# All paths are set with the intent you should run this script from the root of the repo with the command
# python utils/check_dummies.py
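#
# The script checks that the autogenerated dummy objects under src/diffusers/utils (e.g. dummy_pt_objects.py)
# stay in sync with the backend-guarded imports declared in src/diffusers/__init__.py.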
PATH_TO_DIFFUSERS = "src/diffusers"

# Matches is_xxx_available()
_re_backend = re.compile(r"is\_([a-z_]*)_available\(\)")
# Matches from xxx import bla
_re_single_line_import = re.compile(r"\s+from\s+\S*\s+import\s+([^\(\s].*)\n")
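# e.g. "    from .models import UNet2DModel" -> captures "UNet2DModel"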


DUMMY_CONSTANT = """
{0} = None
"""


DUMMY_CLASS = """
class {0}(metaclass=DummyObject):
    _backends = {1}

    def __init__(self, *args, **kwargs):
        requires_backends(self, {1})

    @classmethod
    def from_config(cls, *args, **kwargs):
        requires_backends(cls, {1})

    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        requires_backends(cls, {1})
"""


DUMMY_FUNCTION = """
def {0}(*args, **kwargs):
    requires_backends({0}, {1})
"""


def find_backend(line):
    """Find one (or more) backends in a code line of the init."""
    backends = _re_backend.findall(line)
    if len(backends) == 0:
        return None

    return "_and_".join(backends)


def read_init():
    """Read the init and extract the objects guarded by backend-availability checks."""
    with open(os.path.join(PATH_TO_DIFFUSERS, "__init__.py"), "r", encoding="utf-8", newline="\n") as f:
        lines = f.readlines()

    # Start at the top of the file and collect the backend-guarded imports.
    line_index = 0
    backend_specific_objects = {}
    # Go through to the end of the file
    while line_index < len(lines):
        # If the line is an `if is_xxx_available()` check, grab all the objects it guards.
        backend = find_backend(lines[line_index])
        if backend is not None:
            objects = []
            line_index += 1
            # Until we unindent, add backend objects to the list
            while not lines[line_index].startswith("else:"):
                line = lines[line_index]
                single_line_import_search = _re_single_line_import.search(line)
                if single_line_import_search is not None:
                    objects.extend(single_line_import_search.groups()[0].split(", "))
                elif line.startswith(" " * 8):
                    objects.append(line[8:-2])
                line_index += 1

            backend_specific_objects[backend] = objects
        else:
            line_index += 1

    return backend_specific_objects
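
# read_init() returns a dict like {"torch": ["UNet2DModel", ...]}; the exact keys depend on the
# backend guards present in src/diffusers/__init__.py.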


def create_dummy_object(name, backend_name):
    """Create the code for the dummy object corresponding to `name`."""
    if name.isupper():
        return DUMMY_CONSTANT.format(name)
    elif name.islower():
        return DUMMY_FUNCTION.format(name, backend_name)
    else:
        return DUMMY_CLASS.format(name, backend_name)
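
# For instance, create_dummy_object("UNet2DModel", '["torch"]') renders the class template, while
# an all-caps name would render the constant template and an all-lowercase name the function template.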


def create_dummy_files():
    """Create the content of the dummy files."""
    backend_specific_objects = read_init()
    # Build one dummy module per backend, mirroring the objects found in the init.
    dummy_files = {}

    for backend, objects in backend_specific_objects.items():
        backend_name = "[" + ", ".join(f'"{b}"' for b in backend.split("_and_")) + "]"
        dummy_file = "# This file is autogenerated by the command `make fix-copies`, do not edit.\n"
        dummy_file += "# flake8: noqa\n\n"
        dummy_file += "from ..utils import DummyObject, requires_backends\n\n"
        dummy_file += "\n".join([create_dummy_object(o, backend_name) for o in objects])
        dummy_files[backend] = dummy_file

    return dummy_files
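
# Keys are backend names (e.g. "torch"); values are the full source text of the corresponding dummy module.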


def check_dummies(overwrite=False):
    """Check if the dummy files are up to date and, if `overwrite` is True, overwrite them with the right content."""
    dummy_files = create_dummy_files()
    # Special short names per backend, as used in the utils/dummy_xxx_objects.py file names.
    short_names = {"torch": "pt"}

    # Locate actual dummy modules and read their content.
    path = os.path.join(PATH_TO_DIFFUSERS, "utils")
    dummy_file_paths = {
        backend: os.path.join(path, f"dummy_{short_names.get(backend, backend)}_objects.py")
        for backend in dummy_files.keys()
    }
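    # e.g. the "torch" backend maps to src/diffusers/utils/dummy_pt_objects.py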

    actual_dummies = {}
    for backend, file_path in dummy_file_paths.items():
        if os.path.isfile(file_path):
            with open(file_path, "r", encoding="utf-8", newline="\n") as f:
                actual_dummies[backend] = f.read()
        else:
            actual_dummies[backend] = ""

    for backend in dummy_files.keys():
        if dummy_files[backend] != actual_dummies[backend]:
            if overwrite:
                print(
                    f"Updating diffusers.utils.dummy_{short_names.get(backend, backend)}_objects.py as the main "
                    "__init__ has new objects."
                )
                with open(dummy_file_paths[backend], "w", encoding="utf-8", newline="\n") as f:
                    f.write(dummy_files[backend])
            else:
                raise ValueError(
                    "The main __init__ has objects that are not present in "
                    f"diffusers.utils.dummy_{short_names.get(backend, backend)}_objects.py. Run `make fix-copies` "
                    "to fix this."
                )


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--fix_and_overwrite", action="store_true", help="Whether to fix inconsistencies.")
    args = parser.parse_args()

    check_dummies(args.fix_and_overwrite)