Fix PyCharm/VSCode static type checking for dummy objects (#1596)
* Fix PyCharm/VSCode static type checking for dummy objects
* Re-add dummies
* Fix AudioDiffusion imports
* fix import
* fix import
* Update utils/check_dummies.py
* Update src/diffusers/utils/import_utils.py
* Update src/diffusers/__init__.py
* Update src/diffusers/pipelines/stable_diffusion/__init__.py
* fix double import

Co-authored-by: Pedro Cuenca <pedro@huggingface.co>
Co-authored-by: Patrick von Platen <patrick.v.platen@gmail.com>
This commit is contained in:
parent 03566d8689
commit dbe0719246
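In short, every optional-dependency import guard changes from a plain `if is_xxx_available():` block (which leaves PyCharm/VSCode unsure whether the real class or the dummy placeholder is in scope) to a `try` / `except OptionalDependencyNotAvailable` / `else` block whose `else` branch holds the real imports. Below is a minimal, self-contained sketch of the pattern; the helper bodies are illustrative stand-ins, not the actual diffusers implementations. The full per-file changes follow.

class OptionalDependencyNotAvailable(BaseException):
    """Raised when an optional dependency is missing (added to import_utils in this commit)."""


def is_torch_available() -> bool:
    # Illustrative probe only; diffusers caches an importlib-based check.
    try:
        import torch  # noqa: F401
    except ImportError:
        return False
    return True


# Old style: a plain `if` guard. Static analyzers cannot tell which branch wins,
# so names imported inside the `if` body are often flagged as unresolved.
#
#   if is_torch_available():
#       from .models import UNet2DModel
#   else:
#       from .utils.dummy_pt_objects import *  # noqa F403
#
# New style: the real imports sit on the `else` branch of a try/except, which
# static analyzers follow as the normal code path, while a missing backend
# still falls back to the dummy objects at runtime.
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    print("torch missing: would import dummy placeholders here")
else:
    print("torch present: would import the real models/pipelines here")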
src/diffusers/__init__.py
@@ -3,9 +3,11 @@ __version__ = "0.10.0.dev0"
 from .configuration_utils import ConfigMixin
 from .onnx_utils import OnnxRuntimeModel
 from .utils import (
+    OptionalDependencyNotAvailable,
     is_flax_available,
     is_inflect_available,
     is_k_diffusion_available,
+    is_librosa_available,
     is_onnx_available,
     is_scipy_available,
     is_torch_available,
@@ -15,7 +17,12 @@ from .utils import (
 )
 
 
-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from .utils.dummy_pt_objects import *  # noqa F403
+else:
     from .modeling_utils import ModelMixin
     from .models import AutoencoderKL, Transformer2DModel, UNet1DModel, UNet2DConditionModel, UNet2DModel, VQModel
     from .optimization import (
@@ -29,14 +36,12 @@ if is_torch_available():
     )
     from .pipeline_utils import DiffusionPipeline
     from .pipelines import (
-        AudioDiffusionPipeline,
         DanceDiffusionPipeline,
         DDIMPipeline,
         DDPMPipeline,
         KarrasVePipeline,
         LDMPipeline,
         LDMSuperResolutionPipeline,
-        Mel,
         PNDMPipeline,
         RePaintPipeline,
         ScoreSdeVePipeline,
@@ -60,15 +65,22 @@ if is_torch_available():
         VQDiffusionScheduler,
     )
     from .training_utils import EMAModel
-else:
-    from .utils.dummy_pt_objects import *  # noqa F403
 
-
-if is_torch_available() and is_scipy_available():
-    from .schedulers import LMSDiscreteScheduler
-else:
+try:
+    if not (is_torch_available() and is_scipy_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
     from .utils.dummy_torch_and_scipy_objects import *  # noqa F403
+else:
+    from .schedulers import LMSDiscreteScheduler
 
-
-if is_torch_available() and is_transformers_available():
+try:
+    if not (is_torch_available() and is_transformers_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from .utils.dummy_torch_and_transformers_objects import *  # noqa F403
+else:
     from .pipelines import (
         AltDiffusionImg2ImgPipeline,
         AltDiffusionPipeline,
@@ -88,15 +100,21 @@ if is_torch_available() and is_transformers_available():
         VersatileDiffusionTextToImagePipeline,
         VQDiffusionPipeline,
     )
-else:
-    from .utils.dummy_torch_and_transformers_objects import *  # noqa F403
 
-
-if is_torch_available() and is_transformers_available() and is_k_diffusion_available():
-    from .pipelines import StableDiffusionKDiffusionPipeline
-else:
+try:
+    if not (is_torch_available() and is_transformers_available() and is_k_diffusion_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
     from .utils.dummy_torch_and_transformers_and_k_diffusion_objects import *  # noqa F403
+else:
+    from .pipelines import StableDiffusionKDiffusionPipeline
 
-
-if is_torch_available() and is_transformers_available() and is_onnx_available():
+try:
+    if not (is_torch_available() and is_transformers_available() and is_onnx_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from .utils.dummy_torch_and_transformers_and_onnx_objects import *  # noqa F403
+else:
     from .pipelines import (
         OnnxStableDiffusionImg2ImgPipeline,
         OnnxStableDiffusionInpaintPipeline,
@@ -104,10 +122,21 @@ if is_torch_available() and is_transformers_available() and is_onnx_available():
         OnnxStableDiffusionPipeline,
         StableDiffusionOnnxPipeline,
     )
-else:
-    from .utils.dummy_torch_and_transformers_and_onnx_objects import *  # noqa F403
 
-
-if is_flax_available():
+try:
+    if not (is_torch_available() and is_librosa_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from .utils.dummy_torch_and_librosa_objects import *  # noqa F403
+else:
+    from .pipelines import AudioDiffusionPipeline, Mel
+
+try:
+    if not is_flax_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from .utils.dummy_flax_objects import *  # noqa F403
+else:
     from .modeling_flax_utils import FlaxModelMixin
     from .models.unet_2d_condition_flax import FlaxUNet2DConditionModel
     from .models.vae_flax import FlaxAutoencoderKL
@@ -122,10 +151,11 @@ if is_flax_available():
         FlaxSchedulerMixin,
         FlaxScoreSdeVeScheduler,
     )
-else:
-    from .utils.dummy_flax_objects import *  # noqa F403
 
-
-if is_flax_available() and is_transformers_available():
-    from .pipelines import FlaxStableDiffusionPipeline
-else:
+try:
+    if not (is_flax_available() and is_transformers_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
     from .utils.dummy_flax_and_transformers_objects import *  # noqa F403
+else:
+    from .pipelines import FlaxStableDiffusionPipeline
src/diffusers/pipelines/__init__.py
@@ -1,4 +1,5 @@
 from ..utils import (
+    OptionalDependencyNotAvailable,
     is_flax_available,
     is_k_diffusion_available,
     is_librosa_available,
@@ -8,7 +9,12 @@ from ..utils import (
 )
 
 
-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ..utils.dummy_pt_objects import *  # noqa F403
+else:
     from .dance_diffusion import DanceDiffusionPipeline
     from .ddim import DDIMPipeline
     from .ddpm import DDPMPipeline
@@ -18,15 +24,21 @@ if is_torch_available():
     from .repaint import RePaintPipeline
     from .score_sde_ve import ScoreSdeVePipeline
     from .stochastic_karras_ve import KarrasVePipeline
-else:
-    from ..utils.dummy_pt_objects import *  # noqa F403
 
-if is_torch_available() and is_librosa_available():
+try:
+    if not (is_torch_available() and is_librosa_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ..utils.dummy_torch_and_librosa_objects import *  # noqa F403
+else:
     from .audio_diffusion import AudioDiffusionPipeline, Mel
-else:
-    from ..utils.dummy_torch_and_librosa_objects import AudioDiffusionPipeline, Mel  # noqa F403
 
-if is_torch_available() and is_transformers_available():
+try:
+    if not (is_torch_available() and is_transformers_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ..utils.dummy_torch_and_transformers_objects import *  # noqa F403
+else:
     from .alt_diffusion import AltDiffusionImg2ImgPipeline, AltDiffusionPipeline
     from .latent_diffusion import LDMTextToImagePipeline
     from .paint_by_example import PaintByExamplePipeline
@@ -48,7 +60,12 @@ if is_torch_available() and is_transformers_available():
     )
     from .vq_diffusion import VQDiffusionPipeline
 
-if is_transformers_available() and is_onnx_available():
+try:
+    if not (is_torch_available() and is_transformers_available() and is_onnx_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ..utils.dummy_torch_and_transformers_and_onnx_objects import *  # noqa F403
+else:
     from .stable_diffusion import (
         OnnxStableDiffusionImg2ImgPipeline,
         OnnxStableDiffusionInpaintPipeline,
@@ -57,8 +74,19 @@ if is_transformers_available() and is_onnx_available():
         StableDiffusionOnnxPipeline,
     )
 
-if is_torch_available() and is_transformers_available() and is_k_diffusion_available():
+try:
+    if not (is_torch_available() and is_transformers_available() and is_k_diffusion_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ..utils.dummy_torch_and_transformers_and_k_diffusion_objects import *  # noqa F403
+else:
     from .stable_diffusion import StableDiffusionKDiffusionPipeline
 
-if is_transformers_available() and is_flax_available():
+try:
+    if not (is_flax_available() and is_transformers_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ..utils.dummy_flax_and_transformers_objects import *  # noqa F403
+else:
     from .stable_diffusion import FlaxStableDiffusionPipeline
src/diffusers/pipelines/stable_diffusion/__init__.py
@@ -8,6 +8,7 @@ from PIL import Image
 
 from ...utils import (
     BaseOutput,
+    OptionalDependencyNotAvailable,
     is_flax_available,
     is_k_diffusion_available,
     is_onnx_available,
@@ -44,12 +45,20 @@ if is_transformers_available() and is_torch_available():
     from .pipeline_stable_diffusion_upscale import StableDiffusionUpscalePipeline
     from .safety_checker import StableDiffusionSafetyChecker
 
-if is_transformers_available() and is_torch_available() and is_transformers_version(">=", "4.25.0.dev0"):
-    from .pipeline_stable_diffusion_image_variation import StableDiffusionImageVariationPipeline
-else:
+try:
+    if not (is_transformers_available() and is_torch_available() and is_transformers_version(">=", "4.25.0.dev0")):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
     from ...utils.dummy_torch_and_transformers_objects import StableDiffusionImageVariationPipeline
+else:
+    from .pipeline_stable_diffusion_image_variation import StableDiffusionImageVariationPipeline
 
-if is_transformers_available() and is_torch_available() and is_k_diffusion_available():
+try:
+    if not (is_torch_available() and is_transformers_available() and is_k_diffusion_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ...utils.dummy_torch_and_transformers_and_k_diffusion_objects import *  # noqa F403
+else:
     from .pipeline_stable_diffusion_k_diffusion import StableDiffusionKDiffusionPipeline
 
 if is_transformers_available() and is_onnx_available():
src/diffusers/pipelines/versatile_diffusion/__init__.py
@@ -1,16 +1,24 @@
-from ...utils import is_torch_available, is_transformers_available, is_transformers_version
+from ...utils import (
+    OptionalDependencyNotAvailable,
+    is_torch_available,
+    is_transformers_available,
+    is_transformers_version,
+)
 
 
-if is_transformers_available() and is_torch_available() and is_transformers_version(">=", "4.25.0.dev0"):
-    from .modeling_text_unet import UNetFlatConditionModel
-    from .pipeline_versatile_diffusion import VersatileDiffusionPipeline
-    from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline
-    from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline
-    from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline
-else:
+try:
+    if not (is_transformers_available() and is_torch_available() and is_transformers_version(">=", "4.25.0.dev0")):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
     from ...utils.dummy_torch_and_transformers_objects import (
         VersatileDiffusionDualGuidedPipeline,
         VersatileDiffusionImageVariationPipeline,
         VersatileDiffusionPipeline,
         VersatileDiffusionTextToImagePipeline,
     )
+else:
+    from .modeling_text_unet import UNetFlatConditionModel
+    from .pipeline_versatile_diffusion import VersatileDiffusionPipeline
+    from .pipeline_versatile_diffusion_dual_guided import VersatileDiffusionDualGuidedPipeline
+    from .pipeline_versatile_diffusion_image_variation import VersatileDiffusionImageVariationPipeline
+    from .pipeline_versatile_diffusion_text_to_image import VersatileDiffusionTextToImagePipeline
src/diffusers/schedulers/__init__.py
@@ -13,10 +13,15 @@
 # limitations under the License.
 
 
-from ..utils import is_flax_available, is_scipy_available, is_torch_available
+from ..utils import OptionalDependencyNotAvailable, is_flax_available, is_scipy_available, is_torch_available
 
 
-if is_torch_available():
+try:
+    if not is_torch_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ..utils.dummy_pt_objects import *  # noqa F403
+else:
     from .scheduling_ddim import DDIMScheduler
     from .scheduling_ddpm import DDPMScheduler
     from .scheduling_dpmsolver_multistep import DPMSolverMultistepScheduler
@@ -34,10 +39,13 @@ if is_torch_available():
     from .scheduling_sde_vp import ScoreSdeVpScheduler
     from .scheduling_utils import SchedulerMixin
     from .scheduling_vq_diffusion import VQDiffusionScheduler
-else:
-    from ..utils.dummy_pt_objects import *  # noqa F403
 
-if is_flax_available():
+try:
+    if not is_flax_available():
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
+    from ..utils.dummy_flax_objects import *  # noqa F403
+else:
     from .scheduling_ddim_flax import FlaxDDIMScheduler
     from .scheduling_ddpm_flax import FlaxDDPMScheduler
     from .scheduling_dpmsolver_multistep_flax import FlaxDPMSolverMultistepScheduler
@@ -46,11 +54,12 @@ if is_flax_available():
     from .scheduling_pndm_flax import FlaxPNDMScheduler
     from .scheduling_sde_ve_flax import FlaxScoreSdeVeScheduler
     from .scheduling_utils_flax import FlaxSchedulerMixin, FlaxSchedulerOutput, broadcast_to_shape_from_left
-else:
-    from ..utils.dummy_flax_objects import *  # noqa F403
 
 
-if is_scipy_available() and is_torch_available():
-    from .scheduling_lms_discrete import LMSDiscreteScheduler
-else:
+try:
+    if not (is_torch_available() and is_scipy_available()):
+        raise OptionalDependencyNotAvailable()
+except OptionalDependencyNotAvailable:
     from ..utils.dummy_torch_and_scipy_objects import *  # noqa F403
+else:
+    from .scheduling_lms_discrete import LMSDiscreteScheduler
src/diffusers/utils/__init__.py
@@ -26,6 +26,7 @@ from .import_utils import (
     USE_TF,
     USE_TORCH,
     DummyObject,
+    OptionalDependencyNotAvailable,
    is_accelerate_available,
    is_flax_available,
    is_inflect_available,
src/diffusers/utils/dummy_pt_objects.py
@@ -152,21 +152,6 @@ class DiffusionPipeline(metaclass=DummyObject):
         requires_backends(cls, ["torch"])
 
 
-class AudioDiffusionPipeline(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_config(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
 class DanceDiffusionPipeline(metaclass=DummyObject):
     _backends = ["torch"]
 
@@ -257,21 +242,6 @@ class LDMSuperResolutionPipeline(metaclass=DummyObject):
         requires_backends(cls, ["torch"])
 
 
-class Mel(metaclass=DummyObject):
-    _backends = ["torch"]
-
-    def __init__(self, *args, **kwargs):
-        requires_backends(self, ["torch"])
-
-    @classmethod
-    def from_config(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-    @classmethod
-    def from_pretrained(cls, *args, **kwargs):
-        requires_backends(cls, ["torch"])
-
-
 class PNDMPipeline(metaclass=DummyObject):
     _backends = ["torch"]
 
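The two classes removed above are the torch-only dummies for AudioDiffusionPipeline and Mel; per the pipeline changes earlier in this commit, those names are now served from the torch-and-librosa dummy module instead. For readers unfamiliar with the dummy files, here is a rough, self-contained sketch of how such a placeholder behaves at runtime; requires_backends and DummyObject below are simplified stand-ins, not the real diffusers implementations.

def requires_backends(obj, backends):
    # Simplified: the real helper builds a per-backend installation hint.
    name = obj.__name__ if isinstance(obj, type) else type(obj).__name__
    raise ImportError(f"{name} requires the following backends: {', '.join(backends)}")


class DummyObject(type):
    # Metaclass sketch: attribute access on the class itself fails loudly.
    def __getattr__(cls, key):
        requires_backends(cls, cls._backends)


class AudioDiffusionPipeline(metaclass=DummyObject):
    _backends = ["torch", "librosa"]

    def __init__(self, *args, **kwargs):
        requires_backends(self, ["torch", "librosa"])


try:
    AudioDiffusionPipeline()  # raises at use time instead of breaking `import diffusers`
except ImportError as err:
    print(err)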
src/diffusers/utils/import_utils.py
@@ -430,3 +430,7 @@ def is_transformers_version(operation: str, version: str):
     if not _transformers_available:
         return False
     return compare_versions(parse(_transformers_version), operation, version)
+
+
+class OptionalDependencyNotAvailable(BaseException):
+    """An error indicating that an optional dependency of Diffusers was not found in the environment."""
utils/check_dummies.py
@@ -74,13 +74,15 @@ def read_init():
     backend_specific_objects = {}
     # Go through the end of the file
     while line_index < len(lines):
-        # If the line is an if is_backend_available, we grab all objects associated.
+        # If the line contains is_backend_available, we grab all objects associated with the `else` block
         backend = find_backend(lines[line_index])
         if backend is not None:
-            objects = []
-            line_index += 1
-            # Until we unindent, add backend objects to the list
             while not lines[line_index].startswith("else:"):
+                line_index += 1
+            line_index += 1
+            objects = []
+            # Until we unindent, add backend objects to the list
+            while line_index < len(lines) and len(lines[line_index]) > 1:
                 line = lines[line_index]
                 single_line_import_search = _re_single_line_import.search(line)
                 if single_line_import_search is not None:
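Because the real imports now live under `else:` rather than directly under the `if`, read_init first advances to the `else:` line and only then collects object names, stopping once the block unindents. A rough, self-contained sketch of that scan over a toy `__init__.py` follows; the regexes and loop here are simplified placeholders, not the exact helpers in utils/check_dummies.py.

import re

# Simplified placeholders; the real script uses find_backend() and
# _re_single_line_import from utils/check_dummies.py.
_re_backend = re.compile(r"is_(\w+)_available\(\)")
_re_single_line_import = re.compile(r"^\s+from\s+\S+\s+import\s+(.+)$")

toy_init = '''\
try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    from .utils.dummy_pt_objects import *  # noqa F403
else:
    from .modeling_utils import ModelMixin
    from .pipeline_utils import DiffusionPipeline
'''

lines = toy_init.split("\n")
line_index = 0
objects = []
while line_index < len(lines):
    if _re_backend.search(lines[line_index]):
        # Skip ahead to the `else:` branch, where the real imports now live.
        while not lines[line_index].startswith("else:"):
            line_index += 1
        line_index += 1
        # Collect imported names until the block unindents (empty/short line).
        while line_index < len(lines) and len(lines[line_index]) > 1:
            single = _re_single_line_import.search(lines[line_index])
            if single is not None:
                objects.extend(o.strip() for o in single.groups()[0].split(","))
            line_index += 1
    line_index += 1

print(objects)  # ['ModelMixin', 'DiffusionPipeline']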