From 86f73b6ec3ce35e85735a33e31ebd9d335872a52 Mon Sep 17 00:00:00 2001
From: Pedro Cuenca
Date: Fri, 23 Sep 2022 14:48:13 +0200
Subject: [PATCH 1/2] Remove deprecated `torch_device` kwarg.

---
 src/diffusers/pipelines/ddim/pipeline_ddim.py | 15 +--------------
 src/diffusers/pipelines/ddpm/pipeline_ddpm.py | 11 -----------
 .../latent_diffusion/pipeline_latent_diffusion.py | 11 -----------
 .../pipeline_latent_diffusion_uncond.py | 12 ------------
 src/diffusers/pipelines/pndm/pipeline_pndm.py | 12 ------------
 .../score_sde_ve/pipeline_score_sde_ve.py | 12 ------------
 .../stable_diffusion/pipeline_stable_diffusion.py | 12 ------------
 .../pipeline_stochastic_karras_ve.py | 11 -----------
 8 files changed, 1 insertion(+), 95 deletions(-)

diff --git a/src/diffusers/pipelines/ddim/pipeline_ddim.py b/src/diffusers/pipelines/ddim/pipeline_ddim.py
index 95b49e045f67..1caeb39bf02e 100644
--- a/src/diffusers/pipelines/ddim/pipeline_ddim.py
+++ b/src/diffusers/pipelines/ddim/pipeline_ddim.py
@@ -74,20 +74,6 @@ def __call__(
             generated images.
         """
 
-        if "torch_device" in kwargs:
-            device = kwargs.pop("torch_device")
-            warnings.warn(
-                "`torch_device` is deprecated as an input argument to `__call__` and will be removed in v0.3.0."
-                " Consider using `pipe.to(torch_device)` instead."
-            )
-
-            # Set device as before (to be removed in 0.3.0)
-            if device is None:
-                device = "cuda" if torch.cuda.is_available() else "cpu"
-            self.to(device)
-
-        # eta corresponds to η in paper and should be between [0, 1]
-
         # Sample gaussian noise to begin loop
         image = torch.randn(
             (batch_size, self.unet.in_channels, self.unet.sample_size, self.unet.sample_size),
@@ -103,6 +89,7 @@ def __call__(
             model_output = self.unet(image, t).sample
 
             # 2. predict previous mean of image x_t-1 and add variance depending on eta
+            # eta corresponds to η in paper and should be between [0, 1]
             # do x_t -> x_t-1
             image = self.scheduler.step(model_output, t, image, eta).prev_sample
 
diff --git a/src/diffusers/pipelines/ddpm/pipeline_ddpm.py b/src/diffusers/pipelines/ddpm/pipeline_ddpm.py
index b7f7093e379b..88f1050b84f3 100644
--- a/src/diffusers/pipelines/ddpm/pipeline_ddpm.py
+++ b/src/diffusers/pipelines/ddpm/pipeline_ddpm.py
@@ -66,17 +66,6 @@ def __call__(
             `return_dict` is True, otherwise a `tuple. When returning a tuple, the first element is a list with the
             generated images.
         """
-        if "torch_device" in kwargs:
-            device = kwargs.pop("torch_device")
-            warnings.warn(
-                "`torch_device` is deprecated as an input argument to `__call__` and will be removed in v0.3.0."
-                " Consider using `pipe.to(torch_device)` instead."
-            )
-
-            # Set device as before (to be removed in 0.3.0)
-            if device is None:
-                device = "cuda" if torch.cuda.is_available() else "cpu"
-            self.to(device)
 
         # Sample gaussian noise to begin loop
         image = torch.randn(
diff --git a/src/diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion.py b/src/diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion.py
index 43b734c97c39..e410a877359e 100644
--- a/src/diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion.py
+++ b/src/diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion.py
@@ -94,17 +94,6 @@ def __call__(
             `return_dict` is True, otherwise a `tuple. When returning a tuple, the first element is a list with the
             generated images.
         """
-        if "torch_device" in kwargs:
-            device = kwargs.pop("torch_device")
-            warnings.warn(
-                "`torch_device` is deprecated as an input argument to `__call__` and will be removed in v0.3.0."
-                " Consider using `pipe.to(torch_device)` instead."
-            )
-
-            # Set device as before (to be removed in 0.3.0)
-            if device is None:
-                device = "cuda" if torch.cuda.is_available() else "cpu"
-            self.to(device)
 
         if isinstance(prompt, str):
             batch_size = 1
diff --git a/src/diffusers/pipelines/latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py b/src/diffusers/pipelines/latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py
index 5574b65df9f8..921fbca245d9 100644
--- a/src/diffusers/pipelines/latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py
+++ b/src/diffusers/pipelines/latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py
@@ -60,18 +60,6 @@ def __call__(
             generated images.
         """
 
-        if "torch_device" in kwargs:
-            device = kwargs.pop("torch_device")
-            warnings.warn(
-                "`torch_device` is deprecated as an input argument to `__call__` and will be removed in v0.3.0."
-                " Consider using `pipe.to(torch_device)` instead."
-            )
-
-            # Set device as before (to be removed in 0.3.0)
-            if device is None:
-                device = "cuda" if torch.cuda.is_available() else "cpu"
-            self.to(device)
-
         latents = torch.randn(
             (batch_size, self.unet.in_channels, self.unet.sample_size, self.unet.sample_size),
             generator=generator,
diff --git a/src/diffusers/pipelines/pndm/pipeline_pndm.py b/src/diffusers/pipelines/pndm/pipeline_pndm.py
index ae6c10e9e967..a6311f2e0762 100644
--- a/src/diffusers/pipelines/pndm/pipeline_pndm.py
+++ b/src/diffusers/pipelines/pndm/pipeline_pndm.py
@@ -75,18 +75,6 @@ def __call__(
         # For more information on the sampling method you can take a look at Algorithm 2 of
         # the official paper: https://arxiv.org/pdf/2202.09778.pdf
 
-        if "torch_device" in kwargs:
-            device = kwargs.pop("torch_device")
-            warnings.warn(
-                "`torch_device` is deprecated as an input argument to `__call__` and will be removed in v0.3.0."
-                " Consider using `pipe.to(torch_device)` instead."
-            )
-
-            # Set device as before (to be removed in 0.3.0)
-            if device is None:
-                device = "cuda" if torch.cuda.is_available() else "cpu"
-            self.to(device)
-
         # Sample gaussian noise to begin loop
         image = torch.randn(
             (batch_size, self.unet.in_channels, self.unet.sample_size, self.unet.sample_size),
diff --git a/src/diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py b/src/diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py
index b29795e7f661..34ff7ef073e5 100644
--- a/src/diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py
+++ b/src/diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py
@@ -53,18 +53,6 @@ def __call__(
             generated images.
         """
 
-        if "torch_device" in kwargs:
-            device = kwargs.pop("torch_device")
-            warnings.warn(
-                "`torch_device` is deprecated as an input argument to `__call__` and will be removed in v0.3.0."
-                " Consider using `pipe.to(torch_device)` instead."
-            )
-
-            # Set device as before (to be removed in 0.3.0)
-            if device is None:
-                device = "cuda" if torch.cuda.is_available() else "cpu"
-            self.to(device)
-
         img_size = self.unet.config.sample_size
         shape = (batch_size, 3, img_size, img_size)
 
diff --git a/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py b/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py
index 216a76a55997..d4e41d699b3c 100644
--- a/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py
+++ b/src/diffusers/pipelines/stable_diffusion/pipeline_stable_diffusion.py
@@ -165,18 +165,6 @@ def __call__(
             (nsfw) content, according to the `safety_checker`.
         """
 
-        if "torch_device" in kwargs:
-            device = kwargs.pop("torch_device")
-            warnings.warn(
-                "`torch_device` is deprecated as an input argument to `__call__` and will be removed in v0.3.0."
-                " Consider using `pipe.to(torch_device)` instead."
-            )
-
-            # Set device as before (to be removed in 0.3.0)
-            if device is None:
-                device = "cuda" if torch.cuda.is_available() else "cpu"
-            self.to(device)
-
         if isinstance(prompt, str):
             batch_size = 1
         elif isinstance(prompt, list):
diff --git a/src/diffusers/pipelines/stochastic_karras_ve/pipeline_stochastic_karras_ve.py b/src/diffusers/pipelines/stochastic_karras_ve/pipeline_stochastic_karras_ve.py
index 1984a25ac0c6..b42eb7b2b9c3 100644
--- a/src/diffusers/pipelines/stochastic_karras_ve/pipeline_stochastic_karras_ve.py
+++ b/src/diffusers/pipelines/stochastic_karras_ve/pipeline_stochastic_karras_ve.py
@@ -64,17 +64,6 @@ def __call__(
             `return_dict` is True, otherwise a `tuple. When returning a tuple, the first element is a list with the
             generated images.
         """
-        if "torch_device" in kwargs:
-            device = kwargs.pop("torch_device")
-            warnings.warn(
-                "`torch_device` is deprecated as an input argument to `__call__` and will be removed in v0.3.0."
-                " Consider using `pipe.to(torch_device)` instead."
-            )
-
-            # Set device as before (to be removed in 0.3.0)
-            if device is None:
-                device = "cuda" if torch.cuda.is_available() else "cpu"
-            self.to(device)
 
         img_size = self.unet.config.sample_size
         shape = (batch_size, 3, img_size, img_size)

From 3c3c09710f8bf4d08a10f79257e9896a03c392d6 Mon Sep 17 00:00:00 2001
From: Pedro Cuenca
Date: Fri, 23 Sep 2022 14:57:38 +0200
Subject: [PATCH 2/2] Remove unused imports.

---
 src/diffusers/pipelines/ddim/pipeline_ddim.py | 1 -
 src/diffusers/pipelines/ddpm/pipeline_ddpm.py | 1 -
 .../pipelines/latent_diffusion/pipeline_latent_diffusion.py | 1 -
 .../latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py | 1 -
 src/diffusers/pipelines/pndm/pipeline_pndm.py | 1 -
 src/diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py | 1 -
 .../stochastic_karras_ve/pipeline_stochastic_karras_ve.py | 1 -
 7 files changed, 7 deletions(-)

diff --git a/src/diffusers/pipelines/ddim/pipeline_ddim.py b/src/diffusers/pipelines/ddim/pipeline_ddim.py
index 1caeb39bf02e..17b8ec83a9a4 100644
--- a/src/diffusers/pipelines/ddim/pipeline_ddim.py
+++ b/src/diffusers/pipelines/ddim/pipeline_ddim.py
@@ -14,7 +14,6 @@
 # limitations under the License.
 
 
-import warnings
 from typing import Optional, Tuple, Union
 
 import torch
diff --git a/src/diffusers/pipelines/ddpm/pipeline_ddpm.py b/src/diffusers/pipelines/ddpm/pipeline_ddpm.py
index 88f1050b84f3..9f62a6a1258a 100644
--- a/src/diffusers/pipelines/ddpm/pipeline_ddpm.py
+++ b/src/diffusers/pipelines/ddpm/pipeline_ddpm.py
@@ -14,7 +14,6 @@
 # limitations under the License.
 
 
-import warnings
 from typing import Optional, Tuple, Union
 
 import torch
diff --git a/src/diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion.py b/src/diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion.py
index e410a877359e..dbf2e792f29b 100644
--- a/src/diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion.py
+++ b/src/diffusers/pipelines/latent_diffusion/pipeline_latent_diffusion.py
@@ -1,5 +1,4 @@
 import inspect
-import warnings
 from typing import List, Optional, Tuple, Union
 
 import torch
diff --git a/src/diffusers/pipelines/latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py b/src/diffusers/pipelines/latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py
index 921fbca245d9..b7104bd709c6 100644
--- a/src/diffusers/pipelines/latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py
+++ b/src/diffusers/pipelines/latent_diffusion_uncond/pipeline_latent_diffusion_uncond.py
@@ -1,5 +1,4 @@
 import inspect
-import warnings
 from typing import Optional, Tuple, Union
 
 import torch
diff --git a/src/diffusers/pipelines/pndm/pipeline_pndm.py b/src/diffusers/pipelines/pndm/pipeline_pndm.py
index a6311f2e0762..483f86da16db 100644
--- a/src/diffusers/pipelines/pndm/pipeline_pndm.py
+++ b/src/diffusers/pipelines/pndm/pipeline_pndm.py
@@ -14,7 +14,6 @@
 # limitations under the License.
 
 
-import warnings
 from typing import Optional, Tuple, Union
 
 import torch
diff --git a/src/diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py b/src/diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py
index 34ff7ef073e5..1907fc7d50d8 100644
--- a/src/diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py
+++ b/src/diffusers/pipelines/score_sde_ve/pipeline_score_sde_ve.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-import warnings
 from typing import Optional, Tuple, Union
 
 import torch
diff --git a/src/diffusers/pipelines/stochastic_karras_ve/pipeline_stochastic_karras_ve.py b/src/diffusers/pipelines/stochastic_karras_ve/pipeline_stochastic_karras_ve.py
index b42eb7b2b9c3..1e7e7a26a216 100644
--- a/src/diffusers/pipelines/stochastic_karras_ve/pipeline_stochastic_karras_ve.py
+++ b/src/diffusers/pipelines/stochastic_karras_ve/pipeline_stochastic_karras_ve.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-import warnings
 from typing import Optional, Tuple, Union
 
 import torch
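
Usage note: a minimal caller-side migration sketch implied by these patches, following the removed
deprecation message ("Consider using `pipe.to(torch_device)` instead"). The checkpoint id and the
DDPM pipeline choice below are only illustrative assumptions; any pipeline touched by PATCH 1/2
migrates the same way.

    import torch
    from diffusers import DDPMPipeline

    # Example checkpoint, chosen only for illustration.
    pipe = DDPMPipeline.from_pretrained("google/ddpm-cat-256")

    # Before (deprecated `torch_device` kwarg, removed by PATCH 1/2):
    # image = pipe(batch_size=1, torch_device="cuda").images[0]

    # After: move the pipeline to the device once, then call it without a device argument.
    pipe.to("cuda" if torch.cuda.is_available() else "cpu")
    image = pipe(batch_size=1).images[0]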