Fix vae tests for cpu and gpu (#480)

Kashif Rasul 2022-09-13 19:14:20 +02:00 committed by GitHub
parent 55f7ca3bb9
commit da7e3994ad
1 changed file with 11 additions and 3 deletions

@@ -104,7 +104,15 @@ class AutoencoderKLTests(ModelTesterMixin, unittest.TestCase):
         output_slice = output[0, -1, -3:, -3:].flatten().cpu()
-        # fmt: off
-        expected_output_slice = torch.tensor([-0.1352, 0.0878, 0.0419, -0.0818, -0.1069, 0.0688, -0.1458, -0.4446, -0.0026])
-        # fmt: on
+        # Since the VAE Gaussian prior's generator is seeded on the appropriate device,
+        # the expected output slices are not the same for CPU and GPU.
+        if torch_device in ("mps", "cpu"):
+            expected_output_slice = torch.tensor(
+                [-0.1352, 0.0878, 0.0419, -0.0818, -0.1069, 0.0688, -0.1458, -0.4446, -0.0026]
+            )
+        else:
+            expected_output_slice = torch.tensor(
+                [-0.2421, 0.4642, 0.2507, -0.0438, 0.0682, 0.3160, -0.2018, -0.0727, 0.2485]
+            )
         self.assertTrue(torch.allclose(output_slice, expected_output_slice, rtol=1e-2))
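For context, a minimal sketch (not part of the commit) of why the expected slices diverge: PyTorch's CPU and CUDA random-number engines differ, so a torch.Generator seeded with the same value on each device produces different draws, and the test therefore needs device-specific expected values. The helper name sample_prior below is purely illustrative.

    import torch

    def sample_prior(device: str) -> torch.Tensor:
        # Seed a generator on the target device; CPU/MPS and CUDA use different
        # RNG engines, so the same seed yields different values per device.
        generator = torch.Generator(device=device).manual_seed(0)
        return torch.randn(9, generator=generator, device=device)

    cpu_sample = sample_prior("cpu")
    if torch.cuda.is_available():
        gpu_sample = sample_prior("cuda")
        # These generally do NOT match, which is why the test keeps separate
        # expected_output_slice values for CPU/MPS and GPU.
        print(torch.allclose(cpu_sample, gpu_sample.cpu(), rtol=1e-2))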