Fix `alphas_cumprod_prev` padding bug — thanks to @jihoonerd

This commit is contained in:
Phil Wang
2022-04-15 06:40:46 -07:00
parent 5b4ee09625
commit bece206699
2 changed files with 3 additions and 3 deletions

View File

@@ -450,7 +450,7 @@ class DiffusionPrior(nn.Module):
alphas = 1. - betas
alphas_cumprod = torch.cumprod(alphas, axis=0)
alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (0, 1), value = 1.)
alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (1, 0), value = 1.)
timesteps, = betas.shape
self.num_timesteps = int(timesteps)
@@ -941,7 +941,7 @@ class Decoder(nn.Module):
alphas = 1. - betas
alphas_cumprod = torch.cumprod(alphas, axis=0)
alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (0, 1), value = 1.)
alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (1, 0), value = 1.)
timesteps, = betas.shape
self.num_timesteps = int(timesteps)

View File

@@ -10,7 +10,7 @@ setup(
'dream = dalle2_pytorch.cli:dream'
],
},
version = '0.0.15',
version = '0.0.16',
license='MIT',
description = 'DALL-E 2',
author = 'Phil Wang',