Compare commits

...

2 Commits

Author SHA1 Message Date
Phil Wang
a95de92964 fix bug with @jihoonerd 2022-04-15 06:40:46 -07:00
Phil Wang
5b4ee09625 ideation 2022-04-14 13:48:01 -07:00
3 changed files with 4 additions and 3 deletions

View File

@@ -325,6 +325,7 @@ Offer training wrappers
- [ ] train on a toy task, offer in colab
- [ ] add attention to unet - apply some personal tricks with efficient attention
- [ ] figure out the big idea behind latent diffusion and what can be ported over
+- [ ] consider U2-net for decoder https://arxiv.org/abs/2005.09007
## Citations

View File

@@ -450,7 +450,7 @@ class DiffusionPrior(nn.Module):
alphas = 1. - betas
alphas_cumprod = torch.cumprod(alphas, axis=0)
-        alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (0, 1), value = 1.)
+        alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (1, 0), value = 1.)
timesteps, = betas.shape
self.num_timesteps = int(timesteps)
@@ -941,7 +941,7 @@ class Decoder(nn.Module):
alphas = 1. - betas
alphas_cumprod = torch.cumprod(alphas, axis=0)
-        alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (0, 1), value = 1.)
+        alphas_cumprod_prev = F.pad(alphas_cumprod[:-1], (1, 0), value = 1.)
timesteps, = betas.shape
self.num_timesteps = int(timesteps)

View File

@@ -10,7 +10,7 @@ setup(
'dream = dalle2_pytorch.cli:dream'
],
},
-  version = '0.0.15',
+  version = '0.0.16',
license='MIT',
description = 'DALL-E 2',
author = 'Phil Wang',