use sinusoidal embeddings for the diffusion prior's timestep conditioning as well, for the continuous version

Phil Wang
2022-05-07 08:32:26 -07:00
parent 8f93729d19
commit 830afd3c15
2 changed files with 2 additions and 2 deletions

dalle2_pytorch/dalle2_pytorch.py

@@ -706,7 +706,7 @@ class DiffusionPriorNetwork(nn.Module):
         **kwargs
     ):
         super().__init__()
-        self.time_embeddings = nn.Embedding(num_timesteps, dim) if exists(num_timesteps) else nn.Sequential(Rearrange('b -> b 1'), MLP(1, dim)) # also offer a continuous version of timestep embeddings, with a 2 layer MLP
+        self.time_embeddings = nn.Embedding(num_timesteps, dim) if exists(num_timesteps) else nn.Sequential(SinusoidalPosEmb(dim), MLP(dim, dim)) # also offer a continuous version of timestep embeddings, with a 2 layer MLP
         self.learned_query = nn.Parameter(torch.randn(dim))
         self.causal_transformer = CausalTransformer(dim = dim, **kwargs)
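
For context: before this change, a continuous timestep was fed to the MLP as a raw scalar via Rearrange('b -> b 1') followed by MLP(1, dim). After the change, SinusoidalPosEmb first featurizes the scalar into dim sine/cosine values at geometrically spaced frequencies, so the 2-layer MLP receives a much richer input. A minimal sketch of how the continuous branch behaves after this commit; the inner Sequential is a stand-in for the repo's MLP helper, whose exact layers may differ:

import math
import torch
from torch import nn

class SinusoidalPosEmb(nn.Module):
    # featurize a (possibly continuous) scalar timestep as sines and cosines
    # at geometrically spaced frequencies, as in transformer position encodings
    def __init__(self, dim):
        super().__init__()
        self.dim = dim

    def forward(self, t):
        # t : (batch,) float tensor of timesteps
        half_dim = self.dim // 2
        freqs = torch.exp(torch.arange(half_dim, device = t.device) * -(math.log(10000) / (half_dim - 1)))
        args = t[:, None] * freqs[None, :]                     # (batch, dim // 2)
        return torch.cat((args.sin(), args.cos()), dim = -1)   # (batch, dim)

dim = 512

# sketch of the continuous-time branch; the inner Sequential is an
# assumed stand-in for the repo's MLP(dim, dim)
time_embeddings = nn.Sequential(
    SinusoidalPosEmb(dim),
    nn.Sequential(nn.Linear(dim, dim), nn.SiLU(), nn.Linear(dim, dim))
)

t = torch.rand(4)             # continuous timesteps in [0, 1]
time_emb = time_embeddings(t) # (4, 512)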

setup.py

@@ -10,7 +10,7 @@ setup(
       'dream = dalle2_pytorch.cli:dream'
     ],
   },
-  version = '0.1.7',
+  version = '0.1.8',
   license='MIT',
   description = 'DALL-E 2',
   author = 'Phil Wang',