bring in rotary embeddings for diffusion prior causal transformer (the most powerful relative positional encoding, used in PaLM) - 0.1.0 because of breaking change

commit ad20a14a4d
parent 0be1e0d64c
Author: Phil Wang
Date: 2022-05-06 08:45:30 -07:00

2 changed files with 22 additions and 5 deletions

setup.py

@@ -10,7 +10,7 @@ setup(
       'dream = dalle2_pytorch.cli:dream'
     ],
   },
-  version = '0.0.109',
+  version = '0.1.1',
   license='MIT',
   description = 'DALL-E 2',
   author = 'Phil Wang',
@@ -31,6 +31,7 @@ setup(
     'kornia>=0.5.4',
     'pillow',
     'resize-right>=0.0.2',
+    'rotary-embedding-torch',
     'torch>=1.10',
     'torchvision',
     'tqdm',
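
For context, a minimal sketch of how the newly added rotary-embedding-torch dependency is typically wired into causal self-attention: queries and keys are rotated by position before the dot product, so the attention scores carry relative positional information. This is not the actual DALLE2-pytorch attention module; the tensor shapes and the dim = 32 rotation size below are illustrative assumptions following the library's documented usage.

# illustrative sketch only, not the repository's attention code
import torch
from torch import einsum
from rotary_embedding_torch import RotaryEmbedding

dim_head = 64
rotary_emb = RotaryEmbedding(dim = 32)  # rotate a subset of each head's dims (common choice)

# hypothetical projected queries / keys: (batch, heads, seq_len, dim_head)
q = torch.randn(1, 8, 128, dim_head)
k = torch.randn(1, 8, 128, dim_head)

# rotary embeddings are applied to queries and keys only; values are left untouched
q = rotary_emb.rotate_queries_or_keys(q)
k = rotary_emb.rotate_queries_or_keys(k)

# similarity scores now encode relative positions
sim = einsum('b h i d, b h j d -> b h i j', q, k) * dim_head ** -0.5

# standard causal mask, as in the diffusion prior's causal transformer
i, j = sim.shape[-2:]
causal_mask = torch.ones((i, j), dtype = torch.bool).triu(j - i + 1)
sim = sim.masked_fill(causal_mask, -torch.finfo(sim.dtype).max)

attn = sim.softmax(dim = -1)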