Switch to using linear attention for the sparse attention layers within the UNet, given its success in GAN projects

This commit is contained in:
Phil Wang
2022-05-01 17:59:03 -07:00
parent 76c767b1ce
commit ad87bfe28f
3 changed files with 50 additions and 5 deletions

View File

@@ -10,7 +10,7 @@ setup(
'dream = dalle2_pytorch.cli:dream'
],
},
version = '0.0.87',
version = '0.0.88',
license='MIT',
description = 'DALL-E 2',
author = 'Phil Wang',