Use T5 relative positional bias in the prior network's causal transformer, since it makes more sense than rotary embeddings

This commit is contained in:
Phil Wang
2022-04-14 12:01:09 -07:00
parent 9f55c24db6
commit 6e27f617f1
2 changed files with 58 additions and 4 deletions

View File

@@ -10,7 +10,7 @@ setup(
'dream = dalle2_pytorch.cli:dream'
],
},
version = '0.0.14',
version = '0.0.15',
license='MIT',
description = 'DALL-E 2',
author = 'Phil Wang',