vqgan-vae codebook dims should be 256 or smaller

Phil Wang
2022-04-28 08:59:03 -07:00
parent 625ce23f6b
commit cb26187450
2 changed files with 3 additions and 1 deletion

@@ -545,6 +545,7 @@ class VQGanVAE(nn.Module):
         l2_recon_loss = False,
         use_hinge_loss = True,
         vgg = None,
+        vq_codebook_dim = 256,
         vq_codebook_size = 512,
         vq_decay = 0.8,
         vq_commitment_weight = 1.,
@@ -579,6 +580,7 @@ class VQGanVAE(nn.Module):
         self.vq = VQ(
             dim = self.enc_dec.encoded_dim,
+            codebook_dim = vq_codebook_dim,
             codebook_size = vq_codebook_size,
             decay = vq_decay,
             commitment_weight = vq_commitment_weight,

@@ -10,7 +10,7 @@ setup(
       'dream = dalle2_pytorch.cli:dream'
     ],
   },
-  version = '0.0.62',
+  version = '0.0.63',
   license='MIT',
   description = 'DALL-E 2',
   author = 'Phil Wang',
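
For context, a minimal usage sketch (not part of this commit) of how the new vq_codebook_dim keyword flows into the quantizer. The VQ used here is presumably lucidrains' vector-quantize-pytorch, which projects encoder features down to codebook_dim when it is smaller than the encoder dimension, hence the commit title's recommendation of 256 or smaller. The import path and the dim / image_size arguments are assumptions about parts of the constructor not shown in this diff.

# Minimal sketch, not part of this commit. Assumes VQGanVAE is importable from
# the package top level and that `dim` and `image_size` are the remaining
# required constructor arguments (neither appears in the hunks above).
import torch
from dalle2_pytorch import VQGanVAE  # assumed import path

vae = VQGanVAE(
    dim = 128,               # assumed: encoder / decoder base channel width
    image_size = 256,        # assumed: training image resolution
    vq_codebook_dim = 256,   # new keyword added in this commit; title recommends 256 or smaller
    vq_codebook_size = 512,
    vq_decay = 0.8,
    vq_commitment_weight = 1.
)

images = torch.randn(4, 3, 256, 256)
loss = vae(images, return_loss = True)  # assumed training-style forward returning recon + VQ losses
loss.backward()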