bug fix cosine annealing optimizer in prior trainer (#262)

This commit is contained in:
zion
2022-11-11 12:15:13 -08:00
committed by GitHub
parent 08238a7200
commit 91c8d1ca13

View File

@@ -236,7 +236,7 @@ class DiffusionPriorTrainer(nn.Module):
                 )
             )
         if exists(cosine_decay_max_steps):
-            self.scheduler = CosineAnnealingLR(optimizer, T_max = cosine_decay_max_steps)
+            self.scheduler = CosineAnnealingLR(self.optimizer, T_max = cosine_decay_max_steps)
         else:
             self.scheduler = LambdaLR(self.optimizer, lr_lambda = lambda _: 1.0)