From 1ed0f9d80b097594e6294bad432d06ce3fdcb220 Mon Sep 17 00:00:00 2001
From: zion <51308183+nousr@users.noreply.github.com>
Date: Wed, 25 May 2022 09:31:43 -0700
Subject: [PATCH] use deterministic optimizer params (#116)

---
 dalle2_pytorch/optimizer.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/dalle2_pytorch/optimizer.py b/dalle2_pytorch/optimizer.py
index 9657a6a..33192a5 100644
--- a/dalle2_pytorch/optimizer.py
+++ b/dalle2_pytorch/optimizer.py
@@ -21,8 +21,6 @@ def get_optimizer(
     if wd == 0:
         return Adam(params, lr = lr, betas = betas, eps = eps)
 
-    params = set(params)
-
     if group_wd_params:
         wd_params, no_wd_params = separate_weight_decayable_params(params)
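
The following is a minimal illustrative sketch, not part of the patch above, showing why removing the `set(params)` line helps determinism: a Python set orders objects by hash (for nn.Parameter this is tied to object identity), so iteration order can vary from run to run, whereas a plain iterable or list preserves the module's parameter registration order. The toy model below is an assumption for demonstration only.

# Sketch (assumes a toy two-layer model); illustrates the ordering issue,
# not the actual contents of dalle2_pytorch/optimizer.py.
import torch
from torch import nn
from torch.optim import Adam

model = nn.Sequential(nn.Linear(4, 8), nn.Linear(8, 2))

# Deterministic: keeps the order in which parameters were registered.
ordered_params = list(model.parameters())

# Not deterministic across runs: set ordering follows object hashes,
# which depend on object identity (memory addresses).
unordered_params = set(model.parameters())

# Optimizer state_dicts index state by position within param_groups,
# so checkpoints only line up reliably with the ordered version.
opt = Adam(ordered_params, lr = 3e-4, betas = (0.9, 0.99), eps = 1e-8)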