Compare commits

...

3 Commits

Author           SHA1         Message                                              Date
Phil Wang        1ffeecd0ca   lower default ema beta value                         2022-05-31 11:55:21 -07:00
Phil Wang        3df899f7a4   patch                                                2022-05-31 09:03:43 -07:00
Aidan Dempster   09534119a1   Fixed non deterministic optimizer creation (#130)    2022-05-31 09:03:20 -07:00
3 changed files with 8 additions and 6 deletions

View File

@@ -1,8 +1,10 @@
 from torch.optim import AdamW, Adam
 
 def separate_weight_decayable_params(params):
-    no_wd_params = set([param for param in params if param.ndim < 2])
-    wd_params = set(params) - no_wd_params
+    wd_params, no_wd_params = [], []
+    for param in params:
+        param_list = no_wd_params if param.ndim < 2 else wd_params
+        param_list.append(param)
     return wd_params, no_wd_params
 
 def get_optimizer(
@@ -25,8 +27,8 @@ def get_optimizer(
     wd_params, no_wd_params = separate_weight_decayable_params(params)
 
     params = [
-        {'params': list(wd_params)},
-        {'params': list(no_wd_params), 'weight_decay': 0},
+        {'params': wd_params},
+        {'params': no_wd_params, 'weight_decay': 0},
     ]
 
     return AdamW(params, lr = lr, weight_decay = wd, betas = betas, eps = eps)
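This first changed file replaces the set-based split of parameters with an order-preserving list-based split, so AdamW's parameter groups are built in the same order on every run; iterating a Python set of tensors depends on object identity hashes and can differ between processes, which is what made optimizer creation non-deterministic. Below is a minimal usage sketch of the patched helper; the toy nn.Linear model and the hyperparameter values are illustrative assumptions, not part of the diff.

# Minimal usage sketch (not part of the diff): builds AdamW param groups
# with the patched helper. The toy model and hyperparameters are assumptions.
import torch
from torch import nn
from torch.optim import AdamW

def separate_weight_decayable_params(params):
    # Biases and other tensors with fewer than 2 dims are excluded from weight decay.
    wd_params, no_wd_params = [], []
    for param in params:
        param_list = no_wd_params if param.ndim < 2 else wd_params
        param_list.append(param)
    return wd_params, no_wd_params

model = nn.Linear(16, 4)
wd_params, no_wd_params = separate_weight_decayable_params(model.parameters())

# Lists preserve the order of model.parameters(), so the param groups (and the
# optimizer state layout) are identical across runs, unlike the previous
# set-based version whose iteration order depended on object ids.
optimizer = AdamW(
    [
        {'params': wd_params},
        {'params': no_wd_params, 'weight_decay': 0},
    ],
    lr = 3e-4,
    weight_decay = 1e-2,
)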

View File

@@ -178,7 +178,7 @@ class EMA(nn.Module):
     def __init__(
         self,
         model,
-        beta = 0.9999,
+        beta = 0.99,
         update_after_step = 1000,
         update_every = 10,
     ):
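This second changed file only lowers the default EMA decay from 0.9999 to 0.99, which makes the averaged weights track the online weights much faster (roughly a ~100-step horizon instead of ~10,000). The EMA class internals are not shown in this diff, so the following is only a sketch of the standard exponential-moving-average update that such a beta typically feeds into; the helper name and toy model are assumptions.

# Illustrative sketch of the standard EMA update a decay like beta = 0.99 is
# plugged into; the repo's actual EMA class is not shown in the diff.
import copy
import torch
from torch import nn

@torch.no_grad()
def ema_update(ema_model: nn.Module, online_model: nn.Module, beta: float = 0.99):
    for ema_param, online_param in zip(ema_model.parameters(), online_model.parameters()):
        # ema <- beta * ema + (1 - beta) * online
        ema_param.lerp_(online_param, 1 - beta)

online = nn.Linear(8, 8)
ema = copy.deepcopy(online)
# A lower beta means the shadow copy forgets old weights sooner.
ema_update(ema, online, beta = 0.99)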

View File

@@ -1 +1 @@
-__version__ = '0.6.3'
+__version__ = '0.6.5'