default the device to the device that the diffusion prior parameters are on, if the trainer was given neither an accelerator nor a device

This commit is contained in:
Phil Wang
2022-07-06 12:47:48 -07:00
parent 1bd8a7835a
commit e928ae5c34
2 changed files with 16 additions and 8 deletions

View File

@@ -173,14 +173,26 @@ class DiffusionPriorTrainer(nn.Module):
super().__init__()
assert isinstance(diffusion_prior, DiffusionPrior)
assert not exists(accelerator) or isinstance(accelerator, Accelerator)
assert exists(accelerator) or exists(device), "You must supply some method of obtaining a device."
ema_kwargs, kwargs = groupby_prefix_and_trim('ema_', kwargs)
# verbosity
self.verbose = verbose
# assign some helpful member vars
self.accelerator = accelerator
self.device = accelerator.device if exists(accelerator) else device
self.text_conditioned = diffusion_prior.condition_on_text_encodings
# setting the device
if not exists(accelerator) and not exists(device):
diffusion_prior_device = next(diffusion_prior.parameters()).device
self.print(f'accelerator not given, and device not specified: defaulting to device of diffusion prior parameters - {diffusion_prior_device}')
self.device = diffusion_prior_device
else:
self.device = accelerator.device if exists(accelerator) else device
# save model
self.diffusion_prior = diffusion_prior
@@ -214,13 +226,9 @@ class DiffusionPriorTrainer(nn.Module):
self.max_grad_norm = max_grad_norm
# verbosity
self.verbose = verbose
# track steps internally
self.register_buffer('step', torch.tensor([0]))
self.register_buffer('step', torch.tensor([0], device = self.device))
# accelerator wrappers

View File

@@ -1 +1 @@
__version__ = '0.16.10'
__version__ = '0.16.12'