Compare commits

...

2 Commits

Author SHA1 Message Date
Phil Wang
c56336a104 pydantic 2 2023-07-15 09:08:39 -07:00
Phil Wang
00e07b7d61 force einops 0.6.1 or greater and call allow_ops_in_compiled_graph 2023-04-20 14:08:52 -07:00
4 changed files with 16 additions and 8 deletions

View File

@@ -1,3 +1,10 @@
import torch
from packaging import version
if version.parse(torch.__version__) >= version.parse('2.0.0'):
from einops._torch_specific import allow_ops_in_compiled_graph
allow_ops_in_compiled_graph()
from dalle2_pytorch.version import __version__
from dalle2_pytorch.dalle2_pytorch import DALLE2, DiffusionPriorNetwork, DiffusionPrior, Unet, Decoder
from dalle2_pytorch.dalle2_pytorch import OpenAIClipAdapter, OpenClipAdapter

View File

@@ -1,6 +1,6 @@
import json
from torchvision import transforms as T
from pydantic import BaseModel, validator, root_validator from pydantic import BaseModel, validator, model_validator
from typing import List, Optional, Union, Tuple, Dict, Any, TypeVar
from x_clip import CLIP as XCLIP
@@ -38,9 +38,9 @@ class TrainSplitConfig(BaseModel):
val: float = 0.15
test: float = 0.1
@root_validator @model_validator(mode = 'after')
def validate_all(cls, fields): def validate_all(self):
actual_sum = sum([*fields.values()]) actual_sum = sum([*dict(self).values()])
if actual_sum != 1.:
raise ValueError(f'{fields.keys()} must sum to 1.0. Found: {actual_sum}')
return fields
@@ -59,6 +59,7 @@ class TrackerLogConfig(BaseModel):
kwargs = self.dict()
return create_logger(self.log_type, data_path, **kwargs)
class TrackerLoadConfig(BaseModel):
load_from: Optional[str] = None
only_auto_resume: bool = False # Only attempt to load if the logger is auto-resuming
@@ -348,7 +349,7 @@ class TrainDecoderConfig(BaseModel):
config = json.load(f)
return cls(**config)
@root_validator @model_validator(mode = 'after')
def check_has_embeddings(cls, values):
# Makes sure that enough information is provided to get the embeddings specified for training
data_config, decoder_config = values.get('data'), values.get('decoder')

View File

@@ -1 +1 @@
__version__ = '1.14.0' __version__ = '1.15.0'

View File

@@ -30,13 +30,13 @@ setup(
'clip-anytorch>=2.5.2',
'coca-pytorch>=0.0.5',
'ema-pytorch>=0.0.7',
'einops>=0.6', 'einops>=0.6.1',
'embedding-reader',
'kornia>=0.5.4',
'numpy',
'packaging',
'pillow',
'pydantic', 'pydantic>=2',
'pytorch-warmup',
'resize-right>=0.0.2',
'rotary-embedding-torch',