add open clip to train_config (#260)

add the ability to use open_clip in the train configs (useful for the new SOTA h/14 model)
This commit is contained in:
zion
2022-11-07 15:44:36 -08:00
committed by GitHub
parent fbba0f9aaf
commit 7166ad6711

View File

@@ -4,11 +4,13 @@ from pydantic import BaseModel, validator, root_validator
from typing import List, Optional, Union, Tuple, Dict, Any, TypeVar
from x_clip import CLIP as XCLIP
from open_clip import list_pretrained
from coca_pytorch import CoCa
from dalle2_pytorch.dalle2_pytorch import (
    CoCaAdapter,
    OpenAIClipAdapter,
    OpenClipAdapter,
    Unet,
    Decoder,
    DiffusionPrior,
@@ -117,6 +119,10 @@ class AdapterConfig(BaseModel):
    def create(self):
        if self.make == "openai":
            return OpenAIClipAdapter(self.model)
        elif self.make == "open_clip":
            pretrained = dict(list_pretrained())
            checkpoint = pretrained[self.model]
            return OpenClipAdapter(name=self.model, pretrained=checkpoint)
        elif self.make == "x-clip":
            return XClipAdapter(XCLIP(**self.base_model_kwargs))
        elif self.make == "coca":