diff --git a/README.md b/README.md
index 46e4161..fec5898 100644
--- a/README.md
+++ b/README.md
@@ -499,9 +499,7 @@ loss.backward()
 
 Although there is the possibility they are using an unreleased, more powerful CLIP, you can use one of the released ones, if you do not wish to train your own CLIP from scratch. This will also allow the community to more quickly validate the conclusions of the paper.
 
-First you'll need to install the prerequisites
-
-Then to use a pretrained OpenAI CLIP, simply import `OpenAIClipAdapter` and pass it into the `DiffusionPrior` or `Decoder` like so
+To use a pretrained OpenAI CLIP, simply import `OpenAIClipAdapter` and pass it into the `DiffusionPrior` or `Decoder` like so
 
 ```python
 import torch
diff --git a/dalle2_pytorch/dalle2_pytorch.py b/dalle2_pytorch/dalle2_pytorch.py
index 586a730..4541285 100644
--- a/dalle2_pytorch/dalle2_pytorch.py
+++ b/dalle2_pytorch/dalle2_pytorch.py
@@ -172,11 +172,7 @@ class OpenAIClipAdapter(BaseClipAdapter):
         self,
         name = 'ViT-B/32'
     ):
-        try:
-            import clip
-        except ImportError:
-            print('you must install openai clip in order to use this adapter - `pip install git+https://github.com/openai/CLIP.git` - more instructions at https://github.com/openai/CLIP#usage')
-
+        import clip
         openai_clip, _ = clip.load(name)
         super().__init__(openai_clip)
 
@@ -1636,4 +1632,3 @@ class DALLE2(nn.Module):
             return images[0]
 
         return images
-
diff --git a/setup.py b/setup.py
index 33196c2..8687aa3 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@ setup(
       'dream = dalle2_pytorch.cli:dream'
     ],
   },
-  version = '0.0.72',
+  version = '0.0.73',
   license='MIT',
   description = 'DALL-E 2',
   author = 'Phil Wang',
@@ -23,6 +23,7 @@ setup(
   ],
   install_requires=[
     'click',
+    'clip-anytorch',
     'einops>=0.4',
     'einops-exts>=0.0.3',
     'kornia>=0.5.4',
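
The README code block the changed paragraph points to is cut off at the hunk boundary above (only `import torch` is visible). As a rough illustration of the usage that paragraph describes, here is a minimal sketch of wrapping a pretrained OpenAI CLIP with `OpenAIClipAdapter` and passing it into `DiffusionPrior`; the constructor arguments (`dim`, `depth`, `dim_head`, `heads`, `timesteps`, `cond_drop_prob`) and the mock data shapes are assumptions based on the repository's README around this version and may differ from the actual snippet.

```python
# Minimal sketch (not part of the diff): pretrained OpenAI CLIP via the adapter,
# now importable thanks to the `clip-anytorch` dependency added in setup.py.
# Constructor arguments below are assumed from the README of this era.
import torch
from dalle2_pytorch import OpenAIClipAdapter, DiffusionPriorNetwork, DiffusionPrior

# wrap a released OpenAI CLIP checkpoint (defaults to ViT-B/32)
clip = OpenAIClipAdapter('ViT-B/32')

# mock text tokens and images for a single training step
text = torch.randint(0, 49408, (4, 256))
images = torch.randn(4, 3, 256, 256)

# prior network (transformer) and the diffusion prior conditioned on the pretrained CLIP
prior_network = DiffusionPriorNetwork(
    dim = 512,
    depth = 6,
    dim_head = 64,
    heads = 8
)

diffusion_prior = DiffusionPrior(
    net = prior_network,
    clip = clip,
    timesteps = 100,
    cond_drop_prob = 0.2
)

# one training step on the mock batch
loss = diffusion_prior(text, images)
loss.backward()
```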