From a4ceca6d03c3b984a3f8cbcc88427eaadd6dd724 Mon Sep 17 00:00:00 2001
From: Tim Dockhorn <37085732+timudk@users.noreply.github.com>
Date: Wed, 26 Jul 2023 04:25:17 -0400
Subject: [PATCH] Revert "fall back to vanilla if xformers is not available
 (#51)" (#61)

This reverts commit ef520df1dbf9d680ceaa61167ff70047dd213b85.
---
 sgm/modules/diffusionmodules/model.py | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/sgm/modules/diffusionmodules/model.py b/sgm/modules/diffusionmodules/model.py
index 747b1a0..2b24deb 100644
--- a/sgm/modules/diffusionmodules/model.py
+++ b/sgm/modules/diffusionmodules/model.py
@@ -1,7 +1,5 @@
 # pytorch_diffusion + derived encoder decoder
 import logging
-import warnings
-
 import math
 from typing import Any, Callable, Optional
 
@@ -293,13 +291,6 @@ def make_attn(in_channels, attn_type="vanilla", attn_kwargs=None):
             f"as it is too expensive. Please install xformers via e.g. 'pip install xformers==0.0.16'"
         )
         attn_type = "vanilla-xformers"
-    if attn_type == "vanilla-xformers" and not XFORMERS_IS_AVAILABLE:
-        warnings.warn(
-            f"Requested attention type {attn_type!r} but Xformers is not available; "
-            f"falling back to vanilla attention"
-        )
-        attn_type = "vanilla"
-        attn_kwargs = None
     logger.debug(f"making attention of type '{attn_type}' with {in_channels} in_channels")
     if attn_type == "vanilla":
         assert attn_kwargs is None