add ability to specify full self attention on specific stages in the unet

commit 3d23ba4aa5 (parent 282c35930f)
Author: Phil Wang
Date:   2022-07-01 10:22:07 -07:00

3 changed files with 42 additions and 18 deletions


@@ -216,6 +216,7 @@ class UnetConfig(BaseModel):
     cond_on_text_encodings: bool = None
     cond_dim: int = None
     channels: int = 3
+    self_attn: ListOrTuple(int)
     attn_dim_head: int = 32
     attn_heads: int = 16
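
The new self_attn field lets a config flag, per unet stage, whether that stage uses full self attention. A minimal usage sketch of the intent, assuming the Unet constructor accepts the same per-stage self_attn flags as the config; the dim and dim_mults values here are hypothetical:

# sketch only: self_attn comes from this commit, the other kwargs are
# illustrative and may not match the exact Unet signature
from dalle2_pytorch import Unet

unet = Unet(
    dim = 128,
    dim_mults = (1, 2, 4, 8),                # four unet stages
    self_attn = (False, False, True, True),  # full self attention only on the two innermost stages
    attn_dim_head = 32,
    attn_heads = 16
)

Restricting full self attention to the inner (lowest-resolution) stages keeps the quadratic attention cost off the large feature maps while still letting the coarse stages attend globally.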