quick fix for linear attention

Phil Wang
2022-07-29 13:17:12 -07:00
parent d167378401
commit 87432e93ad
2 changed files with 3 additions and 2 deletions


@@ -1490,7 +1490,8 @@ class LinearAttention(nn.Module):
         self,
         dim,
         dim_head = 32,
-        heads = 8
+        heads = 8,
+        **kwargs
     ):
         super().__init__()
         self.scale = dim_head ** -0.5


@@ -1 +1 @@
-__version__ = '1.4.3'
+__version__ = '1.4.4'
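
The substantive change is the **kwargs catch-all added to LinearAttention.__init__ (the second hunk is just the version bump). A plausible reading is that this lets LinearAttention be constructed with the same keyword arguments as other attention variants, silently absorbing any it does not use instead of raising a TypeError. Below is a minimal, self-contained sketch of that pattern; the forward pass and the causal keyword are illustrative assumptions, not the repository's exact code.

import torch
from torch import nn, einsum
from einops import rearrange

class LinearAttention(nn.Module):
    # **kwargs swallows keyword arguments meant for other attention variants
    # (e.g. a hypothetical `causal` flag), matching the fix in this commit
    def __init__(self, dim, dim_head = 32, heads = 8, **kwargs):
        super().__init__()
        self.scale = dim_head ** -0.5
        self.heads = heads
        hidden_dim = dim_head * heads
        # project to queries, keys, values with a single 1x1 convolution
        self.to_qkv = nn.Conv2d(dim, hidden_dim * 3, 1, bias = False)
        self.to_out = nn.Conv2d(hidden_dim, dim, 1)

    def forward(self, x):
        b, c, h, w = x.shape
        q, k, v = self.to_qkv(x).chunk(3, dim = 1)
        q, k, v = map(lambda t: rearrange(t, 'b (h c) x y -> b h c (x y)', h = self.heads), (q, k, v))
        # linear attention: normalize q and k over different axes, then
        # contract k with v first, giving O(n) cost instead of O(n^2)
        q = q.softmax(dim = -2) * self.scale
        k = k.softmax(dim = -1)
        context = einsum('b h d n, b h e n -> b h d e', k, v)
        out = einsum('b h d e, b h d n -> b h e n', context, q)
        out = rearrange(out, 'b h c (x y) -> b (h c) x y', h = self.heads, x = h, y = w)
        return self.to_out(out)

# constructing with an unused kwarg no longer raises a TypeError
attn = LinearAttention(dim = 64, causal = False)
out = attn(torch.randn(1, 64, 16, 16))

The trade-off of the **kwargs pattern is that misspelled argument names are silently ignored rather than caught, which is presumably acceptable for a quick fix like this one.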