From 5c0b4cb97dacd2e6ea6b20bfde1275d2d21d4256 Mon Sep 17 00:00:00 2001
From: Kohaku-Blueleaf <59680068+KohakuBlueleaf@users.noreply.github.com>
Date: Fri, 16 Dec 2022 11:18:46 +0800
Subject: [PATCH] Fix wrong dim setting in SpatialTransformer

The data dimension flows in_ch -> inner_dim -> in_ch,
so proj_out should map inner_dim -> in_ch.
---
 ldm/modules/attention.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ldm/modules/attention.py b/ldm/modules/attention.py
index 509cd87..d3b978f 100644
--- a/ldm/modules/attention.py
+++ b/ldm/modules/attention.py
@@ -315,7 +315,7 @@ class SpatialTransformer(nn.Module):
                                                   stride=1,
                                                   padding=0))
         else:
-            self.proj_out = zero_module(nn.Linear(in_channels, inner_dim))
+            self.proj_out = zero_module(nn.Linear(inner_dim, in_channels))
         self.use_linear = use_linear
 
     def forward(self, x, context=None):
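
Note (not part of the commit): below is a minimal, hypothetical sketch, not the ldm code itself, showing why proj_out must map inner_dim back to in_channels when use_linear=True. The class name LinearProjectionSketch and the channel sizes 320/640 are illustrative assumptions; the transformer blocks are replaced by nn.Identity since only the channel dimensions matter here.

import torch
import torch.nn as nn

def zero_module(module):
    # Zero-init so the residual branch starts as an identity (mirrors ldm's zero_module helper).
    for p in module.parameters():
        p.detach().zero_()
    return module

class LinearProjectionSketch(nn.Module):
    # Hypothetical stand-in for SpatialTransformer with use_linear=True.
    def __init__(self, in_channels, inner_dim):
        super().__init__()
        self.proj_in = nn.Linear(in_channels, inner_dim)   # in_ch -> inner_dim
        self.blocks = nn.Identity()                        # transformer blocks operate at inner_dim
        # The fix: project inner_dim back to in_channels so the residual add below works.
        self.proj_out = zero_module(nn.Linear(inner_dim, in_channels))

    def forward(self, x):
        b, c, h, w = x.shape
        x_in = x
        x = x.permute(0, 2, 3, 1).reshape(b, h * w, c)     # (b, h*w, in_ch)
        x = self.proj_in(x)                                # (b, h*w, inner_dim)
        x = self.blocks(x)                                 # (b, h*w, inner_dim)
        x = self.proj_out(x)                               # (b, h*w, in_ch)
        x = x.reshape(b, h, w, c).permute(0, 3, 1, 2)      # back to (b, in_ch, h, w)
        return x + x_in                                    # shapes only match with inner_dim -> in_ch

if __name__ == "__main__":
    m = LinearProjectionSketch(in_channels=320, inner_dim=640)
    print(m(torch.randn(1, 320, 16, 16)).shape)            # torch.Size([1, 320, 16, 16])

With the previous nn.Linear(in_channels, inner_dim), proj_out would emit inner_dim features and the residual addition (and the reshape back to the input channel count) would fail whenever inner_dim != in_channels.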