
Commit f47aab6

Revert excess changes

1 parent 14eb932

File tree

1 file changed: +2 -2 lines changed

dalle_pytorch/attention.py

Lines changed: 2 additions & 2 deletions
@@ -132,7 +132,7 @@ def forward(self, x, mask = None, rotary_pos_emb = None):
         if exists(rotary_pos_emb):
             q, k, v = apply_pos_emb(rotary_pos_emb, (q, k, v))
 
-        q = q * self.scale
+        q *= self.scale
 
         ((q_text, q_img), (k_text, k_img), (v_text, v_img)) = map(lambda t: (t[:, :-img_seq_len], t[:, -img_seq_len:]), (q, k, v))
 
@@ -252,7 +252,7 @@ def forward(self, x, mask = None, rotary_pos_emb = None):
         if exists(rotary_pos_emb):
             q, k, v = apply_pos_emb(rotary_pos_emb, (q, k, v))
 
-        q = q * self.scale
+        q *= self.scale
 
         ((q_text, q_img), (k_text, k_img), (v_text, v_img)) = map(lambda t: (t[:, :-img_seq_len], t[:, -img_seq_len:]), (q, k, v))
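Note: the only change in both hunks is replacing the out-of-place multiplication with Python's in-place operator. A minimal standalone sketch of the difference on a PyTorch tensor (the shape and scale below are illustrative assumptions, not values from dalle_pytorch):

    import torch

    # Illustrative query tensor and scale factor (assumed, not from the repo).
    q = torch.randn(1, 16, 64)
    scale = 64 ** -0.5

    out_of_place = q * scale   # allocates a new tensor; q is untouched
    q *= scale                 # rewrites q's storage in place, no new allocation

    assert torch.allclose(out_of_place, q)

Both forms compute the same values; the in-place version simply avoids allocating a second tensor of the same size.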