Upcasting rope to fp32 seems to make no difference in this model.
comfyanonymous committed Feb 5, 2025
commit 94f21f9 (parent 6065300)
Showing 1 changed file with 3 additions and 3 deletions.
comfy/ldm/lumina/model.py
@@ -93,9 +93,9 @@ def apply_rotary_emb(
             and key tensor with rotary embeddings.
         """

-        t_ = x_in.reshape(*x_in.shape[:-1], -1, 1, 2).float()
+        t_ = x_in.reshape(*x_in.shape[:-1], -1, 1, 2)
         t_out = freqs_cis[..., 0] * t_[..., 0] + freqs_cis[..., 1] * t_[..., 1]
-        return t_out.reshape(*x_in.shape).type_as(x_in)
+        return t_out.reshape(*x_in.shape)

     def forward(
         self,
@@ -552,7 +552,7 @@ def patchify_and_embed(
             position_ids[i, cap_len:cap_len+img_len, 1] = row_ids
             position_ids[i, cap_len:cap_len+img_len, 2] = col_ids

-        freqs_cis = self.rope_embedder(position_ids).movedim(1, 2)
+        freqs_cis = self.rope_embedder(position_ids).movedim(1, 2).to(dtype)

         # build freqs_cis for cap and image individually
         cap_freqs_cis_shape = list(freqs_cis.shape)
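
Net effect: the rotary math now runs in the model's working dtype end to end. apply_rotary_emb no longer upcasts its input to fp32 (or casts back with type_as), and patchify_and_embed instead casts the precomputed freqs_cis table to the model dtype once. A minimal standalone sketch of the before/after comparison follows; the freqs_cis layout (paired [cos, sin] / [-sin, cos] columns) and the tensor shapes are assumptions chosen to match the indexing above, not necessarily what the repo's rope_embedder actually produces.

import torch

def apply_rotary_emb(x_in, freqs_cis):
    # Post-commit version: no .float() upcast, no .type_as() downcast.
    # Views the last dim as (dim // 2) pairs and rotates each pair by the
    # precomputed 2x2 rotation entries in freqs_cis.
    t_ = x_in.reshape(*x_in.shape[:-1], -1, 1, 2)
    t_out = freqs_cis[..., 0] * t_[..., 0] + freqs_cis[..., 1] * t_[..., 1]
    return t_out.reshape(*x_in.shape)

torch.manual_seed(0)
seq, dim = 16, 64
x = torch.randn(seq, dim, dtype=torch.float16)

# Assumed layout: (seq, dim // 2, 2, 2) rotation matrices whose trailing
# index selects a column, so freqs_cis[..., 0] = [cos, sin] and
# freqs_cis[..., 1] = [-sin, cos]. This matches the indexing above but is
# an assumption about the real rope_embedder output.
theta = torch.rand(seq, dim // 2) * 6.28
cos, sin = theta.cos(), theta.sin()
freqs_cis = torch.stack(
    [torch.stack([cos, sin], dim=-1), torch.stack([-sin, cos], dim=-1)],
    dim=-1,
)

old = apply_rotary_emb(x.float(), freqs_cis).half()  # pre-commit: fp32 math
new = apply_rotary_emb(x, freqs_cis.half())          # post-commit: fp16 math
print((old - new).abs().max())  # on the order of fp16 rounding (~1e-3)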
