From 6a6912ec37c654bfe98d3b3eb872be06c9d88b4e Mon Sep 17 00:00:00 2001
From: Jade Choghari
Date: Tue, 27 Jan 2026 16:00:40 +0000
Subject: [PATCH] revert .clone

---
 src/lerobot/policies/pi05_full/modeling_pi05.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/lerobot/policies/pi05_full/modeling_pi05.py b/src/lerobot/policies/pi05_full/modeling_pi05.py
index 641c11da9..b9d41b3de 100644
--- a/src/lerobot/policies/pi05_full/modeling_pi05.py
+++ b/src/lerobot/policies/pi05_full/modeling_pi05.py
@@ -375,8 +375,7 @@ def compute_layer_complete(
     out_emb = layer.self_attn.o_proj(att_output[:, start_pos:end_pos])
     # first residual
     out_emb = modeling_gemma._gated_residual(hidden_states, out_emb, gates[i])  # noqa: SLF001
-    # Store reference instead of clone - we need original for second residual
-    after_first_residual = out_emb
+    after_first_residual = out_emb.clone()
     out_emb, gate = layer.post_attention_layernorm(out_emb.clone(), cond=adarms_cond[i])
     # convert to bfloat16 if the next layer (mlp) uses bfloat16
     if layer.mlp.up_proj.weight.dtype == torch.bfloat16:
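
Context for the revert: `after_first_residual` is consumed by the second
residual connection later in the layer, so it must be an independent copy.
Holding a bare reference aliases `out_emb`'s storage, and any subsequent
in-place write to that buffer silently corrupts the saved activation.
Below is a minimal, self-contained sketch of that aliasing hazard in plain
PyTorch; it is not the lerobot code, and the tensor names are illustrative:

    import torch

    x = torch.ones(3)

    saved_ref = x           # bare reference: shares x's storage
    saved_copy = x.clone()  # clone: independent copy of the data

    x.add_(1.0)             # in-place update, as a later op might perform

    print(saved_ref)   # tensor([2., 2., 2.])  <- "saved" value changed
    print(saved_copy)  # tensor([1., 1., 1.])  <- clone kept the original

The `.clone()` costs one extra copy per layer, but it keeps the value saved
for the second residual independent of whatever downstream ops do to the
buffer.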