From 46044fed753f62fe54ced13adcd9af865ed36fb0 Mon Sep 17 00:00:00 2001 From: Khalil Meftah Date: Thu, 26 Feb 2026 13:28:46 +0100 Subject: [PATCH] Fix: remove device_map from SmolVLA model loading (#3029) * Fix SmolVLA meta tensor error by removing device_map - Remove device_map parameter from VLM model loading - Change torch_dtype from string to torch.bfloat16 - Add explicit .to(device) calls after initialization This resolves the NotImplementedError when training the SmolVLA policy. Fixes the meta tensor copy issue in factory.py:418. * fix: remove manual device movement logic and fix dtype handling --------- Co-authored-by: Highsky7 --- src/lerobot/policies/smolvla/smolvlm_with_expert.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lerobot/policies/smolvla/smolvlm_with_expert.py b/src/lerobot/policies/smolvla/smolvlm_with_expert.py index 555c40773..caca41dab 100644 --- a/src/lerobot/policies/smolvla/smolvlm_with_expert.py +++ b/src/lerobot/policies/smolvla/smolvlm_with_expert.py @@ -77,7 +77,6 @@ class SmolVLMWithExpertModel(nn.Module): print(f"Loading {model_id} weights ...") self.vlm = AutoModelForImageTextToText.from_pretrained( model_id, - device_map=device, torch_dtype="bfloat16", low_cpu_mem_usage=True, )