chore(processor): add Step suffix to all processors (#1854)

* refactor(processor): rename MapDeltaActionToRobotAction and MapTensorToDeltaActionDict for consistency

* refactor(processor): rename DeviceProcessor to DeviceProcessorStep for consistency across modules

* refactor(processor): rename Torch2NumpyActionProcessor to Torch2NumpyActionProcessorStep for consistency

* refactor(processor): rename Numpy2TorchActionProcessor to Numpy2TorchActionProcessorStep for consistency

* refactor(processor): rename AddTeleopActionAsComplimentaryData to AddTeleopActionAsComplimentaryDataStep for consistency

* refactor(processor): rename ImageCropResizeProcessor and AddTeleopEventsAsInfo for consistency

* refactor(processor): rename TimeLimitProcessor to TimeLimitProcessorStep for consistency

* refactor(processor): rename GripperPenaltyProcessor to GripperPenaltyProcessorStep for consistency

* refactor(processor): rename InterventionActionProcessor to InterventionActionProcessorStep for consistency

* refactor(processor): rename RewardClassifierProcessor to RewardClassifierProcessorStep for consistency

* refactor(processor): rename JointVelocityProcessor to JointVelocityProcessorStep for consistency

* refactor(processor): rename MotorCurrentProcessor to MotorCurrentProcessorStep for consistency

* refactor(processor): rename NormalizerProcessor and UnnormalizerProcessor to NormalizerProcessorStep and UnnormalizerProcessorStep for consistency

* refactor(processor): rename VanillaObservationProcessor to VanillaObservationProcessorStep for consistency

* refactor(processor): rename RenameProcessor to RenameProcessorStep for consistency

* refactor(processor): rename TokenizerProcessor to TokenizerProcessorStep for consistency

* refactor(processor): rename ToBatchProcessor to AddBatchDimensionProcessorStep for consistency

* refactor(processor): update config file name in test for RenameProcessorStep consistency
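
Taken together, the renames above amount to a mechanical old-name → new-name mapping. A hypothetical migration helper (a sketch only, not part of this PR; it covers just the renames whose new names are spelled out above) could update downstream code like so:

```python
"""Hypothetical migration sketch (not part of this PR): rewrite the old processor
class names to their new *Step equivalents across a source tree."""
import re
from pathlib import Path

# Old -> new names, taken from the commit messages above. Renames whose new
# names are not spelled out there (e.g. ImageCropResizeProcessor) are omitted.
RENAMES = {
    "DeviceProcessor": "DeviceProcessorStep",
    "Torch2NumpyActionProcessor": "Torch2NumpyActionProcessorStep",
    "Numpy2TorchActionProcessor": "Numpy2TorchActionProcessorStep",
    "AddTeleopActionAsComplimentaryData": "AddTeleopActionAsComplimentaryDataStep",
    "TimeLimitProcessor": "TimeLimitProcessorStep",
    "GripperPenaltyProcessor": "GripperPenaltyProcessorStep",
    "InterventionActionProcessor": "InterventionActionProcessorStep",
    "RewardClassifierProcessor": "RewardClassifierProcessorStep",
    "JointVelocityProcessor": "JointVelocityProcessorStep",
    "MotorCurrentProcessor": "MotorCurrentProcessorStep",
    "NormalizerProcessor": "NormalizerProcessorStep",
    "UnnormalizerProcessor": "UnnormalizerProcessorStep",
    "VanillaObservationProcessor": "VanillaObservationProcessorStep",
    "RenameProcessor": "RenameProcessorStep",
    "TokenizerProcessor": "TokenizerProcessorStep",
    "ToBatchProcessor": "AddBatchDimensionProcessorStep",  # renamed outright, not just suffixed
}

# Word boundaries keep e.g. NormalizerProcessor from matching inside an
# already-renamed NormalizerProcessorStep or another longer identifier.
_PATTERN = re.compile(r"\b(" + "|".join(map(re.escape, RENAMES)) + r")\b")


def migrate(root: Path) -> None:
    """Apply the renames in place to every .py file under `root`."""
    for path in root.rglob("*.py"):
        text = path.read_text()
        new_text = _PATTERN.sub(lambda m: RENAMES[m.group(1)], text)
        if new_text != text:
            path.write_text(new_text)
```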
Author: Adil Zouitine
Date: 2025-09-03 18:12:11 +02:00
Committed by: GitHub
Parent: 029c4a9a76
Commit: ce793cde64
37 changed files with 653 additions and 628 deletions
@@ -25,12 +25,12 @@ from lerobot.constants import ACTION, OBS_IMAGE, OBS_STATE
 from lerobot.policies.pi0.configuration_pi0 import PI0Config
 from lerobot.policies.pi0.processor_pi0 import Pi0NewLineProcessor, make_pi0_pre_post_processors
 from lerobot.processor import (
-    DeviceProcessor,
-    NormalizerProcessor,
-    RenameProcessor,
-    ToBatchProcessor,
+    AddBatchDimensionProcessorStep,
+    DeviceProcessorStep,
+    NormalizerProcessorStep,
+    RenameProcessorStep,
     TransitionKey,
-    UnnormalizerProcessor,
+    UnnormalizerProcessorStep,
 )
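
The hunk above is a hard rename of the `lerobot.processor` exports; the diff adds no backward-compatibility aliases. If the old names needed to keep working during a transition, a deprecation shim along these lines could be added (purely a sketch, not something this PR does):

```python
# Hypothetical deprecation shim (NOT in this PR): keep an old name importable
# while steering callers to the new *Step class.
import warnings

from lerobot.processor import DeviceProcessorStep


class DeviceProcessor(DeviceProcessorStep):
    """Deprecated alias for DeviceProcessorStep."""

    def __init__(self, *args, **kwargs):
        warnings.warn(
            "DeviceProcessor is deprecated; use DeviceProcessorStep instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        super().__init__(*args, **kwargs)
```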
@@ -83,7 +83,7 @@ def test_make_pi0_processor_basic():
     config = create_default_config()
     stats = create_default_stats()
 
-    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessor"):
+    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessorStep"):
         preprocessor, postprocessor = make_pi0_pre_post_processors(
             config,
             stats,
@@ -97,17 +97,17 @@ def test_make_pi0_processor_basic():
 
     # Check steps in preprocessor
     assert len(preprocessor.steps) == 6
-    assert isinstance(preprocessor.steps[0], RenameProcessor)
-    assert isinstance(preprocessor.steps[1], NormalizerProcessor)
-    assert isinstance(preprocessor.steps[2], ToBatchProcessor)
+    assert isinstance(preprocessor.steps[0], RenameProcessorStep)
+    assert isinstance(preprocessor.steps[1], NormalizerProcessorStep)
+    assert isinstance(preprocessor.steps[2], AddBatchDimensionProcessorStep)
     assert isinstance(preprocessor.steps[3], Pi0NewLineProcessor)
-    # Step 4 would be TokenizerProcessor but it's mocked
-    assert isinstance(preprocessor.steps[5], DeviceProcessor)
+    # Step 4 would be TokenizerProcessorStep but it's mocked
+    assert isinstance(preprocessor.steps[5], DeviceProcessorStep)
 
     # Check steps in postprocessor
     assert len(postprocessor.steps) == 2
-    assert isinstance(postprocessor.steps[0], DeviceProcessor)
-    assert isinstance(postprocessor.steps[1], UnnormalizerProcessor)
+    assert isinstance(postprocessor.steps[0], DeviceProcessorStep)
+    assert isinstance(postprocessor.steps[1], UnnormalizerProcessorStep)
 
 
 def test_pi0_newline_processor_single_task():
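
For reference, the updated assertions pin the pi0 pipeline to this ordering: Rename → Normalizer → AddBatchDimension → Pi0NewLine → Tokenizer (patched out in this test) → Device for the preprocessor, and Device → Unnormalizer for the postprocessor. An equivalent, more compact check might look like this (a sketch, assuming `steps` stays a plain list of step instances):

```python
# Sketch of a tighter assertion over the same expected ordering. Index 4 is the
# TokenizerProcessorStep slot, which is patched out in test_make_pi0_processor_basic.
expected_pre = [
    RenameProcessorStep,
    NormalizerProcessorStep,
    AddBatchDimensionProcessorStep,
    Pi0NewLineProcessor,
]
for step, expected in zip(preprocessor.steps[:4], expected_pre):
    assert isinstance(step, expected)
assert isinstance(preprocessor.steps[5], DeviceProcessorStep)

expected_post = [DeviceProcessorStep, UnnormalizerProcessorStep]
for step, expected in zip(postprocessor.steps, expected_post):
    assert isinstance(step, expected)
```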
@@ -165,7 +165,7 @@ def test_pi0_processor_cuda():
     stats = create_default_stats()
 
     # Mock the tokenizer processor to act as pass-through
-    class MockTokenizerProcessor:
+    class MockTokenizerProcessorStep:
         def __init__(self, *args, **kwargs):
             pass
@@ -187,7 +187,7 @@ def test_pi0_processor_cuda():
         def transform_features(self, features):
             return features
 
-    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessor", MockTokenizerProcessor):
+    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessorStep", MockTokenizerProcessorStep):
         preprocessor, postprocessor = make_pi0_pre_post_processors(
             config,
             stats,
@@ -220,7 +220,7 @@ def test_pi0_processor_accelerate_scenario():
     stats = create_default_stats()
 
     # Mock the tokenizer processor to act as pass-through
-    class MockTokenizerProcessor:
+    class MockTokenizerProcessorStep:
         def __init__(self, *args, **kwargs):
             pass
@@ -242,7 +242,7 @@ def test_pi0_processor_accelerate_scenario():
         def transform_features(self, features):
             return features
 
-    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessor", MockTokenizerProcessor):
+    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessorStep", MockTokenizerProcessorStep):
         preprocessor, postprocessor = make_pi0_pre_post_processors(
             config,
             stats,
@@ -276,7 +276,7 @@ def test_pi0_processor_multi_gpu():
     stats = create_default_stats()
 
     # Mock the tokenizer processor to act as pass-through
-    class MockTokenizerProcessor:
+    class MockTokenizerProcessorStep:
         def __init__(self, *args, **kwargs):
             pass
@@ -298,7 +298,7 @@ def test_pi0_processor_multi_gpu():
         def transform_features(self, features):
             return features
 
-    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessor", MockTokenizerProcessor):
+    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessorStep", MockTokenizerProcessorStep):
         preprocessor, postprocessor = make_pi0_pre_post_processors(
             config,
             stats,
@@ -329,7 +329,7 @@ def test_pi0_processor_without_stats():
     config = create_default_config()
 
     # Mock the tokenizer processor
-    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessor"):
+    with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessorStep"):
         preprocessor, postprocessor = make_pi0_pre_post_processors(
             config,
             dataset_stats=None,