refactor(pipeline): enforce ProcessorStep inheritance for pipeline steps (#1862)

- Updated the DataProcessorPipeline to require that all steps inherit from ProcessorStep, enhancing type safety and clarity.
- Adjusted tests to utilize a MockTokenizerProcessorStep that adheres to the ProcessorStep interface, ensuring consistent behavior across tests.
- Refactored various mock step classes in tests to inherit from ProcessorStep for improved consistency and maintainability.
This commit is contained in:
Adil Zouitine
2025-09-04 16:22:03 +02:00
committed by GitHub
parent fc43246942
commit 332ca4ccc5
4 changed files with 85 additions and 38 deletions
+23 -5
View File
@@ -27,13 +27,31 @@ from lerobot.policies.pi0.processor_pi0 import Pi0NewLineProcessor, make_pi0_pre
from lerobot.processor import (
AddBatchDimensionProcessorStep,
DeviceProcessorStep,
EnvTransition,
NormalizerProcessorStep,
ProcessorStep,
RenameProcessorStep,
TransitionKey,
UnnormalizerProcessorStep,
)
class MockTokenizerProcessorStep(ProcessorStep):
    """Pass-through stand-in for the real TokenizerProcessorStep in tests.

    Implements the ProcessorStep interface while leaving both transitions
    and feature specs completely untouched, so pipelines under test behave
    as if no tokenization step were present.
    """

    def __init__(self, *args, **kwargs):
        # Swallow any constructor arguments so this mock can be dropped in
        # wherever the real TokenizerProcessorStep is instantiated.
        pass

    def __call__(self, transition: EnvTransition) -> EnvTransition:
        """Return the incoming transition unmodified."""
        return transition

    def transform_features(self, features):
        """Return the incoming feature spec unmodified."""
        return features
def create_transition(observation=None, action=None, **kwargs):
"""Helper function to create a transition dictionary."""
transition = {}
@@ -83,7 +101,7 @@ def test_make_pi0_processor_basic():
config = create_default_config()
stats = create_default_stats()
with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessorStep"):
with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessorStep", MockTokenizerProcessorStep):
preprocessor, postprocessor = make_pi0_pre_post_processors(
config,
stats,
@@ -165,7 +183,7 @@ def test_pi0_processor_cuda():
stats = create_default_stats()
# Mock the tokenizer processor to act as pass-through
class MockTokenizerProcessorStep:
class MockTokenizerProcessorStep(ProcessorStep):
def __init__(self, *args, **kwargs):
pass
@@ -220,7 +238,7 @@ def test_pi0_processor_accelerate_scenario():
stats = create_default_stats()
# Mock the tokenizer processor to act as pass-through
class MockTokenizerProcessorStep:
class MockTokenizerProcessorStep(ProcessorStep):
def __init__(self, *args, **kwargs):
pass
@@ -276,7 +294,7 @@ def test_pi0_processor_multi_gpu():
stats = create_default_stats()
# Mock the tokenizer processor to act as pass-through
class MockTokenizerProcessorStep:
class MockTokenizerProcessorStep(ProcessorStep):
def __init__(self, *args, **kwargs):
pass
@@ -329,7 +347,7 @@ def test_pi0_processor_without_stats():
config = create_default_config()
# Mock the tokenizer processor
with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessorStep"):
with patch("lerobot.policies.pi0.processor_pi0.TokenizerProcessorStep", MockTokenizerProcessorStep):
preprocessor, postprocessor = make_pi0_pre_post_processors(
config,
dataset_stats=None,