Creating a Modular Environment#
This tutorial demonstrates how to create sophisticated robotic environments using EmbodiChain’s modular architecture. You’ll learn how to use the advanced envs.EmbodiedEnv class with configuration-driven setup, event managers, observation managers, and randomization systems.
The Code#
The tutorial corresponds to the modular_env.py script in the scripts/tutorials/gym directory.
Code for modular_env.py
# ----------------------------------------------------------------------------
# Copyright (c) 2021-2025 DexForce Technology Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------

import torch

from typing import List, Dict, Any

import embodichain.lab.gym.envs.managers.randomization as rand
import embodichain.lab.gym.envs.managers.events as events
import embodichain.lab.gym.envs.managers.observations as obs

from embodichain.lab.gym.envs.managers import (
    EventCfg,
    SceneEntityCfg,
    ObservationCfg,
)
from embodichain.lab.gym.envs import EmbodiedEnv, EmbodiedEnvCfg
from embodichain.lab.gym.utils.registration import register_env
from embodichain.lab.sim.robots import DexforceW1Cfg
from embodichain.lab.sim.sensors import StereoCameraCfg, SensorCfg
from embodichain.lab.sim.shapes import MeshCfg
from embodichain.lab.sim.cfg import (
    LightCfg,
    ArticulationCfg,
    RobotCfg,
    RigidObjectCfg,
    RigidBodyAttributesCfg,
)
from embodichain.data import get_data_path
from embodichain.utils import configclass


@configclass
class ExampleEventCfg:

    # Swap the fork mesh for a random asset from the folder at every reset.
    replace_obj: EventCfg = EventCfg(
        func=events.replace_assets_from_group,
        mode="reset",
        params={
            "entity_cfg": SceneEntityCfg(
                uid="fork",
            ),
            "folder_path": get_data_path("TableWare/tableware/fork/"),
        },
    )

    # Randomize the fork's mass at every reset.
    randomize_fork_mass: EventCfg = EventCfg(
        func=rand.randomize_rigid_object_mass,
        mode="reset",
        params={
            "entity_cfg": SceneEntityCfg(
                uid="fork",
            ),
            "mass_range": (0.1, 2.0),
        },
    )

    # Randomize the point light every 5 simulation steps.
    randomize_light: EventCfg = EventCfg(
        func=rand.randomize_light,
        mode="interval",
        interval_step=5,
        params={
            "entity_cfg": SceneEntityCfg(
                uid="point",
            ),
            "position_range": [[-0.5, -0.5, 2], [0.5, 0.5, 2]],
            "color_range": [[0.6, 0.6, 0.6], [1, 1, 1]],
            "intensity_range": [50.0, 100.0],
        },
    )

    # Randomize the table's visual material every 10 simulation steps.
    randomize_table_mat: EventCfg = EventCfg(
        func=rand.randomize_visual_material,
        mode="interval",
        interval_step=10,
        params={
            "entity_cfg": SceneEntityCfg(
                uid="table",
            ),
            "random_texture_prob": 0.5,
            "texture_path": get_data_path("CocoBackground/coco"),
            "base_color_range": [[0.2, 0.2, 0.2], [1.0, 1.0, 1.0]],
        },
    )


@configclass
class ObsCfg:

    # Add the fork's pose to the observation dictionary under "fork_pose".
    obj_pose: ObservationCfg = ObservationCfg(
        func=obs.get_rigid_object_pose,
        mode="add",
        name="fork_pose",
        params={"entity_cfg": SceneEntityCfg(uid="fork")},
    )


@configclass
class ExampleCfg(EmbodiedEnvCfg):

    # Define the robot configuration using DexforceW1Cfg
    robot: RobotCfg = DexforceW1Cfg.from_dict(
        {
            "uid": "dexforce_w1",
            "version": "v021",
            "arm_kind": "anthropomorphic",
            "init_pos": [0.0, 0, 0.0],
        }
    )

    # Define the sensor configuration using StereoCameraCfg
    sensor: List[SensorCfg] = [
        StereoCameraCfg(
            uid="eye_in_head",
            width=960,
            height=540,
            enable_mask=True,
            enable_depth=True,
            left_to_right_pos=(0.06, 0, 0),
            intrinsics=(450, 450, 480, 270),
            intrinsics_right=(450, 450, 480, 270),
            extrinsics=StereoCameraCfg.ExtrinsicsCfg(
                parent="eyes",
            ),
        )
    ]

    light: EmbodiedEnvCfg.EnvLightCfg = EmbodiedEnvCfg.EnvLightCfg(
        direct=[
            LightCfg(
                uid="point",
                light_type="point",
                color=(1.0, 1.0, 1.0),
                intensity=50.0,
                init_pos=(0, 0, 2),
            )
        ]
    )

    background: List[RigidObjectCfg] = [
        RigidObjectCfg(
            uid="table",
            shape=MeshCfg(
                fpath=get_data_path("CircleTableSimple/circle_table_simple.ply"),
                compute_uv=True,
            ),
            attrs=RigidBodyAttributesCfg(
                mass=10.0,
                static_friction=0.95,
                dynamic_friction=0.85,
                restitution=0.01,
            ),
            body_type="kinematic",
            init_pos=(0.80, 0, 0.8),
            init_rot=(0, 90, 0),
        ),
    ]

    rigid_object: List[RigidObjectCfg] = [
        RigidObjectCfg(
            uid="fork",
            shape=MeshCfg(
                fpath=get_data_path("TableWare/tableware/fork/standard_fork_scale.ply"),
            ),
            body_scale=(0.75, 0.75, 1.0),
            init_pos=(0.8, 0, 1.0),
        ),
    ]

    articulation_cfg: List[ArticulationCfg] = [
        ArticulationCfg(
            uid="drawer",
            fpath="SlidingBoxDrawer/SlidingBoxDrawer.urdf",
            init_pos=(0.5, 0.0, 0.85),
        )
    ]

    events = ExampleEventCfg()

    observations = ObsCfg()


@register_env("ModularEnv-v1", max_episode_steps=100, override=True)
class ModularEnv(EmbodiedEnv):
    """
    An example of a modular environment that inherits from EmbodiedEnv
    and uses custom event and observation managers.
    """

    def __init__(self, cfg: EmbodiedEnvCfg, **kwargs):
        super().__init__(cfg, **kwargs)


if __name__ == "__main__":
    import gymnasium as gym
    import argparse

    from embodichain.lab.sim import SimulationManagerCfg

    parser = argparse.ArgumentParser()
    parser.add_argument("--enable_rt", action="store_true", help="Enable ray tracing")
    args = parser.parse_args()

    env_cfg = ExampleCfg(sim_cfg=SimulationManagerCfg(enable_rt=args.enable_rt))

    # Create the Gym environment
    env = gym.make("ModularEnv-v1", cfg=env_cfg)

    while True:
        obs, info = env.reset()

        for i in range(100):
            action = torch.zeros(env.action_space.shape, dtype=torch.float32)
            obs, reward, done, truncated, info = env.step(action)
The Code Explained#
This tutorial showcases EmbodiChain’s most powerful environment creation approach using the envs.EmbodiedEnv class. Unlike the basic environment tutorial, this approach uses declarative configuration classes and manager systems for maximum flexibility and reusability.
Event Configuration#
Events define automated behaviors that occur during simulation. Three trigger modes are supported (a startup-mode sketch follows the list):
startup: triggers once when the environment is initialized
reset: triggers every time the environment is reset
interval: triggers at fixed step intervals during simulation
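The tutorial's configuration uses only reset and interval events. As a minimal sketch, a startup event is declared the same way; this reuses the mass-randomization function from the listing, though whether that particular function supports startup mode is an assumption:

warmup_fork_mass: EventCfg = EventCfg(
    # Hypothetical startup event: randomize the fork's mass once at initialization.
    # Startup support for this specific function is assumed, not verified.
    func=rand.randomize_rigid_object_mass,
    mode="startup",
    params={
        "entity_cfg": SceneEntityCfg(uid="fork"),
        "mass_range": (0.5, 1.5),
    },
)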
The ExampleEventCfg defines four events:
@configclass
class ExampleEventCfg:

    replace_obj: EventCfg = EventCfg(
        func=events.replace_assets_from_group,
        mode="reset",
        params={
            "entity_cfg": SceneEntityCfg(uid="fork"),
            "folder_path": get_data_path("TableWare/tableware/fork/"),
        },
    )

    randomize_fork_mass: EventCfg = EventCfg(
        func=rand.randomize_rigid_object_mass,
        mode="reset",
        params={
            "entity_cfg": SceneEntityCfg(uid="fork"),
            "mass_range": (0.1, 2.0),
        },
    )

    randomize_light: EventCfg = EventCfg(
        func=rand.randomize_light,
        mode="interval",
        interval_step=5,
        params={
            "entity_cfg": SceneEntityCfg(uid="point"),
            "position_range": [[-0.5, -0.5, 2], [0.5, 0.5, 2]],
            "color_range": [[0.6, 0.6, 0.6], [1, 1, 1]],
            "intensity_range": [50.0, 100.0],
        },
    )

    randomize_table_mat: EventCfg = EventCfg(
        func=rand.randomize_visual_material,
        mode="interval",
        interval_step=10,
        params={
            "entity_cfg": SceneEntityCfg(uid="table"),
            "random_texture_prob": 0.5,
            "texture_path": get_data_path("CocoBackground/coco"),
            "base_color_range": [[0.2, 0.2, 0.2], [1.0, 1.0, 1.0]],
        },
    )
Asset Replacement Event
The replace_obj event demonstrates dynamic asset swapping:
Function: envs.managers.events.replace_assets_from_group()
Mode: "reset" - triggers at every environment reset
Purpose: Randomly selects a different fork model from the given folder
Mass Randomization Event
The randomize_fork_mass event varies physical properties:
Function: envs.managers.randomization.randomize_rigid_object_mass()
Mode: "reset" - triggers at every environment reset
Parameters: Samples the fork's mass from the range (0.1, 2.0)
Light Randomization Event
The randomize_light event creates dynamic lighting conditions:
Function: envs.managers.randomization.rendering.randomize_light()
Mode: "interval" - triggers every 5 steps
Parameters: Randomizes position, color, and intensity within the specified ranges
Material Randomization Event
The randomize_table_mat event varies visual appearance:
Function: envs.managers.randomization.rendering.randomize_visual_material()
Mode: "interval" - triggers every 10 steps
Features: Random textures from the COCO dataset and base color variations
For more randomization events, refer to the envs.managers.randomization module.
Observation Configuration#
The default observation from envs.EmbodiedEnv includes:
- robot: robot proprioceptive data (joint positions, velocities, efforts)
- sensor: all available sensor data (images, depth, segmentation, etc.)
However, users often need to define custom observations for a specific learning task. To handle this, the observation manager system lets users declaratively specify additional observations:
"position_range": [[-0.5, -0.5, 2], [0.5, 0.5, 2]],
"color_range": [[0.6, 0.6, 0.6], [1, 1, 1]],
"intensity_range": [50.0, 100.0],
},
)
randomize_table_mat: EventCfg = EventCfg(
func=rand.randomize_visual_material,
mode="interval",
This configuration:
Function: envs.managers.observations.get_rigid_object_pose()
Mode: "add" - appends the data to the observation dictionary
Name: Custom key ("fork_pose") for the observation data
Target: Tracks the fork object's pose in the scene
For detailed documentation, see envs.managers.cfg.ObservationCfg.
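At runtime, the added entry should appear in the observation dictionary under its configured name. A minimal usage sketch (assuming "add"-mode observations are stored as top-level keys, which is not verified here):

# After a reset, read the custom observation by its configured name.
obs, info = env.reset()
fork_pose = obs["fork_pose"]  # pose of the rigid object with uid "fork"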
Environment Configuration#
The main environment configuration inherits from envs.EmbodiedEnvCfg and defines all scene components:
Robot Configuration
robot: RobotCfg = DexforceW1Cfg.from_dict(
    {
        "uid": "dexforce_w1",
        "version": "v021",
        "arm_kind": "anthropomorphic",
        "init_pos": [0.0, 0, 0.0],
    }
)
Uses the pre-configured DexforceW1Cfg with customizations:
Version: Specific robot variant (v021)
Arm Type: Anthropomorphic configuration
Position: Initial placement in the scene
Sensor Configuration
sensor: List[SensorCfg] = [
    StereoCameraCfg(
        uid="eye_in_head",
        width=960,
        height=540,
        enable_mask=True,
        enable_depth=True,
        left_to_right_pos=(0.06, 0, 0),
        intrinsics=(450, 450, 480, 270),
        intrinsics_right=(450, 450, 480, 270),
        extrinsics=StereoCameraCfg.ExtrinsicsCfg(
            parent="eyes",
        ),
    )
]
Configures a stereo camera system using StereoCameraCfg:
Resolution: 960x540 pixels for realistic visual input
Intrinsics: (450, 450, 480, 270), presumably (fx, fy, cx, cy), placing the principal point at the image center
Features: Depth sensing and segmentation masks enabled
Stereo Setup: 6 cm baseline between the left and right cameras
Mounting: Attached to the robot's "eyes" frame
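Given these intrinsics and the 6 cm baseline, the standard pinhole stereo relation recovers depth from disparity. A worked sketch (the (fx, fy, cx, cy) reading of the intrinsics tuple is an assumption):

# Depth from disparity for this rig: z = fx * baseline / disparity.
fx = 450.0        # focal length in pixels (from intrinsics)
baseline = 0.06   # meters (from left_to_right_pos)
disparity = 30.0  # example disparity in pixels
z = fx * baseline / disparity  # -> 0.9 m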
Lighting Configuration
"init_pos": [0.0, 0, 0.0],
}
)
# Define the sensor configuration using StereoCameraCfg
sensor: List[SensorCfg] = [
StereoCameraCfg(
uid="eye_in_head",
width=960,
height=540,
enable_mask=True,
Defines scene illumination with controllable point lights:
Type: Point light for realistic shadows
Properties: Configurable color, intensity, and position
UID: Named reference for event system manipulation
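Since direct takes a list, additional sources can presumably be declared alongside the first. A minimal sketch adding a hypothetical second point light (the "fill" uid and its values are illustrative):

light: EmbodiedEnvCfg.EnvLightCfg = EmbodiedEnvCfg.EnvLightCfg(
    direct=[
        LightCfg(uid="point", light_type="point", color=(1.0, 1.0, 1.0), intensity=50.0, init_pos=(0, 0, 2)),
        # Hypothetical fill light for softer shadows; values are illustrative.
        LightCfg(uid="fill", light_type="point", color=(0.8, 0.9, 1.0), intensity=30.0, init_pos=(1.0, -1.0, 1.5)),
    ]
)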
Rigid Objects
background: List[RigidObjectCfg] = [
    RigidObjectCfg(
        uid="table",
        shape=MeshCfg(
            fpath=get_data_path("CircleTableSimple/circle_table_simple.ply"),
            compute_uv=True,
        ),
        attrs=RigidBodyAttributesCfg(
            mass=10.0,
            static_friction=0.95,
            dynamic_friction=0.85,
            restitution=0.01,
        ),
        body_type="kinematic",
        init_pos=(0.80, 0, 0.8),
        init_rot=(0, 90, 0),
    ),
]

rigid_object: List[RigidObjectCfg] = [
    RigidObjectCfg(
        uid="fork",
        shape=MeshCfg(
            fpath=get_data_path("TableWare/tableware/fork/standard_fork_scale.ply"),
        ),
        body_scale=(0.75, 0.75, 1.0),
        init_pos=(0.8, 0, 1.0),
    ),
]
Multiple objects demonstrate different physics properties:
Table Configuration:
Shape: Custom PLY mesh with UV mapping
Physics: Kinematic body (movable but not affected by forces)
Material: Friction and restitution properties for realistic contact
Fork Configuration:
Shape: Detailed mesh from asset library
Scale: Proportionally scaled for scene consistency
Physics: Dynamic body affected by gravity and collisions
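Extending the scene is a matter of appending to the rigid_object list. A sketch adding a hypothetical second object (the spoon asset path is illustrative, not a verified entry in the data library):

rigid_object: List[RigidObjectCfg] = [
    RigidObjectCfg(
        uid="fork",
        shape=MeshCfg(
            fpath=get_data_path("TableWare/tableware/fork/standard_fork_scale.ply"),
        ),
        body_scale=(0.75, 0.75, 1.0),
        init_pos=(0.8, 0, 1.0),
    ),
    # Hypothetical second object; the path below is illustrative only.
    RigidObjectCfg(
        uid="spoon",
        shape=MeshCfg(fpath=get_data_path("TableWare/tableware/spoon/standard_spoon.ply")),
        init_pos=(0.8, 0.1, 1.0),
    ),
]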
Articulated Objects
articulation_cfg: List[ArticulationCfg] = [
    ArticulationCfg(
        uid="drawer",
        fpath="SlidingBoxDrawer/SlidingBoxDrawer.urdf",
        init_pos=(0.5, 0.0, 0.85),
    )
]
Demonstrates complex mechanisms with moving parts:
URDF: Sliding drawer with joints and constraints
Positioning: Placed on table surface for interaction
Environment Implementation#
The actual environment class is remarkably simple due to the configuration-driven approach:
@register_env("ModularEnv-v1", max_episode_steps=100, override=True)
class ModularEnv(EmbodiedEnv):
    """
    An example of a modular environment that inherits from EmbodiedEnv
    and uses custom event and observation managers.
    """

    def __init__(self, cfg: EmbodiedEnvCfg, **kwargs):
        super().__init__(cfg, **kwargs)
The envs.EmbodiedEnv base class automatically:
Loads all configured scene components
Sets up observation and action spaces
Initializes event and observation managers
Handles environment lifecycle (reset, step, etc.)
The Code Execution#
To run the modular environment:
cd /path/to/embodichain
python scripts/tutorials/gym/modular_env.py
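Ray-traced rendering can be enabled through the flag defined in the script's argument parser:

python scripts/tutorials/gym/modular_env.py --enable_rt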
The script demonstrates the complete workflow:
Configuration: Creates an instance of ExampleCfg with the chosen simulation settings
Registration: Looks up the environment by its registered ID, "ModularEnv-v1"
Execution: Runs episodes with zero actions to observe the automatic behaviors (a random-action variation is sketched below)
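The zero-action loop only exercises the event system. A small variation samples random actions instead; this sketch assumes ExampleCfg can be constructed with default simulation settings and that actions converted from the sampled space are accepted:

import gymnasium as gym
import torch

env = gym.make("ModularEnv-v1", cfg=ExampleCfg())  # assumes default sim settings suffice
obs, info = env.reset()
for _ in range(100):
    # Convert the sampled numpy action to the tensor format the tutorial uses.
    action = torch.as_tensor(env.action_space.sample(), dtype=torch.float32)
    obs, reward, terminated, truncated, info = env.step(action)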
Manager System Benefits#
The manager-based architecture provides several key advantages:
Event Managers
Modularity: Reusable event functions across environments
Timing Control: Flexible scheduling (reset, interval, condition-based)
Parameter Binding: Type-safe configuration with validation
Extensibility: Easy to add custom event behaviors (see the sketch after this list)
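Custom event behaviors follow the same pattern: write a function and bind it through EventCfg. A minimal sketch; the signature below (the environment plus the declared params) is an assumption about what the event manager passes in:

# Hypothetical user-defined event function; the signature is assumed.
def log_entity_event(env, entity_cfg):
    # Purely illustrative body: report which entity the event targeted.
    print(f"event fired for entity '{entity_cfg.uid}'")

custom_log: EventCfg = EventCfg(
    func=log_entity_event,
    mode="interval",
    interval_step=20,
    params={"entity_cfg": SceneEntityCfg(uid="fork")},
)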
Observation Managers
Flexible Data: Any simulation data can become an observation
Processing Pipeline: Built-in normalization and transformation
Dynamic Composition: Runtime observation space modification
Performance: Efficient data collection and GPU acceleration
Key Features Demonstrated#
This tutorial showcases the most advanced features of EmbodiChain environments:
Configuration-Driven Design: Declarative environment specification
Manager Systems: Modular event and observation handling
Asset Management: Dynamic loading and randomization
Sensor Integration: Realistic camera systems with stereo vision
Physics Simulation: Complex articulated and rigid body dynamics
Visual Randomization: Automated domain randomization
Extensible Architecture: Easy customization and extension points
This tutorial demonstrates the full power of EmbodiChain’s modular environment system, providing the foundation for creating sophisticated robotic learning scenarios.