""" Example script for using external devices to teleoperate a robot. """ import omnigibson as og from omnigibson.utils.ui_utils import choose_from_options from omnigibson.envs import DataCollectionWrapper import os from omnigibson.macros import gm ROBOTS = { "Fetch": "Mobile robot with one arm", } TELEOP_METHOD = { "keyboard": "Keyboard", "oculus": "Oculus Quest VR", } vr_data = "/omnigibson-src/omnigibson/vr_data" collect_hdf5_path = os.path.join(vr_data, "test_data_collection2.hdf5") print(f"Collecting data to: {collect_hdf5_path}") def main(): """ Spawn a robot in an empty scene with a breakfast table and some toys. Users can try pick and place the toy into the basket using selected external devices and robot of their choice. """ from telemoma.configs.base_config import teleop_config from telemoma.utils.camera_utils import RealSenseCamera from omnigibson.utils.teleop_utils import TeleopSystem robot_name = "Fetch" arm_teleop_method = choose_from_options(options=TELEOP_METHOD, name="robot arm teleop method") if robot_name != "FrankaPanda": base_teleop_method = "keyboard" else: base_teleop_method = "keyboard" # Dummy value since FrankaPanda does not have a base # Generate teleop config teleop_config.arm_left_controller = arm_teleop_method teleop_config.arm_right_controller = arm_teleop_method teleop_config.base_controller = base_teleop_method teleop_config.interface_kwargs["keyboard"] = {"arm_speed_scaledown": 0.04} teleop_config.interface_kwargs["spacemouse"] = {"arm_speed_scaledown": 0.04} if arm_teleop_method == "vision" or base_teleop_method == "vision": teleop_config.interface_kwargs["vision"] = {"camera": RealSenseCamera()} # Create the config for generating the environment we want scene_cfg = {"type": "Scene"} # Add the robot we want to load robot_cfg = { "type": robot_name, "obs_modalities": ["rgb"], "action_normalize": False, "grasping_mode": "assisted", } arms = ["left", "right"] if robot_name == "Tiago" else ["0"] robot_cfg["controller_config"] = {} for arm in arms: robot_cfg["controller_config"][f"arm_{arm}"] = { "name": "InverseKinematicsController", "command_input_limits": None, } robot_cfg["controller_config"][f"gripper_{arm}"] = { "name": "MultiFingerGripperController", "command_input_limits": (0.0, 1.0), "mode": "smooth", } object_cfg = [ { "type": "DatasetObject", "prim_path": "/World/breakfast_table", "name": "breakfast_table", "category": "breakfast_table", "model": "kwmfdg", "bounding_box": [2, 1, 0.4], "position": [0.8, 0, 0.3], "orientation": [0, 0, 0.707, 0.707], }, { "type": "DatasetObject", "prim_path": "/World/frail", "name": "frail", "category": "frail", "model": "zmjovr", "scale": [2, 2, 2], "position": [0.6, -0.35, 0.5], }, { "type": "DatasetObject", "prim_path": "/World/toy_figure1", "name": "toy_figure1", "category": "toy_figure", "model": "issvzv", "scale": [0.75, 0.75, 0.75], "position": [0.6, 0, 0.5], }, { "type": "DatasetObject", "prim_path": "/World/toy_figure2", "name": "toy_figure2", "category": "toy_figure", "model": "nncqfn", "scale": [0.75, 0.75, 0.75], "position": [0.6, 0.15, 0.5], }, { "type": "DatasetObject", "prim_path": "/World/toy_figure3", "name": "toy_figure3", "category": "toy_figure", "model": "eulekw", "scale": [0.25, 0.25, 0.25], "position": [0.6, 0.3, 0.5], }, ] if og.sim is None: # Make sure GPU dynamics are enabled (GPU dynamics needed for cloth) gm.ENABLE_OBJECT_STATES = True gm.USE_GPU_DYNAMICS = True gm.ENABLE_TRANSITION_RULES = False else: # Make sure sim is stopped og.sim.stop() cfg = dict(scene=scene_cfg, robots=[robot_cfg], 
objects=object_cfg) # Create the environment env = og.Environment(configs=cfg) # Wrap the environment with DataCollectionWrapper data_env = DataCollectionWrapper( env=env, output_path=collect_hdf5_path, only_successes=False, # Set to True if you only want to record successful episodes ) # Use the wrapped environment as you would normally obs, info = data_env.reset() # update viewer camera pose og.sim.viewer_camera.set_position_orientation(position=[-0.22, 0.99, 1.09], orientation=[-0.14, 0.47, 0.84, -0.23]) # Start teleoperation system robot = env.robots[0] # Initialize teleoperation system teleop_sys = TeleopSystem(config=teleop_config, robot=robot, show_control_marker=False) teleop_sys.start() # main simulation loop for _ in range(2000): # Get observation from the original env's robot # obs = teleop_sys.get_obs() action = teleop_sys.get_action(obs) # Step through the data collection wrapper obs, reward, terminated, truncated, info = data_env.step(action) # Save the collected data print(f"Saving data to {collect_hdf5_path}") data_env.save_data() # Shut down the environment cleanly at the end # og.sim.stop() # teleop_sys.stop() og.clear() print("Data collection finished!") if __name__ == "__main__": main()
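

# ---------------------------------------------------------------------------
# Optional: replaying the collected demonstration.
#
# NOTE: This helper is an illustrative sketch added for reference, not part of
# the original example, and it is not called by the script above. The function
# name `replay_collected_data` is hypothetical, and the sketch assumes
# omnigibson.envs.DataPlaybackWrapper with a create_from_hdf5() classmethod
# taking input_path / output_path / robot_obs_modalities, as documented for
# recent OmniGibson releases; verify the exact signature against your
# installed version before using it.
# ---------------------------------------------------------------------------
def replay_collected_data(hdf5_path=collect_hdf5_path):
    from omnigibson.envs import DataPlaybackWrapper

    # Rebuild the environment from the metadata stored in the collected HDF5 file
    playback_env = DataPlaybackWrapper.create_from_hdf5(
        input_path=hdf5_path,
        output_path=hdf5_path.replace(".hdf5", "_playback.hdf5"),
        robot_obs_modalities=["rgb"],
    )

    # Replay every recorded episode, re-rendering observations, then save them
    # (the `record` keyword name may differ across OmniGibson versions)
    playback_env.playback_dataset(record=True)
    playback_env.save_data()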