diff --git a/igibson/configs/behavior_robot_vr_behavior_task.yaml b/igibson/configs/behavior_robot_vr_behavior_task.yaml
index aaf53e2e0..182efc394 100644
--- a/igibson/configs/behavior_robot_vr_behavior_task.yaml
+++ b/igibson/configs/behavior_robot_vr_behavior_task.yaml
@@ -18,6 +18,7 @@ robot:
   # For the VR use case, we use the higher set of joint speed/position limits, and a different gripper action space.
   show_visual_head: False
   higher_limits: True
+  legacy_proprioception: True
   controller_config:
     gripper_right_hand:
       name: JointController
diff --git a/igibson/robots/behavior_robot.py b/igibson/robots/behavior_robot.py
index a30c121e3..12e22cc58 100644
--- a/igibson/robots/behavior_robot.py
+++ b/igibson/robots/behavior_robot.py
@@ -108,6 +108,7 @@ def __init__(
         base_name="body",
         grasping_mode="assisted",
         higher_limits=False,
+        legacy_proprioception=False,
         **kwargs,
     ):
         """
@@ -133,6 +134,8 @@ def __init__(
         :param higher_limits: bool, indicating whether the limbs' velocity and position limits should be set in the
             extended mode which allows for the instant responsiveness required for VR. Defaults to True.
             learning-based agents should set this parameter to False for a more realistic capability set.
+        :param legacy_proprioception: Use the proprioception dimensions and dict keys that the BehaviorRobot had at
+            the BEHAVIOR challenge, rather than the ones automatically generated by underlying robot classes.
         :param **kwargs: see ManipulationRobot, LocomotionRobot, ActiveCameraRobot
         """
         assert reset_joint_pos is None, "BehaviorRobot doesn't support hand-specified reset_joint_pos"
@@ -144,6 +147,7 @@ def __init__(
         self.use_ghost_hands = use_ghost_hands
         self.normal_color = normal_color
         self.show_visual_head = show_visual_head
+        self._legacy_proprioception = legacy_proprioception
         self._position_limit_coefficient = 1 if higher_limits else LOWER_LIMITS_POSITION_COEFFICIENT
         self._velocity_limit_coefficient = 1 if higher_limits else LOWER_LIMITS_VELOCITY_COEFFICIENT
 
@@ -290,6 +294,51 @@ def set_position_orientation(self, pos, orn):
         clear_cached_states(self)
 
+    @property
+    def default_proprio_obs(self):
+        if self._legacy_proprioception:
+            return [
+                "left_hand_position_local",
+                "left_hand_orientation_local",
+                "right_hand_position_local",
+                "right_hand_orientation_local",
+                "eye_position_local",
+                "eye_orientation_local",
+                "left_hand_trigger_fraction",
+                "left_hand_is_grasping",
+                "right_hand_trigger_fraction",
+                "right_hand_is_grasping",
+            ]
+
+        # Otherwise just return the default observations
+        return super().default_proprio_obs
+
+    def _get_proprioception_dict(self):
+        if self._legacy_proprioception:
+            # In the legacy proprioception mode, we use some custom labels and fields.
+            state = {}
+
+            # Get all the part poses first.
+            for part_name, part in self._parts.items():
+                if part_name == "body":
+                    continue
+                part_pos, part_orn = part.get_local_position_orientation()
+                state[f"{part_name}_position_local"] = part_pos
+                state[f"{part_name}_orientation_local"] = p.getEulerFromQuaternion(part_orn)
+
+            # Get grasping information.
+            for arm in self.arm_names:
+                finger_pos = self.joint_positions[self.gripper_control_idx[arm]]
+                min_pos = self.joint_lower_limits[self.gripper_control_idx[arm]]
+                max_pos = self.joint_upper_limits[self.gripper_control_idx[arm]]
+                trigger_fraction = np.max((finger_pos - min_pos) / (max_pos - min_pos))
+                state[f"{arm}_trigger_fraction"] = [np.clip(trigger_fraction, 0, 1)]
+                state[f"{arm}_is_grasping"] = [self.is_grasping(arm=arm)]
+
+            return state
+
+        return super()._get_proprioception_dict()
+
     def set_poses(self, poses):
         assert len(poses) == len(self._parts), "Number of poses (%d) does not match number of parts (%d)" % (
             len(poses),
diff --git a/tests/test_robot.py b/tests/test_robot.py
index 550b97877..a6326c14d 100644
--- a/tests/test_robot.py
+++ b/tests/test_robot.py
@@ -313,3 +313,17 @@ def test_behavior_robot_stability():
     )
     _, angle = p.getAxisAngleFromQuaternion(rel_orn)
     assert angle < np.deg2rad(5), "Part orientation moved relative to robot."
+
+
+def test_behavior_robot_legacy_proprioception():
+    s = Simulator(physics_timestep=1 / 30, render_timestep=1 / 30, mode="headless")
+    scene = EmptyScene()
+    s.import_scene(scene)
+
+    robot = REGISTERED_ROBOTS["BehaviorRobot"](legacy_proprioception=True)
+    s.import_object(robot)
+
+    proprio_shape = robot.get_proprioception().shape
+    assert proprio_shape == (22,), f"Proprioception shape should be (22,). Instead, it is {proprio_shape}."
+
+    s.disconnect()
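
Usage note (not part of the diff): a minimal sketch of how the new flag is exercised end to end, mirroring the added test. The import paths below are assumptions based on the modules tests/test_robot.py already uses, not new API introduced here.

    from igibson.robots import REGISTERED_ROBOTS
    from igibson.scenes.empty_scene import EmptyScene
    from igibson.simulator import Simulator

    # Headless simulator with an empty scene, as in the added test.
    s = Simulator(physics_timestep=1 / 30, render_timestep=1 / 30, mode="headless")
    s.import_scene(EmptyScene())

    # Enabling the flag switches to the BEHAVIOR-challenge observation layout.
    robot = REGISTERED_ROBOTS["BehaviorRobot"](legacy_proprioception=True)
    s.import_object(robot)

    # Legacy layout: the 3 non-body parts (left_hand, right_hand, eye) each
    # report a local position (3) and Euler orientation (3), and each hand adds
    # a trigger fraction (1) and a grasping flag (1): 3 * 6 + 2 * 2 = 22 dims.
    obs = robot.get_proprioception()
    assert obs.shape == (22,)

    s.disconnect()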