|
import tensorflow as tf |
|
|
|
from data.utils import clean_task_instruction, euler_to_quaternion, \ |
|
quaternion_to_rotation_matrix, rotation_matrix_to_ortho6d |
|
|
|
def process_step(step: dict) -> dict:
    """
    Unify the action format and clean the task instruction.

    DO NOT use python list, use tf.TensorArray instead.
    """
    # Keep a handle on the raw 7-DoF action tensor before the key is
    # replaced with the unified action dict.
    raw_action = step['action']
    unified_action = {}
    step['action'] = unified_action
    unified_action['terminate'] = step['is_terminal']

    # Split the raw action into translation (xyz) and rotation; the
    # rotation arrives as Euler angles and is converted to a quaternion.
    delta_pos = raw_action[:3]
    delta_quat = euler_to_quaternion(raw_action[3:6])

    unified_action['arm_concat'] = tf.concat([delta_pos, delta_quat], axis=0)

    # Field layout matching the concatenation order above.
    unified_action['format'] = tf.constant(
        "eef_delta_pos_x,eef_delta_pos_y,eef_delta_pos_z,eef_delta_angle_x,eef_delta_angle_y,eef_delta_angle_z,eef_delta_angle_w")

    obs = step['observation']

    # First six entries are arm joint positions; the seventh is the
    # gripper signal, flipped so that 1 means open — TODO confirm the
    # raw convention against the dataset spec.
    joint_pos = obs['state'][:6]
    gripper_open = 1 - obs['state'][6:7]

    # End-effector pose: position plus a quaternion orientation that is
    # re-encoded as a continuous 6D rotation representation.
    ee_pos = obs['end_effector_state'][:3]
    ee_rot6d = rotation_matrix_to_ortho6d(
        quaternion_to_rotation_matrix(obs['end_effector_state'][3:]))

    obs['arm_concat'] = tf.concat(
        [joint_pos, gripper_open, ee_pos, ee_rot6d], axis=0)

    # Field layout matching the state concatenation order above.
    obs['format'] = tf.constant(
        "arm_joint_0_pos,arm_joint_1_pos,arm_joint_2_pos,arm_joint_3_pos,arm_joint_4_pos,arm_joint_5_pos,gripper_open,eef_pos_x,eef_pos_y,eef_pos_z,eef_angle_0,eef_angle_1,eef_angle_2,eef_angle_3,eef_angle_4,eef_angle_5")

    # Dataset-specific artifacts scrubbed from the instruction text.
    replacements = {
        '_': ' ',
        '1f': ' ',
        '4f': ' ',
        '-': ' ',
        '50': ' ',
        '55': ' ',
        '56': ' ',
    }
    cleaned = clean_task_instruction(step['language_instruction'], replacements)
    step['observation']['natural_language_instruction'] = cleaned

    return step
|
|
|
|
|
if __name__ == "__main__":
    import tensorflow_datasets as tfds
    from data.utils import dataset_to_path

    DATASET_DIR = 'data/datasets/openx_embod'
    DATASET_NAME = 'berkeley_fanuc_manipulation'

    # Open the pre-built TFDS dataset directly from its directory and
    # materialize all splits as a single dataset.
    builder = tfds.builder_from_directory(
        builder_dir=dataset_to_path(DATASET_NAME, DATASET_DIR))
    ds = builder.as_dataset(split='all')

    # Dump every step of every episode for manual inspection.
    for episode in ds:
        for episode_step in episode['steps']:
            print(episode_step)
|
|