Automated robot movement to collect data for the camera-in-hand configuration and a ROS-based API #23

Open · wants to merge 3 commits into base: master
3 changes: 3 additions & 0 deletions .gitignore
@@ -1,9 +1,12 @@
franka_control_suite/build/
.vscode/
#calibration/data/
calibration/data/fg/*.csv
calibration/data/fg/*.jpg
!calibration/calibration_board/target_A4.pdf
!calibration/calibration_board/target_US_letter.pdf
*.png
*.pdf
*.pyc
*.jpg
*.csv
36 changes: 27 additions & 9 deletions calibration/ROSCameraRobotCalibration.py
@@ -32,6 +32,7 @@ def __init__(self, args,
aruco_board_n_cols=aruco_board_n_cols,
data_file_name=file_name)
self.args = args

if(self.args.move_robot_automatically):
self.fa_object = FrankaArm(with_gripper=self.args.with_franka_gripper)
if(not args.move_robot_automatically):# as frankapy inits its own node
@@ -47,10 +48,15 @@ def __init__(self, args,
self.processed_image = 0
self.bool_kill_thread = False
if self.args.move_robot_automatically:
self.abs_poses = get_absolute_poses(movement_pose_file)#gets command poses for frankapy to move robot automatically
if not self.args.use_delta_poses:
self.abs_poses = get_absolute_poses(movement_pose_file)#gets command poses for frankapy to move robot automatically
else:
self.initial_joints = self.fa_object.get_joints()
self.initial_pose = self.fa_object.get_pose()
self.delta_poses = get_delta_poses(file_name=movement_pose_file, initial_pose_fa=self.initial_pose)
else:
self.abs_poses = None

def subscribe_image_cb(self, img_msg):
if self.camera_matrix is None:
return
@@ -76,13 +82,23 @@ def user_robot_move_function(self,):
R_tag2cam = []; t_tag2cam = []
prev_tag_pose = None
while(True):
if(self.abs_poses is not None and len(self.abs_poses)!=0 ):
self.fa_object.reset_joints()
self.fa_object.goto_pose(self.abs_poses.pop(0),ignore_virtual_walls=True, use_impedance=False)
time.sleep(1.0)
elif (self.abs_poses is not None and len(self.abs_poses)==0):
rospy.core.signal_shutdown('keyboard interrupt')
break
# moves robot to viewposes recorded in file
if(self.args.move_robot_automatically and not self.args.use_delta_poses):
if(self.abs_poses is not None and len(self.abs_poses)!=0 ):
self.fa_object.reset_joints()
self.fa_object.goto_pose(self.abs_poses.pop(0),ignore_virtual_walls=True, use_impedance=False)
time.sleep(1.0)
elif (self.abs_poses is not None and len(self.abs_poses)==0):
rospy.core.signal_shutdown('keyboard interrupt')
break
elif(self.args.move_robot_automatically and self.args.use_delta_poses):
if(self.delta_poses is not None and len(self.delta_poses)!=0 ):
self.fa_object.goto_joints(self.initial_joints)
self.fa_object.goto_pose_delta(self.delta_poses.pop(0),ignore_virtual_walls=True, use_impedance=False)
time.sleep(1.0)
elif (self.delta_poses is not None and len(self.delta_poses)==0):
rospy.core.signal_shutdown('keyboard interrupt')
break

if(args.move_robot_automatically):
ip = ""
@@ -169,6 +185,8 @@ def main(args):
'manually moved to an initial pose and the script controls EE to predefined '\
'relative poses. If false, the EE should be moved manually(white status LED '\
'and press enter to collect data')
parser.add_argument("--use_delta_poses", default=True, nargs='?', type=str2bool, help='should we use '\
'delta end-effector poses to move the robot to collect data for calibration')
parser.add_argument("--only_calibration", default=False, nargs='?',type=str2bool, help='if true, values '\
'stored in the data folder are used for calibration if false, data is first collected, '\
'stored in /data folder and then calibration routine is run')
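In short, when automatic robot movement is enabled together with `use_delta_poses`, the script records the arm's starting joints and pose, converts the recorded view poses into deltas relative to that start, and replays them one by one. A minimal sketch of that flow is below (the imports and the pose-file path are assumptions based on the rest of this PR; the real method also captures images, computes tag poses, and handles shutdown):

```python
import time
from frankapy import FrankaArm                     # frankapy arm interface used throughout this PR
from calibration_utils import get_delta_poses      # helper added in calibration/calibration_utils.py

fa = FrankaArm(with_gripper=False)                  # gripper flag depends on your hardware setup
initial_joints = fa.get_joints()                    # every move starts from this configuration
initial_pose = fa.get_pose()                        # reference pose for the deltas
delta_poses = get_delta_poses(file_name="data/file_moves.txt",  # assumed path, see USAGE.md
                              initial_pose_fa=initial_pose)

for delta in delta_poses:
    fa.goto_joints(initial_joints)                  # return to the reference configuration
    fa.goto_pose_delta(delta, ignore_virtual_walls=True, use_impedance=False)
    time.sleep(1.0)                                 # let the arm settle before sampling a view
    # ... collect an image and the current end-effector pose here ...
```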
6 changes: 3 additions & 3 deletions calibration/base_robot_camera_calibration.py
@@ -124,10 +124,10 @@ def process_image_for_aruco(self, image_gray, prev_tag_pose=None):
# print(objPoints, imgPoints)
retval, rvec, tvec = cv2.solvePnP(objPoints, imgPoints, self.camera_matrix, rvec, tvec)
if(self.args.debug_image):
cv2.aruco.drawDetectedMarkers(color_im, corners, borderColor=(0, 0, 255))
cv2.drawFrameAxes(color_im, self.camera_matrix, self.dist_coeffs, rvec, tvec, 0.1)
cv2.aruco.drawDetectedMarkers(image_gray, corners, borderColor=(0, 0, 255))
cv2.drawFrameAxes(image_gray, self.camera_matrix, self.dist_coeffs, rvec, tvec, 0.1)
img_file_name = "data/image/image_"+str(self.detections_count-1)+".jpg"
cv2.imwrite(img_file_name, color_im)
cv2.imwrite(img_file_name, image_gray)
reproj_error = reprojection_error(corners,
ids,
rvec, tvec,
55 changes: 27 additions & 28 deletions calibration/calibration_utils.py
@@ -34,11 +34,11 @@ def write_to_file(line_list, file_name):
f.write(line)
f.write('\n')

def sample_view_poses(initial_pose):
delta_position = [[],[],[],[],[],[],[],[],[],[]]
return
# def sample_view_poses(initial_pose):
# delta_position = [[],[],[],[],[],[],[],[],[],[]]
# return

def get_delta_poses(file_name):
def get_delta_poses(file_name, initial_pose_fa):
with open(file_name, 'r') as fp:
lines = fp.readlines()
ee_poses_tf = []
@@ -50,36 +50,35 @@ def get_delta_poses(file_name, initial_pose_fa):
(float(data[3]),
float(data[4]),
float(data[5]))))
tag_pose = tuple(((float(data[6]),
float(data[7]),
float(data[8])),
(float(data[9]),
float(data[10]),
float(data[11]))))
# tag_pose = tuple(((float(data[6]),
# float(data[7]),
# float(data[8])),
# (float(data[9]),
# float(data[10]),
# float(data[11]))))
ee_pose_tf = np.eye(4)
tag_pose_tf = np.eye(4)
# tag_pose_tf = np.eye(4)
ee_pose_tf[0:3, 0:3] = cv2.Rodrigues(ee_pose[0])[0]; ee_pose_tf[0:3, -1] = ee_pose[1]
tag_pose_tf[0:3, 0:3] = cv2.Rodrigues(tag_pose[0])[0]; tag_pose_tf[0:3, -1] = tag_pose[1]
ee_pose_tf[0:3, -1] = ee_pose_tf[0:3, -1]/1000.0
# tag_pose_tf[0:3, 0:3] = cv2.Rodrigues(tag_pose[0])[0]; tag_pose_tf[0:3, -1] = tag_pose[1]
ee_poses_tf.append(ee_pose_tf)
prev = None

delta_poses = []
initial_pose = np.eye(4)
initial_pose[0:3, 0:3] = initial_pose_fa.rotation
initial_pose[0:3, -1] = initial_pose_fa.translation
for abs_ee_pose_tf in ee_poses_tf:
if prev is None:
delta_pose = RigidTransform(from_frame='franka_tool',
to_frame='franka_tool',
rotation=np.eye(3),
translation=np.array([0.0, 0.0,0.0]))
prev = abs_ee_pose_tf
else:
# difference = np.matmul(np.linalg.inv(abs_ee_pose_tf), prev)
# print(prev)
# print(abs_ee_pose_tf)
difference = np.matmul(np.linalg.inv(prev), abs_ee_pose_tf)
delta_pose = RigidTransform(from_frame='franka_tool',
to_frame='franka_tool',
rotation=difference[0:3,0:3],
translation=difference[0:3,-1])
difference = np.matmul(np.linalg.inv(initial_pose), abs_ee_pose_tf )
delta_pose = RigidTransform(from_frame='franka_tool',
to_frame='franka_tool',
rotation=difference[0:3,0:3],
translation=difference[0:3,-1])
delta_poses.append(delta_pose)
first_delta_pose = RigidTransform(from_frame='franka_tool',
to_frame='franka_tool',
rotation=np.eye(3),
translation=np.array([0.0, 0.0, 0.0]))
delta_poses = [first_delta_pose] + delta_poses
return delta_poses

def get_absolute_poses(file_name):
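The key change in `get_delta_poses` is that every recorded end-effector pose is now expressed relative to the robot's pose at script start-up rather than relative to the previous view pose. A small numpy check of that relation is below (the values are hypothetical; the real code builds the transforms from `fa.get_pose()` and the recorded file, converts the recorded translations from millimetres to metres, and wraps each delta in a `RigidTransform` in the `franka_tool` frame):

```python
import numpy as np

def make_tf(rotation, translation):
    """Build a 4x4 homogeneous transform from a 3x3 rotation and a 3-vector."""
    tf = np.eye(4)
    tf[0:3, 0:3] = rotation
    tf[0:3, -1] = translation
    return tf

# Hypothetical stand-ins for the start-up pose and one recorded view pose (metres)
T_init = make_tf(np.eye(3), np.array([0.45, 0.00, 0.35]))
T_view = make_tf(np.eye(3), np.array([0.40, 0.05, 0.40]))

delta = np.linalg.inv(T_init) @ T_view       # what get_delta_poses computes per recorded pose
assert np.allclose(T_init @ delta, T_view)   # replaying the delta from T_init recovers the view pose
```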
10 changes: 8 additions & 2 deletions calibration/docs/USAGE.md
@@ -39,7 +39,7 @@ For the camera-in-hand case, the calibration tag is fixed to the environment rig



For the camera-in-environment case, the calibration tag needs to be **rigidly** attached to the robot's End-Effector. You could use the (to be) provided CAD files for the finger tips and gripping points. The finger tips are to be attached to Franka End-effector and the gripping points are drilled/screwed onto the calibration tag. Now make the Franka End-effector with custom finger tips grasp the calibration tag with the attached custom gripping points, this ensures that the tag remains rigid with respect to the End-effector.
For the camera-in-environment case, the calibration tag needs to be **rigidly** attached to the robot's End-Effector. You can use the provided CAD files for the finger tips and gripping points in [models_4_3d_printing](../models_4_3d_printing). The [finger tips](../models_4_3d_printing/franka_custom_finger_tips.stl) are attached to the Franka End-effector, and the [gripping points](../models_4_3d_printing/finger_grasp_points.stl) or [handle plate](../models_4_3d_printing/finger_handle_plate.stl) are drilled/screwed onto the calibration tag. Then have the Franka End-effector with the custom finger tips (figure 1) grasp the calibration tag (as shown in figure 3) by the attached custom gripping points (figure 2); this ensures that the tag remains rigid with respect to the End-effector.

Comment on lines +42 to 43 (Collaborator):
Images can be helpful to show the configuration that you are referring to.



@@ -137,7 +137,10 @@ run the readstates server that send end effector poses when requested,

### c.<u> Run Pose Data Collection + Calibration Script </u>


There are currently two ways to collect data for calibration:
1. Move the robot end-effector manually and hit "enter" to collect each pose data sample (works for both the ROS and non-ROS scripts).
2. Move the robot automatically through the absolute/relative poses listed in [../data/file_moves.txt](../data/file_moves.txt); an example invocation is shown after this list.
2.a. If you are using relative poses, the robot moves relative to the end-effector configuration at the moment the script starts. Therefore, before starting frankapy, point the camera (camera-in-hand case) or the calibration tag (camera-in-environment case) at the calibration tag or the camera, respectively. Ensure that the calibration tag roughly faces the camera (its surface close to perpendicular to the camera's viewing axis) and that there is roughly 30 cm between the tag and the camera.
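For option 2 with relative poses, the invocation looks roughly like the following. The `--use_delta_poses` flag is added in this PR; the automatic-movement flag is assumed here to be `--move_robot_automatically`, matching `args.move_robot_automatically` in the script, and the command is run from the `calibration/` directory:

```
python ROSCameraRobotCalibration.py --move_robot_automatically True --use_delta_poses True
```

Passing `--use_delta_poses False` instead makes the script replay the absolute poses from the same file.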

Comment on lines +143 to 144 (Collaborator):
How can we enable relative poses? Maybe we can name the option that should be set to True to use relative poses.

### On the workstation computer,

@@ -187,6 +190,9 @@ optional arguments:
is first manually moved to an initial pose and the script controls EE to
predefined relative poses. If false, the EE should be moved manually(white
status LED) and press enter to collect data
--use_delta_poses [USE_DELTA_POSES]:True(default)/False
Comment (Collaborator):
Is it the same as using relative poses?

should we use delta end-effector poses to move the robot to collect
data for calibration
--only_calibration [ONLY_CALIBRATION]: True/False(default)
if true, values stored in the data folder are used for calibration if
false, data is first collected, stored in /data folder and then calibration routine is run
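As a usage sketch, re-running only the calibration step on samples already stored in the data folder would look like this (script name and flag taken from this PR; other options left at their defaults):

```
python ROSCameraRobotCalibration.py --only_calibration True
```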