"""
Example for demonstrating start and final joint positions for Lab 2.
"""
-
- import sys
-
- import numpy as np
+ import cv2
import rospy
+ import numpy as np
+ from scipy import linalg

- from sensor_msgs.msg import JointState
- from sensor_msgs.msg import Image
- from sensor_msgs.msg import CameraInfo
+ from sensor_msgs.msg import (JointState,
+                              Image,
+                              CameraInfo)
from cv_bridge import CvBridge, CvBridgeError
from ar_track_alvar_msgs.msg import AlvarMarkers
- import message_filters
- import cv2
import forward_kinematics_extrinsics

-
ROSTOPIC_SET_ARM_JOINT = '/goal_dynamixel_position'
#flag = 0

@@ -31,47 +27,6 @@ def set_arm_joint(pub, joint_target):
    rospy.loginfo('Going to arm joint position: {}'.format(joint_state.position))
    pub.publish(joint_state)

- # class camera_calibration:
- #     def __init__(self):
- #         self.positions = np.zeros((15, 3))
- #         self.orientations = np.zeros((15, 4))
-
- #         self.sub_img = message_filters.Subscriber("camera/color/image_raw", Image)
- #         self.sub_artag = message_filters.Subscriber("ar_pose_marker", AlvarMarkers)
- #         self.ts = message_filters.ApproximateTimeSynchronizer([self.sub_img, self.sub_artag], 10, 0.1, allow_headerless=True)
- #         self.ts.registerCallback(self.callback)
-
- #         self.sub_info = rospy.Subscriber("camera/color/camera_info", CameraInfo, self.infocallback)
- #         self.K_matrix = None
-
- #     def callback(self, img, artag):
- #         print(flag, 'Hiiiiiiii')
- #         if artag.markers == None:
- #             return
- #         else:
- #             try:
- #                 cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
- #             except CvBridgeError as e:
- #                 print(e)
- #             if flag != 0:
- #                 print(flag)
- #                 pos = artag.markers[0].pose.pose.position
- #                 self.positions[i, :] = np.array([pos.x, pos.y, pos.z])
- #                 q = artag.markers[0].pose.pose.orientation
- #                 self.orientations[i, :] = np.array([q.x, q.y, q.z, q.w])
- #                 cv2.imwrite('../data/caloibration/image_%d.png'%flag, cv_image)
- #                 flag == 0
- #             if flag == 15:
- #                 np.savez("../data/calibration/artag_pos.npz", position = self.positions, orientation=self.orientations)
- #         # (rows,cols,channels) = cv_image.shape
-
- #         # cv2.imshow("Image window", cv_image)
- #         # cv2.waitKey(3)
-
- #     def infocallback(self, data):
- #         if self.K_matrix == None:
- #             self.K_matrix = data.K
-

def generatesample():
    sample = np.zeros(5)
    # Generate Sample within Constraints
@@ -94,11 +49,11 @@ def main():
    Calib = rospy.wait_for_message("/camera/color/camera_info", CameraInfo)
    K_matrix = np.array(Calib.K)
    fk = forward_kinematics_extrinsics.forwardKinematics('arm_base_link', 'ar_tag')
-
+
    positions = np.zeros((15, 3))
    orientations = np.zeros((15, 4))
    joint_angles = np.zeros((15, 5))
-     end_effector_position = np.zeros((15, 4, 4))
+     end_effector_positions = np.zeros((15, 4, 4))
    rad_from_deg = np.pi / 180.

    # Send to Home Position
@@ -107,45 +62,25 @@ def main():
    home_arm(pub)

    def imgcallback(i):
-         try:
-             data = rospy.wait_for_message("/camera/color/image_raw", Image, timeout=5)
-         except rospy.ROSException as e:
-             if 'timeout exceeded' in e.message:
-                 print("No Image recieved for the last 5 secs")
-             else:
-                 raise e
-             return -1
-         try:
-             cv_image = bridge.imgmsg_to_cv2(data, "bgr8")
-         except CvBridgeError as e:
-             print(e)
-             return -1
+         data = rospy.wait_for_message("/camera/color/image_raw", Image, timeout=5)
+         cv_image = bridge.imgmsg_to_cv2(data, "bgr8")
        cv2.imwrite('../data/calibration/image_%d.png' % i, cv_image)
        return 0
-
+
    def artagcallback(i):
-         try:
-             artag = rospy.wait_for_message("ar_pose_marker", AlvarMarkers, timeout=5)
-         except rospy.ROSException as e:
-             if 'timeout exceeded' in e.message:
-                 print("No artag message recieved for the last 5 secs")
-             else:
-                 raise e
-             return -1
+         artag = rospy.wait_for_message("ar_pose_marker", AlvarMarkers, timeout=5)
        if len(artag.markers) == 0:
-             print('No AR Tag in the image')
-             return -1
-         else:
+             raise Exception("AR tag not found")
+         else:
            pos = artag.markers[0].pose.pose.position
            q = artag.markers[0].pose.pose.orientation
            positions[i, :] = np.array([pos.x, pos.y, pos.z])
            orientations[i, :] = np.array([q.x, q.y, q.z, q.w])
            return 0
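
+     # imgcallback() and artagcallback() raise on a timeout or a missing tag; the loop
+     # below catches those exceptions and retries the sample without advancing count.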

+     # Collect 15 samples: command a sampled joint configuration, then record the AR-tag detection, the image, and the FK pose
    count = 0
-     # # Send nodes of the path as commands to controller
    while count < 15:
-         #raw_input("Robot ready to move to NEXT POSITION. Press Enter to continue.")

        joint_angle = generatesample()
        joint_angles[count, :] = joint_angle
@@ -154,20 +89,112 @@ def artagcallback(i):
        set_arm_joint(pub, np.array(joint_angle))

        joint_angle = np.append(joint_angle, 0)
-
+
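+         # fk.getJointForwardKinematics() is assumed (it is not part of this diff) to return
+         # the stacked 4x4 transforms of the links; inv(M[0]) @ M[5] is then the ar_tag pose
+         # expressed in the arm_base_link frame, which supplies the 3D points for calibration.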
        M = fk.getJointForwardKinematics(joint_angle.reshape((1, 6)))
-         end_effector_position[count, :, :] = np.matmul(np.linalg.inv(M[0,:,:]), M[5,:,:])
+         end_effector_positions[count, :, :] = np.matmul(np.linalg.inv(M[0,:,:]), M[5,:,:])

        rospy.sleep(4)
-         if artagcallback(count) == -1:
+
+         try:
+             artagcallback(count)
+         except (rospy.ROSException, Exception) as e:
            continue
-         if imgcallback(count) == -1:
+
+
+         try:
+             imgcallback(count)
+         except (rospy.ROSException, CvBridgeError) as e:
            continue
+
        count += 1
-         rospy.sleep(1)

-     np.savez("../data/calibration/calibration_info.npz", position=positions, orientation=orientations, camerainfo=K_matrix, \
-              joint_angles=joint_angles, forward_kinematics=end_effector_position)
+
+     R = calculate_extrinsic(positions, K_matrix, end_effector_positions)
+     print(R)
+     # np.savez("../data/calibration/calibration_info.npz", position=positions, orientation=orientations, camerainfo=K_matrix, \
+     #          joint_angles=joint_angles, forward_kinematics=end_effector_positions)
+
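+ # Extrinsic calculation, as a sketch of the underlying math (standard pinhole model):
+ # each sample pairs a pixel detection p_i = (u_i, v_i) with a 3D point P_i in the
+ # arm_base_link frame, related up to scale by [u, v, 1]^T ~ M [X, Y, Z, 1]^T with M = K [R | t].
+ # get_projection_matrix() estimates M by DLT, linalg.rq() factors its left 3x3 block into an
+ # upper-triangular K and a rotation R, and t = K^-1 M[:, 3]. The returned H stacks [R | t]
+ # into a 4x4 homogeneous transform; the scale and sign ambiguities of the DLT and RQ factors
+ # are not normalized here.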
+ def calculate_extrinsic(positions, K_matrix, end_effector_positions):
+     p = get_p(positions)
+     P = get_P(end_effector_positions)
+     M = get_projection_matrix(p, P)
+     K, R = linalg.rq(M[:, 0:3])
+     T = np.matmul(np.linalg.inv(K), M[:, 3])
+     H = np.vstack((np.hstack((R, T.reshape((3, 1)))), np.array([[0, 0, 0, 1]])))
+     return H
+
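+ # DLT: each correspondence contributes two rows of A (one for u_i, one for v_i) so that
+ # A m = 0 for the stacked 12-vector m of projection-matrix entries. The minimizer of
+ # ||A m|| with ||m|| = 1 is the eigenvector of A^T A with the smallest eigenvalue;
+ # np.linalg.eigh returns eigenvalues in ascending order, so that is v[:, 0].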
+ def get_projection_matrix(p, P):
+     num_points = p.shape[0]
+     xi = P[:, 0].reshape((num_points, 1))
+     yi = P[:, 1].reshape((num_points, 1))
+     zi = P[:, 2].reshape((num_points, 1))
+     ui = p[:, 0].reshape((num_points, 1))
+     vi = p[:, 1].reshape((num_points, 1))
+     ones = np.ones((num_points, 1))
+     zeros = np.zeros((num_points, 1))
+     A = np.zeros((num_points * 2, 12))
+     A[0:2*num_points:2, :] = np.hstack((xi, yi, zi, ones, zeros, zeros, zeros, zeros, -ui*xi, -ui*yi, -ui*zi, -ui))
+     A[1:2*num_points:2, :] = np.hstack((zeros, zeros, zeros, zeros, xi, yi, zi, ones, -vi*xi, -vi*yi, -vi*zi, -vi))
+     w, v = np.linalg.eigh(np.matmul(A.T, A))
+     M = v[:, 0].reshape((3, 4))
+     return M
+
+
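+ # Projects the AR-tag positions reported in the camera frame to pixel coordinates. The
+ # hard-coded 3x3 matrix is presumably the color camera's intrinsics (the same values that
+ # /camera/color/camera_info publishes); the K_matrix passed around in main() is not used here.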
+ def get_p(position):
+     intrinsic = np.array([[615.8053588867188, 0.0, 328.7790222167969],
+                           [0.0, 615.8865356445312, 229.94618225097656],
+                           [0.0, 0.0, 1.0]])
+     K_3_4 = np.hstack((intrinsic, np.array([[0], [0], [0]])))
+     homogeneous_3D_point = np.hstack((position, np.ones((position.shape[0], 1))))
+     homogeneous_pixel_coordinate = np.matmul(K_3_4, homogeneous_3D_point.T)
+     homogeneous_pixel_coordinate = homogeneous_pixel_coordinate / homogeneous_pixel_coordinate[2, :]
+     return homogeneous_pixel_coordinate[0:2, :].T
+
+
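+ # The 3D calibration points are the ar_tag positions in the arm_base_link frame, i.e. the
+ # translation part of each 4x4 forward-kinematics transform.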
+ def get_P(end_effector_position):
+     point_3D_world_frame = end_effector_position[:, 0:3, 3]
+     return point_3D_world_frame

if __name__ == "__main__":
    main()
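+
+ # A possible sanity check (a sketch only; nothing in this commit calls it): reproject the FK
+ # points through the recovered extrinsic and compare against the projected AR-tag detections.
+ # def reprojection_error(H, positions, end_effector_positions):
+ #     P_base = np.hstack((get_P(end_effector_positions), np.ones((15, 1))))  # 15x4 homogeneous
+ #     P_cam = np.matmul(H, P_base.T)[0:3, :].T                               # points in the camera frame
+ #     return np.linalg.norm(get_p(P_cam) - get_p(positions), axis=1)         # per-sample pixel error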
+
+ # class camera_calibration:
+ #     def __init__(self):
+ #         self.positions = np.zeros((15, 3))
+ #         self.orientations = np.zeros((15, 4))
+
+ #         self.sub_img = message_filters.Subscriber("camera/color/image_raw", Image)
+ #         self.sub_artag = message_filters.Subscriber("ar_pose_marker", AlvarMarkers)
+ #         self.ts = message_filters.ApproximateTimeSynchronizer([self.sub_img, self.sub_artag], 10, 0.1, allow_headerless=True)
+ #         self.ts.registerCallback(self.callback)
+
+ #         self.sub_info = rospy.Subscriber("camera/color/camera_info", CameraInfo, self.infocallback)
+ #         self.K_matrix = None
+
+ #     def callback(self, img, artag):
+ #         print(flag, 'Hiiiiiiii')
+ #         if artag.markers == None:
+ #             return
+ #         else:
+ #             try:
+ #                 cv_image = self.bridge.imgmsg_to_cv2(data, "bgr8")
+ #             except CvBridgeError as e:
+ #                 print(e)
+ #             if flag != 0:
+ #                 print(flag)
+ #                 pos = artag.markers[0].pose.pose.position
+ #                 self.positions[i, :] = np.array([pos.x, pos.y, pos.z])
+ #                 q = artag.markers[0].pose.pose.orientation
+ #                 self.orientations[i, :] = np.array([q.x, q.y, q.z, q.w])
+ #                 cv2.imwrite('../data/caloibration/image_%d.png'%flag, cv_image)
+ #                 flag == 0
+ #             if flag == 15:
+ #                 np.savez("../data/calibration/artag_pos.npz", position = self.positions, orientation=self.orientations)
+ #         # (rows,cols,channels) = cv_image.shape
+
+ #         # cv2.imshow("Image window", cv_image)
+ #         # cv2.waitKey(3)
+
+ #     def infocallback(self, data):
+ #         if self.K_matrix == None:
+ #             self.K_matrix = data.K