debug for vins_transfer

robin_shaun 2021-04-11 15:28:10 +08:00
parent ac5f1363fb
commit 50f0c25775
3 changed files with 36 additions and 13 deletions


@@ -1,10 +1,12 @@
 import rospy
-from geometry_msgs.msg import Twist, PoseStamped, TransformStamped
+from gazebo_msgs.msg import LinkStates
+from geometry_msgs.msg import Twist, Pose, TransformStamped
 from sensor_msgs.msg import Image, CameraInfo
 from tf2_ros import TransformListener, Buffer
 import sys
 import cv2
 import numpy as np
+from scipy.spatial.transform import Rotation as R
 from cv_bridge import CvBridge
 bridge = CvBridge()
 import matplotlib.pyplot as plt
@@ -15,13 +17,16 @@ ax = fig.add_subplot(111, projection='3d')
 class Tracker():
     def __init__(self, vehicle_type, vehicle_id):
         self.K = np.eye(3)
-        self.E = np.hstack((np.eye(3),np.array([[0.1],[0.1],[0.5]])))
-        self.lines_3d = np.array([[[0.05,0,0],[0.05,0.05,0]],[[0.05,0.05,0],[0.05,0.05,0.05]],[[0.05,0.05,0.05],[0.05,0,0.05]],[[0.05,0,0.05],[0.05,0,0]],[[0,0,0],[0,0.05,0]],[[0,0.05,0],[0,0.05,0.05]],[[0,0.05,0.05],[0,0,0.05]],[[0,0,0.05],[0,0,0]],[[0,0,0],[0.05,0,0]],[[0,0.05,0],[0.05,0.05,0]],[[0,0,0.05],[0.05,0,0.05]],[[0,0.05,0.05],[0.05,0.05,0.05]]])
+        self.E = np.hstack((np.eye(3),np.array([[0.25],[0.25],[1]])))
+        self.lines_3d = 6*np.array([[[0.05,0,0],[0.05,0.05,0]],[[0.05,0.05,0],[0.05,0.05,0.05]],[[0.05,0.05,0.05],[0.05,0,0.05]],[[0.05,0,0.05],[0.05,0,0]],[[0,0,0],[0,0.05,0]],[[0,0.05,0],[0,0.05,0.05]],[[0,0.05,0.05],[0,0,0.05]],[[0,0,0.05],[0,0,0]],[[0,0,0],[0.05,0,0]],[[0,0.05,0],[0.05,0.05,0]],[[0,0,0.05],[0.05,0,0.05]],[[0,0.05,0.05],[0.05,0.05,0.05]]])
         self.points_2d = []
+        self.box_pose = Pose()
+        self.camera_pose = Pose()
         # self.plot_3d()
         rospy.init_node(vehicle_type+'_'+vehicle_id+'_visual_servo')
-        rospy.Subscriber(vehicle_type+'_'+vehicle_id+'/le_arm/camera/image_raw', Image, self.image_callback)
-        rospy.Subscriber(vehicle_type+'_'+vehicle_id+'/le_arm/camera/camera_info', CameraInfo, self.camera_info_callback)
+        rospy.Subscriber(vehicle_type+'_'+vehicle_id+'/realsense/depth_camera/color/image_raw', Image, self.image_callback)
+        rospy.Subscriber(vehicle_type+'_'+vehicle_id+'/realsense/depth_camera/color/camera_info', CameraInfo, self.camera_info_callback)
+        rospy.Subscriber("/gazebo/link_states", LinkStates, self.link_states_callback)
         self.image_pub = rospy.Publisher(vehicle_type+'_'+vehicle_id+'/le_arm/visual_servo/image', Image, queue_size=2)
         rate = rospy.Rate(20)
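The hard-coded lines_3d literal above enumerates the 12 edges of a cube with one corner at the origin; the commit scales it by 6, turning the 0.05 m edges into 0.3 m, and gives the initial extrinsic guess E a larger translation. As a readability aid only, here is a sketch of generating the same edge list programmatically (cube_edges and edge_length are illustrative names, not part of the commit):

```python
import itertools
import numpy as np

def cube_edges(edge_length=0.3):
    """Return the 12 edges of an axis-aligned cube as an array of shape (12, 2, 3)."""
    corners = np.array(list(itertools.product([0.0, edge_length], repeat=3)))  # 8 corners
    edges = []
    for a, b in itertools.combinations(range(8), 2):
        # Two corners share an edge when they differ along exactly one axis.
        if np.count_nonzero(corners[a] != corners[b]) == 1:
            edges.append([corners[a], corners[b]])
    return np.array(edges)

# Same geometry as the 6*np.array([...]) literal above: a 0.3 m cube anchored at the origin.
lines_3d = cube_edges(0.3)
assert lines_3d.shape == (12, 2, 3)
```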
@@ -33,16 +38,13 @@ class Tracker():
         for line_3d in self.lines_3d:
             for point_3d in line_3d:
                 point_2d = self.K.dot(self.E).dot(np.vstack((point_3d.reshape(3,1),1)))
-                point_2d = point_2d.astype(int)
                 if self.points_2d == []:
                     self.points_2d = point_2d
                 else:
                     self.points_2d = np.hstack((self.points_2d, point_2d))
     def plot_3d(self):
         for line_3d in self.lines_3d:
-            print(zip(line_3d[0],line_3d[1]))
             ax.plot(*zip(line_3d[0],line_3d[1]),color="b")
         plt.show()
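project() maps each edge endpoint X from the box frame into homogeneous pixel coordinates as x ~ K·E·[X;1]; with the astype(int) cast dropped here, the perspective division by the third (depth-scaled) component now happens later, in draw_lines. A self-contained sketch of that pinhole step, using made-up K and E values in place of the live camera_info and link-state data:

```python
import numpy as np

# Illustrative intrinsics and extrinsics; the node fills these from
# camera_info and /gazebo/link_states at runtime.
K = np.array([[554.0, 0.0, 320.0],
              [0.0, 554.0, 240.0],
              [0.0, 0.0, 1.0]])
E = np.hstack((np.eye(3), np.array([[0.25], [0.25], [1.0]])))  # 3x4 [R|t]

def project_point(point_3d, K, E):
    """Project a 3D point to pixel coordinates with the pinhole model."""
    homogeneous = np.vstack((np.asarray(point_3d).reshape(3, 1), [[1.0]]))  # 4x1
    uvw = K.dot(E).dot(homogeneous)                # 3x1, still scaled by depth
    u = uvw[0, 0] / uvw[2, 0]                      # perspective division
    v = uvw[1, 0] / uvw[2, 0]
    return int(u), int(v)

print(project_point([0.3, 0.0, 0.0], K, E))
```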
@@ -59,13 +61,33 @@ class Tracker():
         if points is None:
             return
         for i in range(points.shape[1]/2):
-            cv2.line(line_img, (points[0,2*i], points[1,i]), (points[0,i+1], points[1,2*i+1]), color, thickness)
+            cv2.line(line_img, (int(points[0,2*i]/points[2,2*i]), int(points[1,2*i]/points[2,2*i])), (int(points[0,2*i+1]/points[2,2*i+1]), int(points[1,2*i+1]/points[2,2*i+1])), color, thickness)
         img = cv2.addWeighted(img, 0.8, line_img, 1.0, 0.0)
         return img
+    def link_states_callback(self, msg):
+        try:
+            box_id = msg.name.index("box::base_link")
+            camera_id = msg.name.index("iris_0::realsense_camera::link")
+            box_pose = msg.pose[box_id]
+            box_R = R.from_quat([box_pose.orientation.x, box_pose.orientation.y, box_pose.orientation.z, box_pose.orientation.w]).as_dcm()
+            box_t = np.array([[box_pose.position.x],[box_pose.position.y],[box_pose.position.z]])
+            box_pose = np.hstack((box_R,box_t))
+            box_pose = np.vstack((box_pose,np.array([0,0,0,1])))
+            camera_pose = msg.pose[camera_id]
+            camera_R = R.from_quat([camera_pose.orientation.x, camera_pose.orientation.y, camera_pose.orientation.z, camera_pose.orientation.w]).as_dcm()
+            camera_t = np.array([[camera_pose.position.x],[camera_pose.position.y],[camera_pose.position.z]])
+            camera_pose = np.hstack((camera_R,camera_t))
+            camera_pose = np.vstack((camera_pose,np.array([0,0,0,1])))
+            self.E = box_pose.dot(np.linalg.inv(camera_pose))
+            self.E = self.E[:3,:]
+        except ValueError:
+            pass
     def camera_info_callback(self, msg):
         self.K = np.array(msg.K).reshape([3,3])
-        print(self.K)
+        # print(self.K)
     def image_callback(self, msg):
         cv_image = bridge.imgmsg_to_cv2(msg, "rgb8")
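The new link_states_callback looks up the box and camera links in /gazebo/link_states, turns each Pose into a 4x4 homogeneous transform (quaternion to rotation matrix via scipy, translation in the last column), combines the two, and keeps the top 3x4 block as the new extrinsic E. A sketch of the pose-to-matrix step in isolation (pose_to_matrix is an illustrative helper; as_dcm() matches the scipy spelling used in the diff and is named as_matrix() in newer releases):

```python
import numpy as np
from scipy.spatial.transform import Rotation as R

def pose_to_matrix(pose):
    """Convert a geometry_msgs/Pose into a 4x4 homogeneous transform."""
    q = [pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w]
    T = np.eye(4)
    T[:3, :3] = R.from_quat(q).as_dcm()  # use as_matrix() on scipy >= 1.4
    T[:3, 3] = [pose.position.x, pose.position.y, pose.position.z]
    return T

# As in the callback, with box_pose and camera_pose taken from msg.pose[...]:
# E = pose_to_matrix(box_pose).dot(np.linalg.inv(pose_to_matrix(camera_pose)))[:3, :]
```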
@@ -84,7 +106,8 @@ class Tracker():
         # )
         # if lines is None:
         # return
-        self.project()
+        if self.points_2d == []:
+            self.project()
         line_image = self.draw_lines(cv_image,self.points_2d,thickness=5)
         img_ros = bridge.cv2_to_imgmsg(line_image)
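image_callback now projects only while points_2d is still the initial empty list and reuses the cached pixels afterwards, and it republishes the overlay via cv_bridge without naming an encoding, so cv2_to_imgmsg falls back to passthrough. A small sketch of that convert-draw-repack round trip with the encoding spelled out; the function name and the explicit "rgb8" argument are suggestions, not something the commit does:

```python
from cv_bridge import CvBridge

bridge = CvBridge()

def overlay_and_repack(img_msg, points_2d, draw_lines):
    """sensor_msgs/Image -> ndarray, draw the projected wireframe, convert back."""
    cv_image = bridge.imgmsg_to_cv2(img_msg, "rgb8")           # HxWx3 uint8 array
    line_image = draw_lines(cv_image, points_2d, thickness=5)
    # Naming the encoding keeps the republished message tagged "rgb8" instead of
    # the passthrough default ("8UC3").
    return bridge.cv2_to_imgmsg(line_image, "rgb8")
```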


@@ -24,7 +24,7 @@ def vins_callback(data):
     local_pose.pose.orientation.y = q_[2]
     local_pose.pose.orientation.z = q_[3]
-rospy.init_node(vehicle_type+"_"+vehicle_id+'/vins_transfer')
+rospy.init_node(vehicle_type+"_"+vehicle_id+'_vins_transfer')
 rospy.Subscriber("/vins_estimator/camera_pose", Odometry, vins_callback)
 position_pub = rospy.Publisher(vehicle_type+"_"+vehicle_id+"/mavros/vision_pose/pose", PoseStamped, queue_size=2)
 rate = rospy.Rate(60)
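This is the one-line change that names the commit: rospy.init_node() rejects node names that contain '/', so the '/vins_transfer' suffix is replaced with '_vins_transfer'. For context, a minimal sketch of the relay this script implements, republishing the VINS-Fusion camera pose to MAVROS as a vision pose estimate (the real script also remaps the orientation, the q_ lines above, which this sketch omits):

```python
import sys
import rospy
from nav_msgs.msg import Odometry
from geometry_msgs.msg import PoseStamped

vehicle_type, vehicle_id = sys.argv[1], sys.argv[2]
local_pose = PoseStamped()

def vins_callback(data):
    # Forward the estimated camera pose; axis remapping is left out here.
    local_pose.header.stamp = rospy.Time.now()
    local_pose.pose.position = data.pose.pose.position
    local_pose.pose.orientation = data.pose.pose.orientation

# '_' instead of '/': init_node() does not allow a namespace separator in the node name.
rospy.init_node(vehicle_type + "_" + vehicle_id + "_vins_transfer")
rospy.Subscriber("/vins_estimator/camera_pose", Odometry, vins_callback)
position_pub = rospy.Publisher(vehicle_type + "_" + vehicle_id + "/mavros/vision_pose/pose", PoseStamped, queue_size=2)
rate = rospy.Rate(60)
while not rospy.is_shutdown():
    position_pub.publish(local_pose)
    rate.sleep()
```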


@@ -34,7 +34,7 @@ This link is the base of the arm in which arm is placed
 <plugin name="camera_controller" filename="libgazebo_ros_camera.so">
   <alwaysOn>true</alwaysOn>
   <updateRate>0.0</updateRate>
-  <cameraName>probot_anno/camera</cameraName>
+  <cameraName>le_arm/camera</cameraName>
   <imageTopicName>image_raw</imageTopicName>
   <cameraInfoTopicName>camera_info</cameraInfoTopicName>
   <frameName>camera_link</frameName>
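For libgazebo_ros_camera.so, cameraName is the namespace under which imageTopicName and cameraInfoTopicName are published, so this rename moves the simulated camera's output from probot_anno/camera/image_raw to le_arm/camera/image_raw (plus the matching camera_info). A quick way to confirm the renamed topic is alive, assuming no additional robot namespace prefix (adjust the name if the model is spawned inside one):

```python
import rospy
from sensor_msgs.msg import Image

rospy.init_node("camera_topic_check")
# Topic name is an assumption; prepend the vehicle namespace if the xacro is
# spawned under one.
msg = rospy.wait_for_message("le_arm/camera/image_raw", Image, timeout=10.0)
rospy.loginfo("got %dx%d image with encoding %s", msg.width, msg.height, msg.encoding)
```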