def publish_odom(self):
if self.last_baseline is None or self.last_vel is None:
return
if self.last_baseline.tow == self.last_vel.tow:
self.odom_msg.header.stamp = rospy.Time.now()
self.odom_msg.pose.pose.position.x = self.last_baseline.e/1000.0
self.odom_msg.pose.pose.position.y = self.last_baseline.n/1000.0
self.odom_msg.pose.pose.position.z = -self.last_baseline.d/1000.0
self.odom_msg.twist.twist.linear.x = self.last_vel.e/1000.0
self.odom_msg.twist.twist.linear.y = self.last_vel.n/1000.0
self.odom_msg.twist.twist.linear.z = -self.last_vel.d/1000.0
self.pub_odom.publish(self.odom_msg)
def callback_sbp_base_pos(self, msg, **metadata):
if self.debug:
rospy.loginfo("Received SBP_MSG_BASE_POS (Sender: %d): %s" % (msg.sender, repr(msg)))
if self.send_observations:
for s in self.obs_senders:
s.send(msg)
# publish tf for rtk frame
if self.publish_utm_rtk_tf:
if not self.proj:
self.init_proj((msg.lat, msg.lon))
E,N = self.proj(msg.lon,msg.lat, inverse=False)
self.transform.header.stamp = rospy.Time.now()
self.transform.transform.translation.x = E
self.transform.transform.translation.y = N
self.transform.transform.translation.z = -msg.height
self.tf_br.sendTransform(self.transform)
def _get_transforms(self, time=None):
"""
Samples the transforms at the given time.
:param time: sampling time (now if None)
:type time: None|rospy.Time
:rtype: dict[str, ((float, float, float), (float, float, float, float))]
"""
if time is None:
time = self._wait_for_transforms()
# here we trick the library (it is actually made for eye_on_hand only). Trust me, I'm an engineer
if self.eye_on_hand:
rob = self.listener.lookupTransform(self.robot_base_frame, self.robot_effector_frame, time)
else:
rob = self.listener.lookupTransform(self.robot_effector_frame, self.robot_base_frame, time)
opt = self.listener.lookupTransform(self.tracking_base_frame, self.tracking_marker_frame, time)
return {'robot': rob, 'optical': opt}
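A minimal usage sketch for the sampler above, assuming `calibrator` is an instance of the surrounding class and the configured frames are available on tf (the variable name is illustrative only):
samples = calibrator._get_transforms()     # sample both transforms at the latest common time
rob_trans, rob_rot = samples['robot']      # (x, y, z) translation and (x, y, z, w) quaternion
opt_trans, opt_rot = samples['optical']    # tracking_base_frame -> tracking_marker_frame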
def __init__(self, limb, joint_names):
self._joint_names = joint_names
ns = 'robot/limb/' + limb + '/'
self._client = actionlib.SimpleActionClient(
ns + "follow_joint_trajectory",
FollowJointTrajectoryAction,
)
self._goal = FollowJointTrajectoryGoal()
self._goal_time_tolerance = rospy.Time(0.1)
self._goal.goal_time_tolerance = self._goal_time_tolerance
server_up = self._client.wait_for_server(timeout=rospy.Duration(10.0))
if not server_up:
rospy.logerr("Timed out waiting for Joint Trajectory"
" Action Server to connect. Start the action server"
" before running example.")
rospy.signal_shutdown("Timed out waiting for Action Server")
sys.exit(1)
self.clear(limb)
def robot_listener(self):
'''
rospy.wait_for_service('spawn')
spawner = rospy.ServiceProxy('spawn', turtlesim.srv.Spawn)
spawner(4, 2, 0, 'turtle2')
'''
robot_vel_pub = rospy.Publisher('robot_0/cmd_vel', geometry_msgs.msg.Twist, queue_size=1)
rate = rospy.Rate(10.0)
while not rospy.is_shutdown():
try:
(trans,rot) = self.listener.lookupTransform('/robot_3', '/robot_0', rospy.Time(0))
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
continue
angular = 4 * math.atan2(trans[1], trans[0])
linear = 0.5 * math.sqrt(trans[0] ** 2 + trans[1] ** 2)
cmd = geometry_msgs.msg.Twist()
cmd.linear.x = linear
cmd.angular.z = angular
robot_vel_pub.publish(cmd)
rate.sleep()
def printJointStates(self):
try:
# now = rospy.Time.now()
# self.tf.waitForTransform(self.frame, 'r_gripper_tool_frame', now, rospy.Duration(10.0))
self.tf.waitForTransform(self.frame, 'r_gripper_tool_frame', rospy.Time(), rospy.Duration(10.0))
currentRightPos, currentRightOrient = self.tf.lookupTransform(self.frame, 'r_gripper_tool_frame', rospy.Time(0))
msg = rospy.wait_for_message('/r_arm_controller/state', JointTrajectoryControllerState)
currentRightJointPositions = msg.actual.positions
print 'Right positions:', currentRightPos, currentRightOrient
print 'Right joint positions:', currentRightJointPositions
# now = rospy.Time.now()
# self.tf.waitForTransform(self.frame, 'l_gripper_tool_frame', now, rospy.Duration(10.0))
currentLeftPos, currentLeftOrient = self.tf.lookupTransform(self.frame, 'l_gripper_tool_frame', rospy.Time(0))
msg = rospy.wait_for_message('/l_arm_controller/state', JointTrajectoryControllerState)
currentLeftJointPositions = msg.actual.positions
print 'Left positions:', currentLeftPos, currentLeftOrient
print 'Left joint positions:', currentLeftJointPositions
# print 'Right gripper state:', rospy.wait_for_message('/r_gripper_controller/state', JointControllerState)
except tf.ExtrapolationException:
print 'No transformation available! Failing to record this time step.'
def figure_target(self, zarj, previous):
""" Set the origin """
self.origin = previous.target
if abs(self.distance) > 0.001:
pelvis = zarj.transform.lookup_transform('world', 'pelvis',
rospy.Time()).transform
quaternion = [pelvis.rotation.w, pelvis.rotation.x,
pelvis.rotation.y, pelvis.rotation.z]
matrix = quaternion_matrix(quaternion)
point = np.matrix([0, self.distance, 0, 1], dtype='float32')
point.resize((4, 1))
rotated = matrix*point
self.target = Vector3(self.origin.x - rotated.item(1),
self.origin.y + rotated.item(2),
self.origin.z)
else:
self.target = self.origin
if self.turn is not None:
self.target_orientation = self.turn + \
previous.target_orientation
def lookup_feet(name, tf_buffer):
global lf_start_position
global lf_start_orientation
global rf_start_position
global rf_start_orientation
lfp = "/ihmc_ros/{0}/left_foot_frame_name".format(name)
rfp = "/ihmc_ros/{0}/right_foot_frame_name".format(name)
if rospy.has_param(rfp) and rospy.has_param(lfp):
lfname = rospy.get_param(lfp)
rfname = rospy.get_param(rfp)
leftFootWorld = tf_buffer.lookup_transform('world', lfname, rospy.Time())
lf_start_orientation = leftFootWorld.transform.rotation
lf_start_position = leftFootWorld.transform.translation
rightFootWorld = tf_buffer.lookup_transform('world', rfname, rospy.Time())
rf_start_orientation = rightFootWorld.transform.rotation
rf_start_position = rightFootWorld.transform.translation
def lookupFeet(name, tfBuffer):
global lf_start_position
global lf_start_orientation
global rf_start_position
global rf_start_orientation
lfp = "/ihmc_ros/{0}/left_foot_frame_name".format(name)
rfp = "/ihmc_ros/{0}/right_foot_frame_name".format(name)
if rospy.has_param(rfp) and rospy.has_param(lfp):
lfname = rospy.get_param(lfp)
rfname = rospy.get_param(rfp)
leftFootWorld = tfBuffer.lookup_transform('world', lfname, rospy.Time())
lf_start_orientation = leftFootWorld.transform.rotation
lf_start_position = leftFootWorld.transform.translation
rightFootWorld = tfBuffer.lookup_transform('world', rfname, rospy.Time())
rf_start_orientation = rightFootWorld.transform.rotation
rf_start_position = rightFootWorld.transform.translation
Source file: 2017_Task9.py (project: cu-perception-manipulation-stack, author: correlllab)
def goto(self, from_frame, to_frame):
'''
    Calculates the transform from from_frame to to_frame
and commands the arm in cartesian mode.
'''
    try:
        trans = self.tfBuffer.lookup_transform(from_frame, to_frame, rospy.Time())
    except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
        # transform not available; return instead of falling through with trans undefined
        rospy.logwarn("goto: transform from %s to %s not available" % (from_frame, to_frame))
        return
translation = [trans.transform.translation.x, trans.transform.translation.y, trans.transform.translation.z]
rotation = [trans.transform.rotation.x, trans.transform.rotation.y, trans.transform.rotation.z, trans.transform.rotation.w]
pose_value = translation + rotation
#second arg=0 (absolute movement), arg = '-r' (relative movement)
self.cmmnd_CartesianPosition(pose_value, 0)
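Per the comment above, the same helper can reportedly also be driven relatively by passing '-r' as the second argument; a hedged sketch, called from within the same class, with illustrative offset values:
# hypothetical relative move: nudge the end effector 2 cm along x with an identity orientation offset
relative_offset = [0.02, 0.0, 0.0] + [0.0, 0.0, 0.0, 1.0]
self.cmmnd_CartesianPosition(relative_offset, '-r')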
Source file: 2017_Task10.py (project: cu-perception-manipulation-stack, author: correlllab)
def goto_spoon(self):
self.calibrate_obj_det_pub.publish(True)
    while not self.calibrated:
pass
print("Finger Sensors calibrated")
    try:
        trans = self.tfBuffer.lookup_transform('root', 'spoon_position', rospy.Time())
    except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
        # transform not available; return instead of falling through with trans undefined
        rospy.logwarn("goto_spoon: transform from root to spoon_position not available")
        return
translation = [trans.transform.translation.x, trans.transform.translation.y, trans.transform.translation.z]
rotation = [trans.transform.rotation.x, trans.transform.rotation.y, trans.transform.rotation.z, trans.transform.rotation.w]
pose_value = translation + rotation
#second arg=0 (absolute movement), arg = '-r' (relative movement)
self.cmmnd_CartesianPosition(pose_value, 0)
def _place(self):
self.state = "place"
rospy.loginfo("Placing...")
place_pose = self.int_markers['place_pose']
# It seems positions and orientations are randomly required to
# be actual Point and Quaternion objects or lists/tuples. The
# least coherent API ever seen.
self.br.sendTransform(Point2list(place_pose.pose.position),
Quaternion2list(place_pose.pose.orientation),
rospy.Time.now(),
"place_pose",
self.robot.base)
self.robot.place(place_pose)
# For cubelets:
place_pose.pose.position.z += 0.042
# For YCB:
# place_pose.pose.position.z += 0.026
self.place_pose = place_pose
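The Point2list and Quaternion2list helpers used above are not shown in this snippet; a plausible sketch of what they do (an assumption, not the project's actual implementation):
def Point2list(point):
    # flatten a geometry_msgs/Point into the (x, y, z) tuple that sendTransform expects
    return (point.x, point.y, point.z)

def Quaternion2list(quaternion):
    # flatten a geometry_msgs/Quaternion into an (x, y, z, w) tuple
    return (quaternion.x, quaternion.y, quaternion.z, quaternion.w)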
Source file: 2017_Task4.py (project: cu-perception-manipulation-stack, author: correlllab)
def goto(self, from_frame, to_frame):
'''
    Calculates the transform from from_frame to to_frame
and commands the arm in cartesian mode.
'''
    try:
        trans = self.tfBuffer.lookup_transform(from_frame, to_frame, rospy.Time())
    except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
        # transform not available; return instead of falling through with trans undefined
        rospy.logwarn("goto: transform from %s to %s not available" % (from_frame, to_frame))
        return
translation = [trans.transform.translation.x, trans.transform.translation.y, trans.transform.translation.z]
rotation = [trans.transform.rotation.x, trans.transform.rotation.y, trans.transform.rotation.z, trans.transform.rotation.w]
pose_value = translation + rotation
#second arg=0 (absolute movement), arg = '-r' (relative movement)
self.cmmnd_CartesianPosition(pose_value, 0)
def search_USBlight(self):
if self.listen.frameExists("/j2n6a300_end_effector") and self.listen.frameExists("/root"):
# print ("we are in the search spoon fucntion")
self.listen.waitForTransform('/j2n6a300_end_effector','/root',rospy.Time(),rospy.Duration(100.0))
t = self.listen.getLatestCommonTime("/j2n6a300_end_effector","/root")
translation, quaternion = self.listen.lookupTransform("/j2n6a300_end_effector","/root",t)
matrix1=self.listen.fromTranslationRotation(translation,quaternion)
counter=0
rate=rospy.Rate(100)
while not self.obj_det:
counter = counter + 1
if(counter < 200):
cart_velocities = np.dot(matrix1[:3,:3],np.array([0,0,0.05])[np.newaxis].T) #change in y->x, z->y, x->z
cart_velocities = cart_velocities.T[0].tolist()
self.cmmnd_CartesianVelocity(cart_velocities + [0,0,0,1])
else:
cart_velocities = np.dot(matrix1[:3,:3],np.array([0,0,-0.05])[np.newaxis].T)
cart_velocities = cart_velocities.T[0].tolist()
self.cmmnd_CartesianVelocity(cart_velocities + [0,0,0,1])
rate.sleep()
if(counter >400):
counter=0
def search_straw(self):
if self.listen.frameExists("/j2n6a300_end_effector") and self.listen.frameExists("/root"):
# print ("we are in the search spoon fucntion")
self.listen.waitForTransform('/j2n6a300_end_effector','/root',rospy.Time(),rospy.Duration(100.0))
t = self.listen.getLatestCommonTime("/j2n6a300_end_effector","/root")
translation, quaternion = self.listen.lookupTransform("/j2n6a300_end_effector","/root",t)
matrix1=self.listen.fromTranslationRotation(translation,quaternion)
counter=0
rate=rospy.Rate(100)
while not self.obj_det:
counter = counter + 1
if(counter < 200):
cart_velocities = np.dot(matrix1[:3,:3],np.array([00,0,0.05])[np.newaxis].T) #change in y->x, z->y, x->z
cart_velocities = cart_velocities.T[0].tolist()
self.cmmnd_CartesianVelocity(cart_velocities + [0,0,0,1])
else:
cart_velocities = np.dot(matrix1[:3,:3],np.array([0.0,0,-.05])[np.newaxis].T)
cart_velocities = cart_velocities.T[0].tolist()
self.cmmnd_CartesianVelocity(cart_velocities + [0,0,0,1])
rate.sleep()
if(counter >400):
counter=0
def goto_plate(self):
if self.listen.frameExists("/root") and self.listen.frameExists("/plate_position"):
self.listen.waitForTransform('/root','/plate_position',rospy.Time(),rospy.Duration(100.0))
# t1 = self.listen.getLatestCommonTime("/root", "bowl_position")
translation, quaternion = self.listen.lookupTransform("/root", "/plate_position", rospy.Time(0))
translation = list(translation)
quaternion = list(quaternion)
#quaternion = [0.8678189045198146, 0.0003956789257977804, -0.4968799802988633, 0.0006910675928639343]
#quaternion = [0]*4
pose_value = translation + quaternion
#second arg=0 (absolute movement), arg = '-r' (relative movement)
self.cmmnd_CartesianPosition(pose_value, 0)
    else:
        print("we don't have the plate frame")
def searchSpoon(self):
if self.listen.frameExists("/j2n6a300_end_effector") and self.listen.frameExists("/root"):
# print ("we are in the search spoon fucntion")
self.listen.waitForTransform('/j2n6a300_end_effector','/root',rospy.Time(),rospy.Duration(100.0))
t = self.listen.getLatestCommonTime("/j2n6a300_end_effector","/root")
translation, quaternion = self.listen.lookupTransform("/j2n6a300_end_effector","/root",t)
matrix1=self.listen.fromTranslationRotation(translation,quaternion)
counter=0
rate=rospy.Rate(100)
while not self.obj_det:
counter = counter + 1
if(counter < 200):
cart_velocities = np.dot(matrix1[:3,:3],np.array([0,0,0.05])[np.newaxis].T) #change in y->x, z->y, x->z
cart_velocities = cart_velocities.T[0].tolist()
self.cmmnd_CartesianVelocity(cart_velocities + [0,0,0,1])
else:
cart_velocities = np.dot(matrix1[:3,:3],np.array([0,0,-0.05])[np.newaxis].T)
cart_velocities = cart_velocities.T[0].tolist()
self.cmmnd_CartesianVelocity(cart_velocities + [0,0,0,1])
rate.sleep()
if(counter >400):
counter=0
def scoopSpoon(self):
while not (self.listen.frameExists("/j2n6a300_end_effector") and self.listen.frameExists("/j2n6a300_link_finger_tip_3")):
pass
print ("Publishing transform of figner wrt endEffector frame")
p.listen.waitForTransform('/j2n6a300_end_effector','/j2n6a300_link_finger_tip_3',rospy.Time(),rospy.Duration(100.0))
t = self.listen.getLatestCommonTime("/j2n6a300_end_effector", "/j2n6a300_link_finger_tip_3")
translation, quaternion = self.listen.lookupTransform("/j2n6a300_end_effector", "/j2n6a300_link_finger_tip_3", t)
# print (translation)
matrix2 = self.listen.fromTranslationRotation(translation, quaternion)
# print (matrix2)
# required_cartvelo = [0,0,0,0.1,0,0]
quaternion = pose_action_client.EulerXYZ2Quaternion([0.15,0,0]) # set rot angle HERE (deg)
matrix1 = self.listen.fromTranslationRotation([0,0,0], quaternion)
# print (matrix1)
matrix3 = np.dot(matrix2,matrix1)
    scale, shear, rpy_angles, trans, perps = tf.transformations.decompose_matrix(matrix3)
    trans = list(trans.tolist())
    rpy_angles = list(rpy_angles)
    # euler = pose_action_client.Quaternion2EulerXYZ(quat_1)
    pose = trans + rpy_angles  # pose as translation plus RPY Euler angles
    return pose
    # return translation, quaternion
def searchdriver(self):
if self.listen.frameExists("/j2n6a300_end_effector") and self.listen.frameExists("/root"):
# print ("we are in the search spoon fucntion")
self.listen.waitForTransform('/j2n6a300_end_effector','/root',rospy.Time(),rospy.Duration(100.0))
t = self.listen.getLatestCommonTime("/j2n6a300_end_effector","/root")
translation, quaternion = self.listen.lookupTransform("/j2n6a300_end_effector","/root",t)
matrix1=self.listen.fromTranslationRotation(translation,quaternion)
counter=0
rate=rospy.Rate(100)
while not self.obj_det:
counter = counter + 1
if(counter < 200):
cart_velocities = np.dot(matrix1[:3,:3],np.array([0,0,0.05])[np.newaxis].T) #change in y->x, z->y, x->z
cart_velocities = cart_velocities.T[0].tolist()
self.cmmnd_CartesianVelocity(cart_velocities + [0,0,0,1])
else:
cart_velocities = np.dot(matrix1[:3,:3],np.array([0,0,-0.05])[np.newaxis].T)
cart_velocities = cart_velocities.T[0].tolist()
self.cmmnd_CartesianVelocity(cart_velocities + [0,0,0,1])
rate.sleep()
if(counter >400):
counter=0
Source file: action_database.py (project: cu-perception-manipulation-stack, author: correlllab)
def create_pose_velocity_msg(self,cart_velo,transform=False):
if transform:
if self.listen.frameExists("/root") and self.listen.frameExists("/j2n6a300_end_effector"):
self.listen.waitForTransform('/root',"/j2n6a300_end_effector",rospy.Time(),rospy.Duration(100.0))
t = self.listen.getLatestCommonTime("/root", "/j2n6a300_end_effector")
translation, quaternion = self.listen.lookupTransform("/root", "/j2n6a300_end_effector", t)
transform = self.transformer.fromTranslationRotation(translation, quaternion)
transformed_vel = np.dot(transform,(cart_velo[0:3]+[1.0]))
print(transformed_vel)
cart_velo[0:3] = transformed_vel[0:3]
msg = PoseVelocity(
twist_linear_x=cart_velo[0],
twist_linear_y=cart_velo[1],
twist_linear_z=cart_velo[2],
twist_angular_x=cart_velo[3],
twist_angular_y=cart_velo[4],
twist_angular_z=cart_velo[5])
return msg
def __init__(self):
self.services = {'record': {'name': 'perception/record', 'type': Record}}
for service_name, service in self.services.items():
rospy.loginfo("Controller is waiting service {}...".format(service['name']))
rospy.wait_for_service(service['name'])
service['call'] = rospy.ServiceProxy(service['name'], service['type'])
# Buttons switches + LEDs
self.prefix = 'sensors'
self.button_leds_topics = ['button_leds/help', 'button_leds/pause']
self.buttons_topics = ['buttons/help', 'buttons/pause']
    self.subscribers = [rospy.Subscriber('/'.join([self.prefix, topic]), Bool, lambda msg, topic=topic: self._cb_button_pressed(msg, topic)) for topic in self.buttons_topics]  # default arg binds each topic, avoiding late binding in the lambda
self.publishers = {topic: rospy.Publisher('/'.join([self.prefix, topic]), Bool, queue_size=1) for topic in self.button_leds_topics}
self.button_leds_status = {topic: False for topic in self.button_leds_topics}
self.button_pressed = {topic: False for topic in self.buttons_topics}
self.last_press = {topic: rospy.Time(0) for topic in self.buttons_topics}
self.rospack = RosPack()
with open(join(self.rospack.get_path('apex_playground'), 'config', 'perception.json')) as f:
self.params = json.load(f)
def range_cb(self, rng):
self.ranges[rng.header.frame_id] = rng.range
try:
trans, _ = self.listener.lookupTransform(
self.frame_id, rng.header.frame_id, rospy.Time(0))
self.tag_pos[rng.header.frame_id] = np.array(trans[:3])
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        return
if len(self.ranges.values()) == 6 and len(self.tag_pos.values()) == 6:
pos = self.find_xyz()
pos_3d = self.find_position_3d()
self.altitude_pub(pos[2])
self.publish_position(
pos, self.ps_pub, self.ps_cov_pub, self.cov)
self.publish_position(
pos_3d, self.ps_pub_3d, self.ps_cov_pub_3d, self.cov)
self.ranges = dict()
def callback_state(self, data):
for idx, cube in enumerate(data.name):
self.cubes_state.setdefault(cube, [0] * 3)
pose = self.cubes_state[cube]
cube_init = self.CubeMap[cube]["init"]
pose[0] = data.pose[idx].position.x + cube_init[0]
pose[1] = data.pose[idx].position.y + cube_init[1]
pose[2] = data.pose[idx].position.z + cube_init[2]
# def add_cube(self, name):
# p = PoseStamped()
# p.header.frame_id = ros_robot.get_planning_frame()
# p.header.stamp = rospy.Time.now()
#
# # p.pose = self._arm.get_random_pose().pose
# p.pose.position.x = -0.18
# p.pose.position.y = 0
# p.pose.position.z = 0.046
#
# q = quaternion_from_euler(0.0, 0.0, 0.0)
# p.pose.orientation = Quaternion(*q)
# ros_scene.add_box(name, p, (0.02, 0.02, 0.02))
#
# def remove_cube(self, name):
# ros_scene.remove_world_object(name)
def send_transform(self, translation, rotation, time, child, parent):
"""
:param translation: the translation of the transformation as a tuple (x, y, z)
:param rotation: the rotation of the transformation as a tuple (x, y, z, w)
:param time: the time of the transformation, as a rospy.Time()
:param child: child frame in tf, string
:param parent: parent frame in tf, string
Broadcast the transformation from tf frame child to parent on ROS topic ``"/tf"``.
"""
t = TransformStamped()
t.header.frame_id = parent
t.header.stamp = time
t.child_frame_id = child
t.transform.translation.x = translation[0]
t.transform.translation.y = translation[1]
t.transform.translation.z = translation[2]
t.transform.rotation.x = rotation[0]
t.transform.rotation.y = rotation[1]
t.transform.rotation.z = rotation[2]
t.transform.rotation.w = rotation[3]
self.send_transform_message(t)
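A short usage sketch for the method above, assuming `broadcaster` is an object exposing `send_transform` inside a running ROS node; the frame names and offset are illustrative:
# broadcast a 10 cm x-offset with identity rotation from 'base_link' to 'sensor_link'
broadcaster.send_transform((0.10, 0.0, 0.0),       # translation (x, y, z)
                           (0.0, 0.0, 0.0, 1.0),   # rotation quaternion (x, y, z, w)
                           rospy.Time.now(),       # stamp
                           'sensor_link',          # child frame
                           'base_link')            # parent frame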
def _publish_diagnostics(self):
# alias
diag_status = self._diag_array.status[0]
# fill diagnostics array
battery_voltage = self._cozmo.battery_voltage
diag_status.values[0].value = '{:.2f} V'.format(battery_voltage)
diag_status.values[1].value = '{:.2f} deg'.format(self._cozmo.head_angle.degrees)
diag_status.values[2].value = '{:.2f} mm'.format(self._cozmo.lift_height.distance_mm)
if battery_voltage > 3.5:
diag_status.level = DiagnosticStatus.OK
diag_status.message = 'Everything OK!'
elif battery_voltage > 3.4:
diag_status.level = DiagnosticStatus.WARN
diag_status.message = 'Battery low! Go charge soon!'
else:
diag_status.level = DiagnosticStatus.ERROR
diag_status.message = 'Battery very low! Cozmo will power off soon!'
# update message stamp and publish
self._diag_array.header.stamp = rospy.Time.now()
self._diag_pub.publish(self._diag_array)
def _publish_joint_state(self):
"""
Publish joint states as JointStates.
"""
# only publish if we have a subscriber
if self._joint_state_pub.get_num_connections() == 0:
return
js = JointState()
js.header.stamp = rospy.Time.now()
js.header.frame_id = 'cozmo'
js.name = ['head', 'lift']
js.position = [self._cozmo.head_angle.radians,
self._cozmo.lift_height.distance_mm * 0.001]
js.velocity = [0.0, 0.0]
js.effort = [0.0, 0.0]
self._joint_state_pub.publish(js)
def _publish_battery(self):
"""
Publish battery as BatteryState message.
"""
# only publish if we have a subscriber
if self._battery_pub.get_num_connections() == 0:
return
battery = BatteryState()
battery.header.stamp = rospy.Time.now()
battery.voltage = self._cozmo.battery_voltage
battery.present = True
if self._cozmo.is_on_charger: # is_charging always return False
battery.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_CHARGING
else:
battery.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_NOT_CHARGING
self._battery_pub.publish(battery)
def __init__(self,dis=0.0):
State.__init__(self, outcomes=['succeeded','preempted','aborted'],input_keys=['target'])
rospy.on_shutdown(self.shutdown)
self.test_distance = target.y-dis
self.rate = 200
self.r = rospy.Rate(self.rate)
self.speed = rospy.get_param('~speed',0.08)
self.tolerance = rospy.get_param('~tolerance', 0.01)
self.cmd_vel = rospy.Publisher('/mobile_base_controller/smooth_cmd_vel',Twist,queue_size=5)
self.base_frame = rospy.get_param('~base_frame', '/base_link')
self.odom_frame = rospy.get_param('~odom_frame', '/odom')
self.tf_listener = tf.TransformListener()
rospy.sleep(1)
self.tf_listener.waitForTransform(self.odom_frame,self.base_frame,rospy.Time(),rospy.Duration(60.0))
    # define a flag variable
self.flag = rospy.get_param('~flag', True)
rospy.loginfo(self.test_distance)
#tf get position
#publish cmd_vel