Python transformations.quaternion_from_euler function code examples


This article collects and summarizes typical usage examples of the Python function tf.transformations.quaternion_from_euler. If you are wondering exactly what quaternion_from_euler does, how to call it, or what real-world usage looks like, the curated code examples below should help.



The following presents 20 code examples of the quaternion_from_euler function, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
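
Before the examples, here is a minimal, self-contained sketch of the function itself (the angle values are purely illustrative): tf.transformations.quaternion_from_euler(roll, pitch, yaw) takes Euler angles in radians, defaults to the axes='sxyz' (static/extrinsic X-Y-Z) convention, and returns the quaternion as a numpy array in [x, y, z, w] order, which is why many examples below unpack it directly with Quaternion(*q).

import math
from tf import transformations
from geometry_msgs.msg import Quaternion

# A pure 90-degree yaw; angles are given in radians
q = transformations.quaternion_from_euler(0.0, 0.0, math.pi / 2)
print(q)  # numpy array in [x, y, z, w] order, roughly [0, 0, 0.7071, 0.7071]

# Round-trip back to Euler angles
roll, pitch, yaw = transformations.euler_from_quaternion(q)

# The array unpacks straight into a geometry_msgs Quaternion, as the examples below do
orientation = Quaternion(*q)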

Example 1: execute

    def execute(self, userdata):
        rospy.loginfo("Creating goal to put robot in front of handle")
        pose_handle = Pose()
        if userdata.door_detection_data_in_base_link.handle_side == "left" or userdata.door_detection_data_in_base_link.handle_side == "right":  # closed door
            (r, p, theta) = euler_from_quaternion(( userdata.door_detection_data_in_base_link.door_handle.pose.orientation.x,
                                                    userdata.door_detection_data_in_base_link.door_handle.pose.orientation.y,
                                                    userdata.door_detection_data_in_base_link.door_handle.pose.orientation.z,
                                                    userdata.door_detection_data_in_base_link.door_handle.pose.orientation.w))  # gives back r, p, y
            theta += 3.1416  # the orientation of the door is looking towards the robot, we need the inverse
            pose_handle.orientation = Quaternion(*quaternion_from_euler(0, 0, theta))  # orientation to look parallel to the door
            pose_handle.position.x = userdata.door_detection_data_in_base_link.door_handle.pose.position.x - 0.4  # to align the shoulder with the handle
            pose_handle.position.y = userdata.door_detection_data_in_base_link.door_handle.pose.position.y + 0.2  # refer to upper comment
            pose_handle.position.z = 0.0  # we don't need the Z for moving
            userdata.door_handle_pose_goal = pose_handle
        else:  # open door
            # if it's open... just cross it?
            (r, p, theta) = euler_from_quaternion(( userdata.door_detection_data_in_base_link.door_position.pose.orientation.x,
                                                    userdata.door_detection_data_in_base_link.door_position.pose.orientation.y,
                                                    userdata.door_detection_data_in_base_link.door_position.pose.orientation.z,
                                                    userdata.door_detection_data_in_base_link.door_position.pose.orientation.w))  # gives back r, p, y
            theta += 3.1416  # the orientation of the door is looking towards the robot, we need the inverse
            pose_handle.orientation = Quaternion(*quaternion_from_euler(0, 0, theta))  # orientation to look parallel to the door
            pose_handle.position.x = userdata.door_detection_data_in_base_link.door_position.pose.position.x + 1.0 # enter the door
            pose_handle.position.y = userdata.door_detection_data_in_base_link.door_position.pose.position.y
            userdata.door_handle_pose_goal = pose_handle


        rospy.loginfo("Move base goal: \n" + str(pose_handle))

        return succeeded
Developer: jypuigbo, Project: robocup-code, Lines: 30, Source: approach_door.py
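
Example 1 inverts the detected door heading by adding pi (3.1416) to the yaw before rebuilding the quaternion. As a hedged side note, not part of the original project: the same inversion can be expressed by composing the detected quaternion with a 180-degree yaw rotation via quaternion_multiply, avoiding the Euler extraction; the door_q value below is purely illustrative.

import math
from tf.transformations import quaternion_from_euler, quaternion_multiply, euler_from_quaternion

door_q = quaternion_from_euler(0, 0, 0.5)  # a detected door heading (illustrative value)

# Variant used in the example: extract the yaw, add pi, rebuild the quaternion
_, _, yaw = euler_from_quaternion(door_q)
inverted_a = quaternion_from_euler(0, 0, yaw + math.pi)

# Equivalent composition with a 180-degree yaw rotation (same rotation, same numbers here)
inverted_b = quaternion_multiply(quaternion_from_euler(0, 0, math.pi), door_q)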


Example 2: create_axes_markers

	def create_axes_markers(self, msg):
		x_marker = Marker()
		x_marker.type = Marker.CYLINDER
		x_marker.scale.x = msg.scale * 0.05
		x_marker.scale.y = msg.scale * 0.05
		x_marker.scale.z = msg.scale * 0.25
		x_marker.color.a = 1.0
		y_marker = copy.deepcopy(x_marker)
		z_marker = copy.deepcopy(x_marker)
		
		x_marker.pose.position.x += x_marker.scale.z * 0.5
		y_marker.pose.position.y += y_marker.scale.z * 0.5
		z_marker.pose.position.z += z_marker.scale.z * 0.5
		x_marker.color.r = 1.0
		y_marker.color.g = 1.0
		z_marker.color.b = 1.0
		
		x_quat = tr.quaternion_from_euler(0, np.deg2rad(90), 0)
		x_marker.pose.orientation.x = x_quat[0]
		x_marker.pose.orientation.y = x_quat[1]
		x_marker.pose.orientation.z = x_quat[2]
		x_marker.pose.orientation.w = x_quat[3]
		y_quat = tr.quaternion_from_euler(np.deg2rad(-90), 0, 0)
		y_marker.pose.orientation.x = y_quat[0]
		y_marker.pose.orientation.y = y_quat[1]
		y_marker.pose.orientation.z = y_quat[2]
		y_marker.pose.orientation.w = y_quat[3]
		
		y_marker.id = 1
		z_marker.id = 2
		
		return x_marker, y_marker, z_marker
Developer: GKIFreiburg, Project: gki_pose_creator, Lines: 32, Source: pose_creator.py


Example 3: calculate_mk2_ik

    def calculate_mk2_ik(self):

        #goal_point = Point(0.298, -0.249, -0.890) home position

        goal_pose = Pose()

        # Goto position 1
        goal_point = Point(0.298, -0.249, -0.890)
        #goal_point = Point(0.298, -0.5, -1.0)
        goal_pose.position = goal_point
        quat = quaternion_from_euler(0.0, 0.0, 0.0) # roll, pitch, yaw
        goal_pose.orientation = Quaternion(*quat.tolist())
        moveit_goal = self.create_move_group_pose_goal(goal_pose, group="manipulator", end_link_name="link_7", plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        moveit_result = self.moveit_ac.get_result()

        # Goto position 2
        goal_point = Point(0.305, -0.249, -1.05)
        goal_pose.position = goal_point
        quat = quaternion_from_euler(0.0, 0.0, 0.0) # roll, pitch, yaw
        goal_pose.orientation = Quaternion(*quat.tolist())
        moveit_goal = self.create_move_group_pose_goal(goal_pose, group="manipulator", end_link_name="link_7", plan_only=False)
        rospy.loginfo("Sending goal...")
        self.moveit_ac.send_goal(moveit_goal)
        rospy.loginfo("Waiting for result...")
        self.moveit_ac.wait_for_result(rospy.Duration(10.0))
        moveit_result = self.moveit_ac.get_result()
Developer: MorS25, Project: dasl-ros-pkg, Lines: 30, Source: move_mk2_ik.py


Example 4: listen_imu

    def listen_imu(self, imu):
        # node can sleep for 16 out of 20 ms without dropping any messages
        # rospy.sleep(16.0/1000.0)
        # self.NN += 1
        # if rospy.get_time() - self.t0 > 1:
        #     rospy.logfatal("Published {} messages in 1 s, {} Hz".format(self.NN, self.NN))
        #     self.NN, self.t0 = 0, rospy.get_time()
        current_orientation = np.array([imu.orientation.x,
                                        imu.orientation.y,
                                        imu.orientation.z,
                                        imu.orientation.w])
        current_angular_speed = np.array([imu.angular_velocity.x,
                                          imu.angular_velocity.y,
                                          0.0])
                                          
        current_rpy = list(transformations.euler_from_quaternion(current_orientation))
        # No care about yaw, but must be close to zero
        current_rpy[-1] = 0
        desired_rpy = list(transformations.euler_from_quaternion(self.desired_orientation))
        desired_rpy[-1] = 0
        corrected = self.rp_pid(desired_rpy, current_rpy, current_angular_speed)
        corrected_orientation = transformations.quaternion_multiply(
            transformations.quaternion_from_euler(*corrected),
            transformations.quaternion_from_euler(*current_rpy))

        setpoint_pose = Pose(self.desired_position, corrected_orientation)

        servo_angles = self.platform.ik(setpoint_pose)
        rospy.logdebug(
            "Servo angles (deg): {}".format(np.rad2deg(servo_angles)))
        self.publish_servo_angles(servo_angles)
Developer: rcr2f, Project: advancedrobotics, Lines: 31, Source: stewart.py


Example 5: follow_pose_with_admitance_by_ft

    def follow_pose_with_admitance_by_ft(self):
        fx, fy, fz = self.get_force_movement()
        rospy.loginfo(
            "fx, fy, fz: " + str((round(fx, 3), round(fy, 3), round(fz, 3))))
        ps = Pose()
        if abs(fx) > self.fx_deadband:
            ps.position.x = (fx * self.fx_scaling) * self.frame_fixer.fx
        if abs(fy) > self.fy_deadband:
            ps.position.y = (fy * self.fy_scaling) * self.frame_fixer.fy
        if abs(fz) > self.fz_deadband:
            ps.position.z = (fz * self.fz_scaling) * self.frame_fixer.fz

        tx, ty, tz = self.get_torque_movement()
        rospy.loginfo(
            "tx, ty, tz: " + str((round(tx, 3), round(ty, 3), round(tz, 3))))

        roll = pitch = yaw = 0.0
        if abs(tx) > self.tx_deadband:
            roll += (tx * self.tx_scaling) * self.frame_fixer.tx
        if abs(ty) > self.ty_deadband:
            pitch += (ty * self.ty_scaling) * self.frame_fixer.ty
        if abs(tz) > self.tz_deadband:
            yaw += (tz * self.tz_scaling) * self.frame_fixer.tz

        q = quaternion_from_euler(roll, pitch, yaw)
        ps.orientation = Quaternion(*q)

        o = self.last_pose_to_follow.pose.orientation
        r_lastp, p_lastp, y_lastp = euler_from_quaternion([o.x, o.y, o.z, o.w])

        final_roll = r_lastp + roll
        final_pitch = p_lastp + pitch
        final_yaw = y_lastp + yaw
        self.current_pose.pose.orientation = Quaternion(*quaternion_from_euler(final_roll,
                                                                               final_pitch,
                                                                               final_yaw))

        self.current_pose.pose.position.x = self.last_pose_to_follow.pose.position.x + \
            ps.position.x
        self.current_pose.pose.position.y = self.last_pose_to_follow.pose.position.y + \
            ps.position.y
        self.current_pose.pose.position.z = self.last_pose_to_follow.pose.position.z + \
            ps.position.z

        self.current_pose.pose.position.x = self.sanitize(self.current_pose.pose.position.x,
                                                          self.min_x,
                                                          self.max_x)
        self.current_pose.pose.position.y = self.sanitize(self.current_pose.pose.position.y,
                                                          self.min_y,
                                                          self.max_y)
        self.current_pose.pose.position.z = self.sanitize(self.current_pose.pose.position.z,
                                                          self.min_z,
                                                          self.max_z)
        self.current_pose.header.stamp = rospy.Time.now()
        if self.send_goals:  # send MODIFIED GOALS
            self.pose_pub.publish(self.current_pose)
        else:
            self.last_pose_to_follow.header.stamp = rospy.Time.now()
            self.pose_pub.publish(self.last_pose_to_follow)
        self.debug_pose_pub.publish(self.current_pose)
Developer: pal-robotics, Project: move_by_ft_wrist, Lines: 60, Source: arm_by_ft_wrist_hardcoded_frame.py


Example 6: in_odom_callback

    def in_odom_callback(self, in_odom_msg):
        q = np.array([in_odom_msg.pose.pose.orientation.x,
                      in_odom_msg.pose.pose.orientation.y,
                      in_odom_msg.pose.pose.orientation.z,
                      in_odom_msg.pose.pose.orientation.w])
        p = np.array([in_odom_msg.pose.pose.position.x,
                      in_odom_msg.pose.pose.position.y,
                      in_odom_msg.pose.pose.position.z])

        e = tfs.euler_from_quaternion(q, 'rzyx')
        wqb = tfs.quaternion_from_euler(e[0], e[1], e[2], 'rzyx')
        wqc = tfs.quaternion_from_euler(e[0],  0.0,  0.0, 'rzyx')

        #### odom ####
        odom_msg = in_odom_msg
        assert(in_odom_msg.header.frame_id == self.frame_id_in)
        odom_msg.header.frame_id = self.frame_id_out
        odom_msg.child_frame_id = ""
        self.out_odom_pub.publish(odom_msg)

        #### tf ####
        if self.broadcast_tf and self.tf_pub_flag:
            self.tf_pub_flag = False
            if not self.frame_id_in == self.frame_id_out:
                br.sendTransform((0.0, 0.0, 0.0),
                                 tfs.quaternion_from_euler(0.0, 0.0, 0.0, 'rzyx'),
                                 odom_msg.header.stamp,
                                 self.frame_id_in,
                                 self.frame_id_out)

            if not self.world_frame_id == self.frame_id_out:
                br.sendTransform((0.0, 0.0, 0.0),
                                 tfs.quaternion_from_euler(0.0, 0.0, 0.0, 'rzyx'),
                                 odom_msg.header.stamp,
                                 self.world_frame_id,
                                 self.frame_id_out)

            br.sendTransform((p[0], p[1], p[2]),
                             wqb,
                             odom_msg.header.stamp,
                             self.body_frame_id,
                             self.world_frame_id)

            br.sendTransform((p[0], p[1], p[2]),
                             wqc,
                             odom_msg.header.stamp,
                             self.intermediate_frame_id,
                             self.world_frame_id)
        #### path ####
        pose = PoseStamped()
        pose.header = odom_msg.header
        pose.pose.position.x = p[0]
        pose.pose.position.y = p[1]
        pose.pose.position.z = p[2]
        pose.pose.orientation.x = q[0]
        pose.pose.orientation.y = q[1]
        pose.pose.orientation.z = q[2]
        pose.pose.orientation.w = q[3]

        self.path.append(pose)
Developer: groundmelon, Project: uav_utils, Lines: 60, Source: tf_assist.py
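
Example 6 above passes an explicit axes='rzyx' argument. A brief clarification, hedged and not part of the original project: the default 'sxyz' means static (extrinsic) rotations about the fixed X, then Y, then Z axes, whereas 'rzyx' means rotating (intrinsic) rotations about the body Z, then Y, then X axes, so its three angles are effectively (yaw, pitch, roll); that is why wqc = quaternion_from_euler(e[0], 0.0, 0.0, 'rzyx') keeps only the heading. The two conventions describe the same rotation when the angles are reversed:

import math
from tf import transformations as tfs

roll, pitch, yaw = 0.1, 0.2, 0.3  # radians, arbitrary illustrative values

# Extrinsic X-Y-Z, the default convention ...
q_sxyz = tfs.quaternion_from_euler(roll, pitch, yaw)            # axes='sxyz'
# ... equals intrinsic Z-Y-X with the same angles in reverse order
q_rzyx = tfs.quaternion_from_euler(yaw, pitch, roll, 'rzyx')

print(q_sxyz)
print(q_rzyx)  # numerically the same quaternion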


Example 7: publish_gaussians

def publish_gaussians():
    # Use TF to calculate relative area definitions

    # Gaussians depending on edges of furniture and object/furniture positions
    br.sendTransform((0.205067,-0.0136504 , 0), 
                     quaternion_from_euler(0,0,0),
                     rospy.Time.now(),
                     "table_gauss_mean",
                     "plate_edge")

    br.sendTransform((0.366749, 0.088665, 0), 
                     quaternion_from_euler(0,0,0),
                     rospy.Time.now(),
                     "drawer55_gauss_mean",
                     "drawer55_edge")

    br.sendTransform((0.1462250, 0.1426716 , 0), 
                     quaternion_from_euler(0,0,0),
                     rospy.Time.now(),
                     "stove_gauss_mean",
                     "placemat_edge")
    # TODO: here I manually set this to minus, but this should actually be calculated
    # based on the location of the door of the cupboard
    br.sendTransform((0.42658, -0.107175 , 0), 
                     quaternion_from_euler(0,0,0),
                     rospy.Time.now(),
                     "cupboard_gauss_mean",
                     "drawer115_edge")
Developer: hcaigroup, Project: morse_ros, Lines: 28, Source: label_kinect_ias_data.py


Example 8: publish_gaussians

def publish_gaussians():
    # Use TF to calculate relative area definitions
    br.sendTransform((0.9718, 2.6195, 0),
                     quaternion_from_euler(0,0,3.1415) ,
                     rospy.Time.now(),
                     "drawer_gaussian_mean",
                     "map")

    br.sendTransform((2.17495, 2.6431, 0),
                     quaternion_from_euler(0,0,0) ,
                     rospy.Time.now(),
                     "table_gaussian_mean",
                     "map")
    
    br.sendTransform((0.76371, 2.17398, 0),
                     quaternion_from_euler(0,0,3.1415) ,
                     rospy.Time.now(),
                     "stove_gaussian_mean",
                     "map")

    br.sendTransform((0.77843, 3.21516, 0),
                     quaternion_from_euler(0,0,3.1415) ,
                     rospy.Time.now(),
                     "cupboard_gaussian_mean",
                     "map")
Developer: hcaigroup, Project: morse_ros, Lines: 25, Source: write_true_static_obj_positions.py


Example 9: publish_gaussians

def publish_gaussians():
    
    # Gaussians depending on edges of furniture and object/furniture positions
    br.sendTransform((0.205067,-0.0136504 , 0), 
                     quaternion_from_euler(0,0,0),
                     rospy.Time.now(),
                     "table_gauss_mean",
                     "plate_edge")

    br.sendTransform((0.366749, 0.088665, 0), 
                     quaternion_from_euler(0,0,0),
                     rospy.Time.now(),
                     "drawer_gauss_mean",
                     "drawer_edge")

    br.sendTransform((0.1462250, 0.1426716 , 0), 
                     quaternion_from_euler(0,0,0),
                     rospy.Time.now(),
                     "stove_gauss_mean",
                     "placemat_edge")

    br.sendTransform((0.42658, 0.107175 , 0), 
                     quaternion_from_euler(0,0,0),
                     rospy.Time.now(),
                     "cupboard_gauss_mean",
                     "cupboard_edge")
Developer: hcaigroup, Project: morse_ros, Lines: 26, Source: convert_ias_msek.py


Example 10: test_avg_tag_localization

    def test_avg_tag_localization(self):
        self.setup()

        # Publish a matching apriltag observation (from the static publisher in the test file)
        msg_tag = AprilTags()
        tag_100 = TagDetection()
        tag_100.id = 100
        trans = tag_100.transform.translation
        rot = tag_100.transform.rotation
        (trans.x, trans.y, trans.z) = (1,-1,0)
        (rot.x, rot.y, rot.z, rot.w) = (0,0,1,0)
        msg_tag.detections.append(tag_100)
        tag_101 = TagDetection()
        tag_101.id = 101
        trans = tag_101.transform.translation
        rot = tag_101.transform.rotation
        (trans.x, trans.y, trans.z) = (1,0,0)
        (rot.x, rot.y, rot.z, rot.w) = tr.quaternion_from_euler(0,0,np.pi/2)
        msg_tag.detections.append(tag_101)
        self.pub_tag.publish(msg_tag)

        # Wait for the node to publish a robot transform
        tfbuf = tf2_ros.Buffer()
        tfl = tf2_ros.TransformListener(tfbuf)
        try:
            Tr_w = tfbuf.lookup_transform("world", "duckiebot", rospy.Time(), rospy.Duration(5))
        except(tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
            self.assertFalse(True, "Test timed out waiting for the transform to be broadcast.")

        trans = (Tr_w.transform.translation.x, Tr_w.transform.translation.y, Tr_w.transform.translation.z)
        rot = (Tr_w.transform.rotation.x, Tr_w.transform.rotation.y, Tr_w.transform.rotation.z, Tr_w.transform.rotation.w)

        numpy.testing.assert_array_almost_equal(trans, (2, 0.5, 0))
        numpy.testing.assert_array_almost_equal(rot, tr.quaternion_from_euler(0,0,3*np.pi/4))
Developer: Duckietown-NCTU, Project: Software, Lines: 34, Source: localization_tester_node.py


Example 11: plan_path_cb

 def plan_path_cb(self, req):
     rospy.loginfo('Received [plan_path] request.')
     p1, p2 = (req.start.x, req.start.y), (req.end.x, req.end.y)
     path = self.planner.plan(p1, p2)
     try:
         self.visualizer.visualize(self.planner.graph)
     except:
         # If a student code in RandomizedRoadmapPlanner does not have a
         # graph object, or it is not compatible with GraphVisualizer,
         # silently skip visualization
         pass
     if not len(path):
         rospy.logwarn(self.__class__.__name__ + ': Failed to find a path.')
         return None
     # Convert the path output by the planner to ROS message
     path_msg = Path()
     path_msg.header.frame_id = 'odom'
     path_msg.header.stamp = rospy.Time.now()
     q_start = quaternion_from_euler(0, 0, req.start.theta)
     q_end = quaternion_from_euler(0, 0, req.end.theta)
     for pt in path:
         p = PoseStamped()
         p.pose.position = Point(pt[0], pt[1], 0.0)
         p.pose.orientation = Quaternion(*q_start)
         path_msg.poses.append(p)
     # The randomized roadmap planner does not consider orientation, thus
     # we manually add a final path segment which rotates the robot to the
     # desired orientation
     last = path_msg.poses[-1]
     last.pose.orientation = Quaternion(*q_end)
     path_msg.poses.append(last)
     return path_msg
Developer: minhnh, Project: hbrs_courses, Lines: 32, Source: path_planner.py


Example 12: drawer

    def drawer(self, point):
        #Prepare
        GRIPPER_OPEN = .08
        GRIPPER_CLOSE = .003
        MAX_HANDLE_SIZE = .03
        linear_movement = self.behaviors.movement
        gripper = self.robot.left_gripper

        #gripper.open(True, position=GRIPPER_OPEN)
        #linear_movement.gripper_open()
        self.open_gripper()
        linear_movement.move_absolute((self.start_location_drawer[0], 
            #np.matrix(tr.quaternion_from_euler(np.radians(90.), 0, 0))), 
            np.matrix(tr.quaternion_from_euler(np.radians(90.), 0, 0))), 
            stop='pressure_accel', pressure=1000)

        #Reach
        success, reason, touchloc_bl = self.behaviors.reach(point, 300, #np.matrix([0.0, 0, 0]).T, 
                             reach_direction=np.matrix([0.1, 0, 0]).T, 
                             orientation=np.matrix(tr.quaternion_from_euler(np.radians(90.), 0, 0)))

        #Error recovery
        if not success:
            error_msg = 'Reach failed due to "%s"' % reason
            rospy.loginfo(error_msg)
            rospy.loginfo('Failure recovery: moving back')
            try:
                linear_movement.move_relative_gripper(np.matrix([-.25,0,0]).T, stop='pressure_accel', pressure=300)
            except lm.RobotSafetyError, e:
                rospy.loginfo('robot safety error %s' % str(e))
            self.behaviors.movement.move_absolute(self.start_location_drawer, stop='accel', pressure=300)
            return False, 'reach failed', point
Developer: rgleichman, Project: berkeley_demos, Lines: 32, Source: application_behaviors.py


Example 13: pre_place_pose

        def pre_place_pose(ud):
            robot.torso.high()

            goal_pose = PoseStamped()
            goal_pose.header.stamp = rospy.Time.now()
            goal_pose.header.frame_id = 'map'
            goal_pose.pose.position.x = -4.55638664735
            goal_pose.pose.position.y = 4.99959353359
            goal_pose.pose.orientation = Quaternion(*quaternion_from_euler(0, 0, 0.395625992))
            ControlToPose(robot, goal_pose, ControlParameters(0.5, 1.0, 0.3, 0.3, 0.3, 0.01, 0.1)).execute({})

            robot.torso.wait_for_motion_done()
            robot.speech.speak('I am now raising my arm')

            arm.resolve()._send_joint_trajectory([[-1.1, 0.044, 0.80, 0.297, 0.934, -0.95, 0.4]],
                                                 timeout=rospy.Duration(0))
            arm.resolve().wait_for_motion_done()

            goal_pose.pose.orientation = Quaternion(*quaternion_from_euler(0, 0, 1.9656259917))
            ControlToPose(robot, goal_pose, ControlParameters(0.5, 1.0, 0.3, 0.3, 0.3, 0.01, 0.1)).execute({})

            arm.resolve().wait_for_motion_done()

            arm.resolve()._send_joint_trajectory([[-0.92, 0.044, 0.80, 0.497, 0.934, -0.95, 0.4]],
                                                 timeout=rospy.Duration(0))
            arm.resolve().wait_for_motion_done()
            rospy.sleep(1.0)

            return 'done'
Developer: tue-robotics, Project: tue_robocup, Lines: 29, Source: custom_place.py


Example 14: __init__

    def __init__ (self):
        rospy.init_node('move_base', anonymous=True)
        
        rospy.on_shutdown(self.shutdown)
        quaternions = list()

        square_size = 1.0 # meters
        
        # The first two target orientations are 90 degrees (horizontal pointing left)
        q_turn_angle = quaternion_from_euler(0, 0, pi / 2, axes='sxyz')
        q = Quaternion(*q_turn_angle)
        # Append the first turn
        rospy.loginfo(q)
        quaternions.append(q)
        # Append the second turn
        quaternions.append(q)

        # The second two target orientations are 270 degrees (horizontal pointing right)
        q_turn_angle = quaternion_from_euler(0, 0, 3 * pi / 2, axes='sxyz')
        q = Quaternion(*q_turn_angle)
        # Append the third turn
        quaternions.append(q)
        # Append the fourth turn
        quaternions.append(q)
        
        waypoints = list()
        waypoints.append(Pose(Point(square_size, 0.0, 0.0), quaternions[0]))
        waypoints.append(Pose(Point(square_size, square_size, 0.0), quaternions[1]))
        waypoints.append(Pose(Point(0.0, square_size, 0.0), quaternions[2]))
        waypoints.append(Pose(Point(0.0, 0.0, 0.0), quaternions[3]))
        
        # Initialize the visualization markers for RViz
        self.init_markers()
        
        # Set a visualization marker at each waypoint        
        for waypoint in waypoints:           
            p = Point()
            p = waypoint.position
            self.markers.points.append(p)
            
        # Publisher to manually control the robot (e.g. to stop it)
        self.cmd_vel_pub = rospy.Publisher('cmd_vel', Twist)
        
        # Subscribe to the move_base action server
        self.move_base = actionlib.SimpleActionClient("move_base", MoveBaseAction)
        
        rospy.loginfo("Waiting for move_base action server...")
        self.move_base.wait_for_server(rospy.Duration(60))
        rospy.loginfo("Connected to move base server")
            
        rospy.loginfo("Starting navigation test")
        
        for i in range(4):
            self.marker_pub.publish(self.markers)
            self.goal = MoveBaseGoal()
            self.goal.target_pose.pose = waypoints[i]
            self.goal.target_pose.header.frame_id = 'map'
            self.goal.target_pose.header.stamp = rospy.Time.now()
            self.move()
Developer: faustino-lukolo, Project: Cpts483-Intro-To-Robotics, Lines: 59, Source: move_base.py


Example 15: _publish_tf

    def _publish_tf(self, stamp):
        # check that we know which frames we need to publish from
        if self.map is None or self.base_frame is None:
            rospy.loginfo('Not publishing until map and odometry frames found')
            return

        # calculate the mean position
        x = np.mean([p.x for p in self.particles])
        y = np.mean([p.y for p in self.particles])
        theta = np.mean([p.theta for p in self.particles]) #TODO - wraparound

        # map to base_link
        map2base_frame = PyKDL.Frame(
            PyKDL.Rotation.Quaternion(*transformations.quaternion_from_euler(0, 0, theta)),
            PyKDL.Vector(x,y,0)
        )

        odom2base_frame = tf2_kdl.transform_to_kdl(self.tf_buffer.lookup_transform(
            target_frame=self.odom_frame,
            source_frame=self.base_frame,
            time=stamp,
            timeout=rospy.Duration(4.0)
        ))

        # derive frame according to REP105
        map2odom = map2base_frame * odom2base_frame.Inverse() 

        br = tf2_ros.TransformBroadcaster()
        t = TransformStamped()

        t.header.stamp = stamp
        t.header.frame_id = self.map.frame
        t.child_frame_id = self.odom_frame
        t.transform.translation = Vector3(*map2odom.p)
        t.transform.rotation = Quaternion(*map2odom.M.GetQuaternion())
        br.sendTransform(t)


        # for Debugging
        if False:
            t.header.stamp = stamp
            t.header.frame_id = self.map.frame
            t.child_frame_id = self.base_frame+"_old"
            t.transform.translation = Vector3(*map2base_frame.p)
            t.transform.rotation = Quaternion(*map2base_frame.M.GetQuaternion())
            br.sendTransform(t)

        if self.publish_cloud:
            msg = PoseArray(
                header=Header(stamp=stamp, frame_id=self.map.frame),
                poses=[
                    Pose(
                        position=Point(p.x, p.y, 0),
                        orientation=Quaternion(*transformations.quaternion_from_euler(0, 0, p.theta))
                    )
                    for p in self.particles
                ]
            )
            self.pose_array.publish(msg)
Developer: g41903, Project: MIT-RACECAR, Lines: 59, Source: localizer.py


Example 16: generate_grasp_poses

    def generate_grasp_poses(self, object_pose):
        # Compute all the points of the sphere with step X
        # http://math.stackexchange.com/questions/264686/how-to-find-the-3d-coordinates-on-a-celestial-spheres-surface
        radius = self._grasp_desired_distance
        ori_x = 0.0
        ori_y = 0.0
        ori_z = 0.0
        sphere_poses = []
        rotated_q = quaternion_from_euler(0.0, 0.0, math.radians(180))

        yaw_qtty = int((self._max_degrees_yaw - self._min_degrees_yaw) / self._step_degrees_yaw)  # NOQA
        pitch_qtty = int((self._max_degrees_pitch - self._min_degrees_pitch) / self._step_degrees_pitch)  # NOQA
        info_str = "Creating poses with parameters:\n" + \
            "Radius: " + str(radius) + "\n" \
            "Yaw from: " + str(self._min_degrees_yaw) + \
            " to " + str(self._max_degrees_yaw) + " with step " + \
            str(self._step_degrees_yaw) + " degrees.\n" + \
            "Pitch from: " + str(self._min_degrees_pitch) + \
            " to " + str(self._max_degrees_pitch) + \
            " with step " + str(self._step_degrees_pitch) + " degrees.\n" + \
            "Total: " + str(yaw_qtty) + " yaw * " + str(pitch_qtty) + \
            " pitch = " + str(yaw_qtty * pitch_qtty) + " grasp poses."
        rospy.loginfo(info_str)

        # altitude is yaw
        for altitude in range(self._min_degrees_yaw, self._max_degrees_yaw, self._step_degrees_yaw):  # NOQA
            altitude = math.radians(altitude)
            # azimuth is pitch
            for azimuth in range(self._min_degrees_pitch, self._max_degrees_pitch, self._step_degrees_pitch):  # NOQA
                azimuth = math.radians(azimuth)
                # This gets all the positions
                x = ori_x + radius * math.cos(azimuth) * math.cos(altitude)
                y = ori_y + radius * math.sin(altitude)
                z = ori_z + radius * math.sin(azimuth) * math.cos(altitude)
                # this gets all the vectors pointing outside of the center
                # quaternion as x y z w
                q = quaternion_from_vectors([radius, 0.0, 0.0], [x, y, z])
                # Cannot compute so the vectors are parallel
                if q is None:
                    # with this we add the missing arrow
                    q = rotated_q
                # We invert the orientations to look inwards by multiplying
                # with a quaternion 180deg rotation on yaw
                q = quaternion_multiply(q, rotated_q)

                # We actually want roll to be always 0.0 so we approach
                # the object with the gripper always horizontal
                # this rotation can be tuned with the dynamic params
                # multiplying later on
                roll, pitch, yaw = euler_from_quaternion(q)
                q = quaternion_from_euler(math.radians(0.0), pitch, yaw)

                x += object_pose.pose.position.x
                y += object_pose.pose.position.y
                z += object_pose.pose.position.z
                current_pose = Pose(
                    Point(x, y, z), Quaternion(*q))
                sphere_poses.append(current_pose)
        return sphere_poses
Developer: rizasif, Project: Robotics_intro, Lines: 59, Source: grasps_server.py


Example 17: process_touch_pose

        def process_touch_pose(ud):
            ######################################################################### 
            # transformation logic for manipulation
            # put touch pose in torso frame
            frame_B_touch = util.pose_msg_to_mat(ud.touch_click_pose)
            torso_B_frame = self.get_transform("torso_lift_link", 
                                               ud.touch_click_pose.header.frame_id)
            if torso_B_frame is None:
                return 'tf_failure'
            torso_B_touch_bad = torso_B_frame * frame_B_touch

            # rotate pixel23d the right way
            t_pos, t_quat = util.pose_mat_to_pq(torso_B_touch_bad)
            # rotate so x axis pointing out
            quat_flip_rot = tf_trans.quaternion_from_euler(0.0, np.pi/2.0, 0.0)
            quat_flipped = tf_trans.quaternion_multiply(t_quat, quat_flip_rot)
            # rotate around x axis so the y axis is flat
            mat_flipped = tf_trans.quaternion_matrix(quat_flipped)
            rot_angle = np.arctan(-mat_flipped[2,1] / mat_flipped[2,2])
            quat_ortho_rot = tf_trans.quaternion_from_euler(rot_angle + np.pi, 0.0, 0.0)
            quat_flipped_ortho = tf_trans.quaternion_multiply(quat_flipped, quat_ortho_rot)

            torso_B_touch = util.pose_pq_to_mat(t_pos, quat_flipped_ortho)

            # offset the touch location by the approach transform
            appr_B_tool = self.get_transform(self.tool_approach_frame, self.tool_frame) 
            if appr_B_tool is None:
                return 'tf_failure'
            torso_B_touch_appr = torso_B_touch * appr_B_tool

            # project the approach back into the wrist
            appr_B_wrist = self.get_transform(self.tool_frame, 
                                              self.arm + "_wrist_roll_link") 
            if appr_B_wrist is None:
                return 'tf_failure'

            torso_B_wrist = torso_B_touch_appr * appr_B_wrist
            ######################################################################### 
            # create PoseStamped
            appr_wrist_ps = util.pose_mat_to_stamped_msg('torso_lift_link',
                                                         torso_B_wrist)
            appr_tool_ps = util.pose_mat_to_stamped_msg('torso_lift_link', 
                                                        torso_B_touch_appr)
            touch_tool_ps = util.pose_mat_to_stamped_msg('torso_lift_link', 
                                                         torso_B_touch)

            # visualization
            self.wrist_pub.publish(appr_wrist_ps)
            self.appr_pub.publish(appr_tool_ps)
            self.touch_pub.publish(touch_tool_ps)

            # set return values
            ud.appr_wrist_mat = torso_B_wrist
            ud.appr_tool_ps = appr_tool_ps
            ud.touch_tool_ps = touch_tool_ps

            
            return 'succeeded'
Developer: rgleichman, Project: berkeley_demos, Lines: 58, Source: sm_touch_face.py


Example 18: motionToTrajectory

    def motionToTrajectory(self, action, frame_id):
        """ Convert an arm movement action into a trajectory. """
        ps = PoseStamped()
        ps.header.frame_id = frame_id
        ps.pose = action.goal
        pose = self._listener.transformPose(self.root, ps)
        rospy.loginfo("Parsing move to:\n" + str(pose))
        
        # create IK request
        request = GetPositionIKRequest()
        request.timeout = rospy.Duration(self.timeout)

        request.ik_request.pose_stamped.header.frame_id = self.root;
        request.ik_request.ik_link_name = self.tip;
        request.ik_request.pose_stamped.pose.position.x = pose.pose.position.x
        request.ik_request.pose_stamped.pose.position.y = pose.pose.position.y
        request.ik_request.pose_stamped.pose.position.z = pose.pose.position.z

        e = euler_from_quaternion([pose.pose.orientation.x,pose.pose.orientation.y,pose.pose.orientation.z,pose.pose.orientation.w])
        e = [i for i in e]
        if self.dof < 6:
            # 5DOF, so yaw angle = atan2(Y,X-shoulder offset)
            e[2] = atan2(pose.pose.position.y, pose.pose.position.x-self.offset)
        if self.dof < 5:
            # 4 DOF, so yaw as above AND no roll
            e[0] = 0
        q =  quaternion_from_euler(e[0], e[1], e[2])

        request.ik_request.pose_stamped.pose.orientation.x = q[0]
        request.ik_request.pose_stamped.pose.orientation.y = q[1]
        request.ik_request.pose_stamped.pose.orientation.z = q[2]
        request.ik_request.pose_stamped.pose.orientation.w = q[3]

        request.ik_request.ik_seed_state.joint_state.name = self.arm_solver_info.kinematic_solver_info.joint_names
        request.ik_request.ik_seed_state.joint_state.position = [self.servos[joint] for joint in request.ik_request.ik_seed_state.joint_state.name]

        # get IK, wiggle if needed
        tries = 0
        pitch = e[1]
        print "roll", e[0]
        while tries < 80:
            try:
                response = self._get_ik_proxy(request)
                if response.error_code.val == response.error_code.SUCCESS:
                    break
                else:
                    tries += 1
                    # wiggle gripper
                    pitch = e[1] + ((-1)**tries)*((tries+1)/2)*0.025
                    # update quaternion
                    q = quaternion_from_euler(e[0], pitch, e[2])
                    request.ik_request.pose_stamped.pose.orientation.x = q[0]
                    request.ik_request.pose_stamped.pose.orientation.y = q[1]
                    request.ik_request.pose_stamped.pose.orientation.z = q[2]
                    request.ik_request.pose_stamped.pose.orientation.w = q[3]
            except rospy.ServiceException, e:
                print "Service did not process request: %s"%str(e)
Developer: hanfeng-github, Project: vanadium-ros-pkg, Lines: 57, Source: simple_arm_server.py


Example 19: on_ptu_state_msg

 def on_ptu_state_msg(self, msg):
     pan1 = -msg.position[0]
     pan_q = quaternion_from_euler(0, 0, -pan1)
     tilt_q = quaternion_from_euler(0, -msg.position[1], 0)
     b2 = tf.TransformBroadcaster()
     b2.sendTransform( (0.0, 0, 0.065),
                     tilt_q, 
                     rospy.Time.from_sec(time.time()),
                     'rover_amalia_turret_tilt_hinge',
                     'rover_amalia_turret_tilt_base')
Developer: team-diana, Project: gazebo-models, Lines: 10, Source: flir_pantilt_broadcaster-tilt.py


Example 20: axis_cb

def axis_cb(data):
    global broadcaster, base_frame, base_name
    pan = math.pi + data.pan * math.pi / 180.
    tilt = -data.tilt * math.pi / 180.
    q = quaternion_from_euler(0,0,pan)
    now = rospy.Time.now()
    broadcaster.sendTransform((0,0,0),
                              q,
                              now,
                              base_name,   # child frame -- assumed; the original listing is truncated here
                              base_frame)  # parent frame -- assumed
