diff --git a/config/gate_locations_no_perturbation.yaml b/config/gate_locations_no_perturbation.yaml
new file mode 100644
index 0000000..1185656
--- /dev/null
+++ b/config/gate_locations_no_perturbation.yaml
@@ -0,0 +1,70 @@
+Gate1:
+ location: [[-0.009051204, -34.8755, 3.071861], [-0.008949876, -32.9505, 3.071861], [-0.008949876, -32.9505, 1.134362], [-0.009051204, -34.8755, 1.134362]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate13:
+ location: [[1.237332, 9.001728, 2.9625], [3.162332, 9.001728, 2.9625], [3.162332, 9.001728, 1.025], [1.237332, 9.001728, 1.025]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate3:
+ location: [[-0.3703381, -12.99827, 4.728], [-0.3699976, -11.45827, 4.728], [-0.3699976, -11.45827, 3.177999], [-0.3703381, -12.99827, 3.177999]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate4:
+ location: [[-31.56487, -13.929, 6.99714], [-30.04735, -13.929, 7.259325], [-29.78347, -13.929, 5.731955], [-31.30098, -13.929, 5.469769]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate5:
+ location: [[-20.94317, 14.607, 2.515013], [-20.94283, 16.147, 2.515013], [-20.94283, 16.147, 0.965013], [-20.94317, 14.607, 0.965013]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate7:
+ location: [[16.2506, 41.63, 11.32092], [17.79061, 41.63, 11.32092], [17.79061, 41.63, 9.770914], [16.2506, 41.63, 9.770914]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate8:
+ location: [[-3.430186, 11.79953, 6.122593], [-3.430186, 12.97997, 6.122593], [-3.430186, 11.79952, 4.911986], [-3.430186, 12.97997, 4.911986]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate9:
+ location: [[-6.40867, -12.13678, 4.152941], [-8.208672, -12.13678, 4.152941], [-6.408669, -12.13678, 2.306941], [-8.208672, -12.13678, 2.306941]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate10:
+ location: [[18.15111, 3.631447, 7.229498], [16.35111, 3.63155, 7.229498], [18.15111, 3.631447, 5.383497], [16.35111, 3.63155, 5.383497]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate11:
+ location: [[-27.01367, 2.543159, 7.515942], [-28.81367, 2.543289, 7.515942], [-27.01367, 2.543159, 5.669941], [-28.81367, 2.543289, 5.669941]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate12:
+ location: [[18.40823, 13.60605, 9.143385], [16.33437, 14.09245, 9.143507], [18.90706, 15.73291, 9.14324], [16.83319, 16.21931, 9.143364]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate23:
+ location: [[-9.328671, 7.773174, 2.942941], [-11.12867, 7.773277, 2.942941], [-9.328669, 7.773174, 1.096941], [-11.12867, 7.773277, 1.096941]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate6:
+ location: [[-9.14867, 30.62316, 3.820941], [-10.94867, 30.62329, 3.820941], [-9.148668, 30.62316, 1.974941], [-10.94867, 30.62329, 1.974941]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate2:
+ location: [[0.8248205, 27.86797, 4.16025], [3.834821, 27.86797, 4.16025], [3.834821, 27.86797, 0.9327497], [0.8248205, 27.86797, 0.9327497]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate14:
+ location: [[-8.674376, -29.53623, 4.25625], [-6.545984, -27.40784, 4.25625], [-6.545984, -27.40784, 1.02875], [-8.674376, -29.53623, 1.02875]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate15:
+ location: [[5.744822, -11.79203, 4.16025], [8.75482, -11.79203, 4.16025], [8.75482, -11.79203, 0.9327497], [5.744822, -11.79203, 0.9327497]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate16:
+ location: [[-13.73218, 28.64197, 12.65825], [-13.73218, 26.23396, 12.65825], [-13.73218, 26.23396, 10.07625], [-13.73218, 28.64197, 10.07625]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate17:
+ location: [[-20.57, -13.29803, 3.46425], [-20.57, -15.70603, 3.46425], [-20.57, -15.70603, 0.8822498], [-20.57, -13.29803, 0.8822498]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate18:
+ location: [[13.51782, 21.76397, 3.61825], [13.51782, 19.35596, 3.61825], [13.51782, 19.35596, 1.03625], [13.51782, 21.76397, 1.03625]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate19:
+ location: [[15.96136, -17.26184, 12.31488], [18.36892, -17.26184, 12.36067], [18.41802, -17.26184, 9.779139], [16.01046, -17.26184, 9.733349]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate20:
+ location: [[-25.39118, 25.019, 7.856133], [-22.98318, 25.019, 7.856133], [-22.98318, 25.019, 5.274133], [-25.39118, 25.019, 5.274133]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate21:
+ location: [[15.68563, 38.50578, 8.376249], [17.81402, 40.63416, 8.376249], [17.81402, 40.63416, 5.14875], [15.68563, 38.50578, 5.14875]]
+ perturbation: [0.0, 0.0, 0.0]
+Gate22:
+ location: [[4.803625, -27.37584, 4.18125], [6.932016, -29.50423, 4.18125], [6.932016, -29.50423, 0.9537499], [4.803625, -27.37584, 0.9537499]]
+ perturbation: [0.0, 0.0, 0.0]
+
diff --git a/launch/sim.launch b/launch/sim.launch
index f90b075..1c8f3a9 100644
--- a/launch/sim.launch
+++ b/launch/sim.launch
@@ -4,6 +4,8 @@
+
+
@@ -28,7 +30,7 @@
-
+
diff --git a/requirements.txt b/requirements.txt
index d733215..63060b1 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,4 @@
numpy-quaternion==2019.2.20.16.38.5
simple-pid==0.1.5
numpy==1.16.0
-
-
+opencv-python==4.0.0.21
diff --git a/scripts/control.py b/scripts/control.py
index a3e9308..a7ea676 100755
--- a/scripts/control.py
+++ b/scripts/control.py
@@ -10,13 +10,14 @@
from collections import defaultdict
import numpy as np
import quaternion
-import math_utils
+from lib import math_utils
+from std_msgs.msg import String
from simple_pid import PID
from mav_msgs.msg import RateThrust
from sensor_msgs.msg import Imu
from geometry_msgs.msg import Vector3, Quaternion, Pose
from flightgoggles.msg import IRMarkerArray, IRMarker
-from lib.graphix import camera_ray, ray_p_dist
+from lib.vision import camera_ray, ray_p_dist
from simcontrol.msg import State
GRAVITY = 9.81
@@ -34,25 +35,19 @@ def __init__(self, rate, use_gt_state=False):
self.ir_beacons = rospy.Subscriber('/uav/camera/left/ir_beacons', IRMarkerArray, self._ir_callback)
self.target_pub = rospy.Publisher('/simcontrol/target_pose', Pose, queue_size=1)
self.state_pub = rospy.Publisher('/simcontrol/state_estimate', State, queue_size=1)
+ self.gate_pub = rospy.Publisher('/simcontrol/target_gate', String, queue_size=1)
- self.imu_topic = '/uav/sensors/imu'
- self.imu_subscriber = rospy.Subscriber(self.imu_topic, Imu, self._imu_callback)
self.tf_listener = tf.TransformListener()
self.tf_prev_x = self.init_position
self.tf_prev_xdot = np.array([0.0, 0.0, 0.0])
self.tf_prev_xddot = np.array([0.0, 0.0, 0.0])
- self.imu_latest_x = self.init_position
- self.imu_latest_xdot = np.array([0.0, 0.0, 0.0])
- self.imu_latest_xddot = np.array([0.0, 0.0, 0.0])
- self.imu_latest_omega = np.array([0.0, 0.0, 0.0])
self.is_armed = False
self.gate_names = rospy.get_param("/gate_names") if rospy.has_param('/gate_names') else None
self.use_ir_markers = rospy.get_param("/use_ir_markers") if rospy.has_param('/use_ir_markers') else True
self.target_gate = 0
self.target_lead_distance = 5.0
- self.reference_lead_steps = 2
- self.target_update_weight = 1.0
+ self.target_update_weight = 0.1
self.target_vector = np.float32([0, 0, 0])
self.prev_tf_time = rospy.Time.now()
@@ -86,9 +81,6 @@ def linear_interpolate(self, p, n):
def get_gate_info(self):
for gate in range(1, 24):
nominal_location = np.float32(rospy.get_param('/uav/Gate%d/nominal_location' % gate))
- self.gate_markers['Gate%d' % gate] = {}
- for i in range(1, 5):
- self.gate_markers['Gate%d' % gate][i] = nominal_location[i-1,:]
self.gate_normal['Gate%d' % gate] = -np.cross(nominal_location[1,:] - nominal_location[0,:], nominal_location[2,:] - nominal_location[0,:])
self.gate_normal['Gate%d' % gate] /= np.linalg.norm(self.gate_normal['Gate%d' % gate])
perturbation_bound = np.float32(rospy.get_param('/uav/Gate%d/perturbation_bound' % gate))
@@ -110,7 +102,8 @@ def get_gate_info(self):
n = 10
ev, total = self.linear_interpolate(p, n)
- self.path = ev[:total, :]
+ self.reference_waypoints = p
+ self.reference_trajectory = ev[:total, :]
def get_init_pose(self):
if not rospy.has_param('/uav/flightgoggles_uav_dynamics/init_pose'):
@@ -128,10 +121,11 @@ def loop(self):
rate = rospy.Rate(self.rate)
while not rospy.is_shutdown():
target_gate_name = self.gate_names[self.target_gate]
+ self.gate_pub.publish(String(data=target_gate_name))
state_estimate = self.state_estimate()
x = state_estimate[0]
q_b = state_estimate[3]
- new_target_vector = self.reference_target_vector(x)
+ new_target_vector = self._gaze_direction(x)
# We only want to use IR markers to correct for gate perturbation
# If we are close to the target gate or there is no perturbation, just use reference trajectory
if self.use_ir_markers:
@@ -160,10 +154,12 @@ def loop(self):
def _gaze_direction(self, x):
gaze_target_gate = min(len(self.gate_names) - 1, self.target_gate+1)
pointing_direction = self.gate_mean[self.gate_names[gaze_target_gate]] - x
+ gaze_waypoint = 2*(self.target_gate+1)
+ pointing_direction = self.reference_waypoints[gaze_waypoint] - x
if self.use_ir_markers:
ir_ray = self.ir_waypoint()
- if ir_ray is not None:
- pointing_direction = ir_ray
+ #if ir_ray is not None and self.visible_target_markers >= 3:
+ #pointing_direction = ir_ray
return pointing_direction
def _orientation_quaternion(self, x):
@@ -174,34 +170,14 @@ def _orientation_quaternion(self, x):
return math_utils.shortest_arc(np.array([1., 0., 0.]), gaze_direction)
def _set_target(self, target_vector, state_estimate):
- x, xdot, xddot, q = state_estimate
-
- current_state = State()
- current_state.header.stamp = rospy.Time.now()
- current_state.header.frame_id = 'state_estimate'
- current_state.pose.position = Vector3(x[0], x[1], x[2])
- current_state.pose.orientation = Quaternion(q.x, q.y, q.z, q.w)
- current_state.linear_velocity = Vector3(xdot[0], xdot[1], xdot[2])
- current_state.linear_acceleration = Vector3(xddot[0], xddot[1], xddot[2])
- self.state_pub.publish(current_state)
-
+ x, _, _, _ = state_estimate
target_pose = Pose()
target_pose.position = Vector3(target_vector[0], target_vector[1], target_vector[2])
-
- q = self._orientation_quaternion(x)
+ # Using GT for the gaze direction, since we still haven't solved state estimation
+ q = self._orientation_quaternion(x)
target_pose.orientation = Quaternion(q.x, q.y, q.z, q.w)
self.target_pub.publish(target_pose)
- def reference_target_vector(self, x):
- closest_dist = 1e9
- closest_i = 0
- for i in range(self.path.shape[0])[1:]:
- new_d = np.linalg.norm(self.path[i, :] - x)
- if new_d < closest_dist:
- closest_dist = new_d
- closest_i = i
- return self.tangent_unit_vector(self.path[closest_i, :], self.path[closest_i+self.reference_lead_steps, :])
-
def ir_waypoint(self):
center = np.float32([0, 0])
target_gate_name = None
@@ -227,7 +203,7 @@ def ir_waypoint(self):
gate_visible_area += 0.5 * np.linalg.norm(np.cross(r2 - r4, r3 - r4))
self.latest_gate_visible_area = gate_visible_area
# Unit vector in the direction of the average of IR markers in pixel space
- return np.float32(camera_ray(mean_pixel[0], mean_pixel[1], q_b, corr=True))
+ return np.float32(camera_ray(mean_pixel[0:2], q_b, corr=True))
def _ir_callback(self, msg):
self.latest_markers = defaultdict(dict)
@@ -235,17 +211,6 @@ def _ir_callback(self, msg):
for marker in msg.markers:
self.latest_markers[marker.landmarkID.data][marker.markerID.data] = np.array([marker.x, marker.y])
- def _imu_callback(self, msg):
- omega = np.array([msg.angular_velocity.x, msg.angular_velocity.y, msg.angular_velocity.z])
- omegabar = (omega + self.imu_latest_omega) / 2
- xddot = np.array([msg.linear_acceleration.x, msg.linear_acceleration.y, msg.linear_acceleration.z])
- xdot = self.imu_latest_xdot + (xddot + self.imu_latest_xddot) / 2 * self.sample_time # Integrate from xddot
- x = self.imu_latest_x + (xdot + self.imu_latest_xdot) / 2 * self.sample_time # Integrate from xdot
- self.imu_latest_x = x
- self.imu_latest_xdot = xdot
- self.imu_latest_xddot = xddot
- self.imu_latest_omega = omega
-
def state_estimate(self, timestamp=None):
if self.use_gt_state:
use_timestamp = rospy.Time.now()
@@ -261,13 +226,15 @@ def state_estimate(self, timestamp=None):
xddot = self.tf_prev_xddot
if timestamp is None:
time_elapsed = rospy.Time.now() - self.prev_tf_time
- true_rate = 1/time_elapsed.to_sec()
- xdot = true_rate * (x - self.tf_prev_x)
- xddot = true_rate * (xdot - self.tf_prev_xdot)
- self.prev_tf_time = self.prev_tf_time + time_elapsed
- self.tf_prev_x = x
- self.tf_prev_xdot = xdot
- self.tf_prev_xddot = xddot
+ secs = time_elapsed.to_sec()
+ if secs > 0.0:
+ true_rate = 1/secs
+ xdot = true_rate * (x - self.tf_prev_x)
+ xddot = true_rate * (xdot - self.tf_prev_xdot)
+ self.prev_tf_time = self.prev_tf_time + time_elapsed
+ self.tf_prev_x = x
+ self.tf_prev_xdot = xdot
+ self.tf_prev_xddot = xddot
return x, xdot, xddot, q
except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
return self.init_position, np.float32([0, 0, 0]), np.float32([0, 0, 0]), self.init_orientation
diff --git a/scripts/data_generator.py b/scripts/data_generator.py
index a11e0d9..9b146cc 100755
--- a/scripts/data_generator.py
+++ b/scripts/data_generator.py
@@ -13,7 +13,7 @@
from mav_msgs.msg import RateThrust
from sensor_msgs.msg import Imu
from flightgoggles.msg import IRMarkerArray, IRMarker
-from lib.graphix import camera_ray
+from lib.vision import camera_ray
from sensor_msgs.msg import Image
diff --git a/scripts/lib/graphix.py b/scripts/lib/graphix.py
deleted file mode 100644
index 46f9159..0000000
--- a/scripts/lib/graphix.py
+++ /dev/null
@@ -1,52 +0,0 @@
-import numpy as np
-import quaternion
-
-w = 1028
-h = 768
-fov = 70.0
-baseline = 0.32
-
-def radial_correction(x):
- k = [[-0.00012990738802631872, 1.2054033902290686, 0.0045308598380855436],
- [8.0829008163425578e-05, 0.9003035510021189, -1.2587111710371524e-05]]
- xcorr1 = k[0][0] * x[0]**2 + k[0][1] * x[0] + k[0][2]
- xcorr2 = k[1][0] * x[1]**2 + k[1][1] * x[1] + k[1][2]
- return np.float32([xcorr1, xcorr2])
-
-def q_rot(q, r):
- qr = np.quaternion(0.0, r[0], r[1], r[2])
- qr = q * qr * q.conjugate()
- return np.array([qr.x, qr.y, qr.z])
-
-def stereo_correction(q, o, side='left'):
- # perform origin correction because stereo cameras are spaced out
- y_trans = baseline/2 if side == 'left' else -baseline/2
- stereo_translation = q_rot(q, np.float32([0, y_trans, 0]))
- return o + stereo_translation
-
-def screen_space(x, y):
- x = (x - w/2)/(w/2)
- y = (y - h/2)/(h/2)
- return np.float32([x, y])
-
-def camera_proj(q, p):
- pdot = q_rot(q.conjugate(), p)
- x = pdot[1] / pdot[0]
- y = pdot[2] / pdot[0]
- x = x/(fov/2/45.0)
- y = y/(fov/2/45.0)
- return np.float32([-x, -y])
-
-def camera_ray(x, y, q, corr=False):
- x = screen_space(x, y)
- if corr:
- x = radial_correction(x)
- x, y = x[0], x[1]
- x = x*(fov/2/45.0)
- y = y*(fov/2/45.0)
- v = np.float32([1, -x, -y])
- return q_rot(q, v / np.linalg.norm(v))
-
-def ray_p_dist(o, d, p, pert=np.float32([0.0, 0.0, 0.0]), stereo=False):
- pdot = p - o
- return np.linalg.norm(np.cross(pdot, d))
diff --git a/scripts/lib/math_utils.py b/scripts/lib/math_utils.py
new file mode 100644
index 0000000..6035be7
--- /dev/null
+++ b/scripts/lib/math_utils.py
@@ -0,0 +1,78 @@
+import numpy as np
+import quaternion
+
+def identity_quaternion():
+ return np.quaternion(1.0, 0.0, 0.0, 0.0)
+
+def shortest_arc(v1, v2):
+ cross = np.cross(v1, v2)
+ if np.linalg.norm(cross) < 1e-15:
+ # Opposite or same directions.
+ if np.linalg.norm(v1 - v2):
+ return np.quaternion(1.0, 0.0, 0.0, 0.0)
+ else:
+ # Return rotation along x-axis.
+ rotation = np.sin(np.pi/2) + np.cos(np.pi/2)
+ return np.quaternion(rotation, 1.0, 0.0, 0.0).normalized()
+ v1sqr = np.dot(v1,v1)
+ v2sqr = np.dot(v2,v2)
+ return np.quaternion(np.sqrt(v1sqr * v2sqr) + np.dot(v1, v2), cross[0], cross[1], cross[2]).normalized()
+
+def angle_between(q1, q2):
+ # Returns arccos of the quaternion dot product (half the relative rotation angle; sign-sensitive).
+ q1 = quaternion.as_float_array(q1)
+ q2 = quaternion.as_float_array(q2)
+ return np.arccos(q1.dot(q2))
+
+def q_rot(q, r):
+ """Rotate vector r by rotation described by quaternion q"""
+ qr = np.quaternion(0.0, r[0], r[1], r[2])
+ qr = q * qr * q.conjugate()
+ return np.array([qr.x, qr.y, qr.z])
+
+def axis_angle(q):
+ """Compute axis-angle representation from quaternion"""
+ n = np.float32([q.x, q.y, q.z])
+ n_norm = np.linalg.norm(n)
+ return n/n_norm, 2 * np.arctan2(n_norm, q.w)
+
+def axis_angle_inv(axis, angle):
+ """Compute quaternion from axis-angle representation"""
+ x = axis[0] * np.sin(angle/2)
+ y = axis[1] * np.sin(angle/2)
+ z = axis[2] * np.sin(angle/2)
+ return np.quaternion(np.cos(angle/2), x, y, z)
+
+def q_from_rpy(r, p, y):
+ """Returns the quaternion corresponding to given roll-pitch-yaw (Tait-Bryan) angles"""
+ cosr = np.cos(r/2)
+ sinr = np.sin(r/2)
+ cosp = np.cos(p/2)
+ sinp = np.sin(p/2)
+ cosy = np.cos(y/2)
+ siny = np.sin(y/2)
+ return np.quaternion(
+ cosy * cosp * cosr + siny * sinp * sinr,
+ cosy * cosp * sinr - siny * sinp * cosr,
+ cosy * sinp * cosr + siny * cosp * sinr,
+ siny * cosp * cosr - cosy * sinp * sinr
+ )
+
+def rpy(q):
+ w, x, y, z = q.w, q.x, q.y, q.z
+ ysqr = y * y
+ t0 = 2.0 * (w * x + y * z)
+ t1 = 1.0 - 2.0 * (x * x + ysqr)
+ roll = np.arctan2(t0, t1)
+
+ t2 = 2.0 * (w * y - z * x)
+ if t2 > 1.0:
+ t2 = 1.0
+ elif t2 < -1.0:
+ t2 = -1.0
+ pitch = np.arcsin(t2)
+
+ t3 = 2.0 * (w * z + x * y)
+ t4 = 1.0 - 2.0 * (ysqr + z * z)
+ yaw = np.arctan2(t3, t4)
+ return roll, pitch, yaw
diff --git a/scripts/lib/vision.py b/scripts/lib/vision.py
new file mode 100644
index 0000000..d06d781
--- /dev/null
+++ b/scripts/lib/vision.py
@@ -0,0 +1,121 @@
+import numpy as np
+import quaternion
+import cv2
+from math_utils import q_rot, identity_quaternion
+import time
+
+w = 1028
+h = 768
+fov = 70.0
+baseline = 0.32
+fx = 1/(fov/2/45.0)*w/2
+fy = 1/(fov/2/45.0)*h/2
+cx = w / 2.0
+cy = h / 2.0
+camera_matrix = np.float32([[fx, 0.0, cx], [0.0, fy, cy], [0.0, 0.0, 1.0]])
+
+
+def polynomial_correction(x):
+ """Apply a 2nd order lens distortion correction to screen space point x"""
+ k = [[-0.00012990738802631872, 1.2054033902290686, 0.0045308598380855436],
+ [8.0829008163425578e-05, 0.9003035510021189, -1.2587111710371524e-05]]
+ xcorr1 = k[0][0] * x[0]**2 + k[0][1] * x[0] + k[0][2]
+ xcorr2 = k[1][0] * x[1]**2 + k[1][1] * x[1] + k[1][2]
+ return np.float32([xcorr1, xcorr2])
+
+def pixel_space(x):
+ """From screen space to pixel space"""
+ p1 = (x[0] + 1)*(w/2)
+ p2 = (x[1] + 1)*(h/2)
+ return np.float32([p1, p2])
+
+def stereo_correction(q, o, side='left'):
+ """Perform origin correction because stereo cameras are spaced out"""
+ y_trans = baseline/2 if side == 'left' else -baseline/2
+ stereo_translation = q_rot(q, np.float32([0, y_trans, 0]))
+ return o + stereo_translation
+
+def screen_space(x):
+ """Transform from pixel space to screen space (x in [-1, 1], y in [-1, 1])"""
+ y1 = (x[0] - w/2)/(w/2)
+ y2 = (x[1] - h/2)/(h/2)
+ return np.float32([y1, y2])
+
+def camera_proj(q, p):
+ """Project point p in object space to camera screen space"""
+ pdot = q_rot(q.conjugate(), p)
+ x = pdot[1] / pdot[0]
+ y = pdot[2] / pdot[0]
+ x = x/(fov/2/45.0)
+ y = y/(fov/2/45.0)
+ return np.float32([-x, -y])
+
+def camera_ray(x, q, corr=False):
+ """Get the ray in object space which corresponds to the pixel position x"""
+ x = screen_space(x)
+ if corr:
+ x = polynomial_correction(x)
+ x, y = x[0], x[1]
+ x = x*(fov/2/45.0)
+ y = y*(fov/2/45.0)
+ v = np.float32([1, -x, -y])
+ return q_rot(q, v / np.linalg.norm(v))
+
+def ray_p_dist(origin, direction, point, pert=np.float32([0.0, 0.0, 0.0]), stereo=False):
+ """Closest distance from the ray defined by origin and direction to the given point"""
+ pdot = point - origin
+ return np.linalg.norm(np.cross(pdot, direction))
+
+def to_rvec(q):
+ return cv2.Rodrigues(quaternion.as_rotation_matrix(q))[0]
+
+def to_q(rvec):
+ return quaternion.from_rotation_matrix(cv2.Rodrigues(rvec)[0])
+
+def cv_point_trans(x):
+ """Transform position from our representation to OpenCV convention"""
+ return np.float32([-x[1], -x[2], x[0]])
+
+def cv_point_inv_trans(x):
+ """Transform position from OpenCV convention to our representation"""
+ return np.float32([x[2], -x[0], -x[1]])
+
+def cv_q_trans(q):
+ """Transform orientation from our representation to OpenCV convention"""
+ return np.quaternion(-q.w, -q.y, -q.z, q.x)
+
+def cv_q_inv_trans(q):
+ """Transform orientation from OpenCV convention to our representation"""
+ return np.quaternion(q.w, q.z, -q.x, -q.y)
+
+def q2rvec(q):
+ """Compute OpenCV Rodrigues rotation vector from our quaternion"""
+ return to_rvec(cv_q_trans(q))
+
+def rvec2q(rvec):
+ """Compute our quaternion from an OpenCV Rodrigues rotation vector"""
+ return cv_q_inv_trans(to_q(rvec))
+
+def solve_pnp(object_points, image_points, pos=np.float32([0,0,0]), q=identity_quaternion()):
+ """
+ Solves the PnP problem for the given object-point image-point correspondences
+
+ :param object_points: array-like, shape=(n, 3), object points in flightgoggles representation
+ :param image_points: array-like, shape=(n, 2), pixel space points corresponding to object points
+ :return tuple, (bool, numpy.float32, numpy.quaternion), (Whether a solution was found, solved position, solved orientation)
+ """
+ t0 = time.time()
+ cv_object_points = np.apply_along_axis(cv_point_trans, 1, object_points)
+ image_points = np.apply_along_axis(screen_space, 1, image_points)
+ image_points = np.apply_along_axis(polynomial_correction, 1, image_points)
+ image_points = np.apply_along_axis(pixel_space, 1, image_points)
+ rvec = q2rvec(q)
+ tvec = cv_point_trans(q_rot(q.conjugate(), -pos))
+ success, rvec, tvec = cv2.solvePnP(cv_object_points, np.float32(image_points), camera_matrix, rvec=rvec, tvec=tvec, useExtrinsicGuess=True, distCoeffs=None, flags=0)
+ if not success: # Did not converge
+ return False, None, None
+ else:
+ orientation_result = rvec2q(rvec).conjugate()
+ position_result = -q_rot(orientation_result, cv_point_inv_trans(tvec))
+ #print('solve_pnp time:', (time.time() - t0) * 1000, 'ms')
+ return True, position_result, orientation_result
diff --git a/scripts/lowlevel.py b/scripts/lowlevel.py
index ece59ab..a46d490 100755
--- a/scripts/lowlevel.py
+++ b/scripts/lowlevel.py
@@ -14,11 +14,11 @@
from mav_msgs.msg import RateThrust
from sensor_msgs.msg import Imu
from flightgoggles.msg import IRMarkerArray, IRMarker
-from lib.graphix import camera_ray, ray_p_dist
+from lib.vision import camera_ray, ray_p_dist
from simcontrol.msg import State
from geometry_msgs.msg import Pose
from std_msgs.msg import Bool
-import math_utils
+from lib import math_utils
class ControllerBase(object):
def __init__(self):
@@ -30,17 +30,18 @@ def __init__(self, rate):
ControllerBase.__init__(self)
self.rate = rate
self.sample_time = 1.0 / rate
+ self.max_omega = 5*np.pi
pki = [1.0, 0.0, 0.0]
pki2xy = [1.0, 0.0, 0.0]
- pki2z = [2.0, 0.0, 0.0]
+ pki2z = [5.0, 0.0, 0.0]
pki3 = [50.0, 0.0, 0.0]
- pki3z = [2.0, 0.0, 0.0]
- vz_lim = (-6.0, 6.0)
+ pki3z = [15.0, 0.0, 0.0]
+ vz_lim = (-1.0, 6.0)
vxy_lim = (-15.0, 15.0)
axy_lim = (-2.0, 2.0)
- az_lim = (-4.0, 4.0)
- omegaxy_lim = (-self.max_omega, self.max_omega)
- omegaz_lim = (-self.max_omega, self.max_omega)
+ az_lim = (-1.0, 6.0)
+ omegaxy_lim = (-1e9, 1e9)
+ omegaz_lim = (-1e9, 1e9)
self.px_pid = PID(*pki, sample_time=self.sample_time)
self.py_pid = PID(*pki, sample_time=self.sample_time)
self.pz_pid = PID(*pki, sample_time=self.sample_time)
@@ -60,13 +61,13 @@ def __init__(self, rate):
self.thetay_pid.output_limits = omegaxy_lim
self.thetaz_pid.output_limits = omegaz_lim
- self.x = None
- self.xdot = None
- self.xddot = None
- self.q_b = None
-
- self.x_d = None
- self.q_d = None
+ init_pose = rospy.get_param('/uav/flightgoggles_uav_dynamics/init_pose')
+ self.x = np.float32([init_pose[0], init_pose[1], init_pose[2]])
+ self.q_b = np.quaternion(init_pose[6], init_pose[3], init_pose[4], init_pose[5])
+ self.x_d = self.x
+ self.q_d = self.q_b
+ self.xdot = np.float32([0, 0, 0])
+ self.xddot = np.float32([0, 0, 0])
self.takeoff = False
@@ -187,7 +188,6 @@ def loop(self):
rate = rospy.Rate(self.rate)
while not rospy.is_shutdown():
self.pid()
- print("remaining: ", rate.remaining().nsecs)
rate.sleep()
def command(self, c, omega):
diff --git a/scripts/math_utils.py b/scripts/math_utils.py
deleted file mode 100644
index 80436b3..0000000
--- a/scripts/math_utils.py
+++ /dev/null
@@ -1,23 +0,0 @@
-import numpy as np
-import quaternion
-
-def shortest_arc(v1, v2):
- cross = np.cross(v1, v2)
- if np.linalg.norm(cross) < 1e-15:
- # Opposite or same directions.
- if np.linalg.norm(v1 - v2):
- return np.quaternion(1.0, 0.0, 0.0, 0.0)
- else:
- # Return rotation along x-axis.
- rotation = np.sin(np.pi/2) + np.cos(np.pi/2)
- return np.quaternion(rotation, 1.0, 0.0, 0.0).normalized()
- v1sqr = np.dot(v1,v1)
- v2sqr = np.dot(v2,v2)
- return np.quaternion(np.sqrt(v1sqr * v2sqr) + np.dot(v1, v2), cross[0], cross[1], cross[2]).normalized()
-
-def angle_between(q1, q2):
- # Returns the angle (in radians) required to rotate one quaternion to another.
- q1 = quaternion.as_float_array(q1)
- q2 = quaternion.as_float_array(q2)
- return np.arccos(q1.dot(q2))
-
diff --git a/scripts/state_estimation.py b/scripts/state_estimation.py
index 21c8e0d..baaec7e 100755
--- a/scripts/state_estimation.py
+++ b/scripts/state_estimation.py
@@ -4,12 +4,18 @@
import rospy
import tf
import quaternion
-import math_utils
-from geometry_msgs.msg import Quaternion, Vector3
+from lib import math_utils
+from geometry_msgs.msg import Quaternion, Vector3, Point
from mav_msgs.msg import RateThrust
from nav_msgs.msg import Odometry
from sensor_msgs.msg import Imu
from simcontrol.msg import State
+from std_msgs.msg import Bool, String
+from flightgoggles.msg import IRMarkerArray
+from collections import defaultdict
+from lib.vision import solve_pnp
+import random
+import math
GRAVITY = 9.81000041962
EPSILON = np.finfo(np.float64).eps
@@ -21,8 +27,46 @@ def solve_for_orientation(f, a):
a_without_thrust = (a - f)
return math_utils.shortest_arc(a_without_thrust, gravity_vector)
+class TFStateEstimator(object):
+ def __init__(self):
+ self.tf_listener = tf.TransformListener()
+ self.tf_prev_x = np.float32([0, 0, 0])
+ self.tf_prev_q = math_utils.identity_quaternion()
+ self.tf_prev_xdot = np.array([0.0, 0.0, 0.0])
+ self.tf_prev_xddot = np.array([0.0, 0.0, 0.0])
+ self.prev_tf_time = rospy.Time.now()
+
+ def state_estimate(self, timestamp=None):
+ use_timestamp = rospy.Time.now()
+ if timestamp is not None:
+ use_timestamp = timestamp
+ try:
+ translation, rotation = self.tf_listener.lookupTransform('world','uav/imu', use_timestamp)
+ q = np.quaternion(rotation[3], rotation[0], rotation[1], rotation[2])
+ #q = q.conjugate()
+ x = np.array(translation)
+ # Discrete derivatives
+ xdot = self.tf_prev_xdot
+ xddot = self.tf_prev_xddot
+ if timestamp is None:
+ time_elapsed = rospy.Time.now() - self.prev_tf_time
+ nsec_elapsed = time_elapsed.to_nsec()
+ if nsec_elapsed > 0:
+ true_rate = 1.0/nsec_elapsed * 1e9
+ xdot = true_rate * (x - self.tf_prev_x)
+ xddot = true_rate * (xdot - self.tf_prev_xdot)
+ self.prev_tf_time = self.prev_tf_time + time_elapsed
+ self.tf_prev_x = x
+ self.tf_prev_q = q
+ self.tf_prev_xdot = xdot
+ self.tf_prev_xddot = xddot
+ return x, xdot, xddot, q
+ except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
+ return self.tf_prev_x, self.tf_prev_xdot, self.tf_prev_xddot, self.tf_prev_q
+
+
class Dynamics(object):
- def __init__(self, initial_state, sampling_time):
+ def __init__(self, initial_state):
"""States are as follows:
x[0]: x position
x[1]: y position
@@ -40,8 +84,9 @@ def __init__(self, initial_state, sampling_time):
"""
assert len(initial_state.shape) == 2
self.x = initial_state
- self.dt = sampling_time
self.gravity = GRAVITY
+ self.dt = 0
+ self.prev_time = rospy.Time.now()
def reset(self, x):
self.x = x
@@ -51,89 +96,89 @@ def step(self, u):
return self.x
def f(self, x, u):
- angular_velocity = u[0:3]
- thrust = u[5]
+ now = rospy.Time.now()
+ nsec_elapsed = (now - self.prev_time).to_nsec()
+ self.prev_time = now
+ if nsec_elapsed > 0:
+ self.dt = nsec_elapsed / 1e9
+ angular_velocity = u[0:3]
+ thrust = u[5]
- x = x
- dx = np.zeros_like(x)
+ dx = np.zeros_like(x)
- # Velocity affecting position.
- dx[0:3] = self.dt * x[3:6]
+ # Velocity affecting position.
+ dx[0:3] = self.dt * x[3:6]
- # Update velocity from thrust.
- dx[3] = self.dt * 2.0 * (x[6] * x[8] + x[7] * x[9]) * thrust
- dx[4] = self.dt * 2.0 * (x[7] * x[8] - x[6] * x[9]) * thrust
- dx[5] = self.dt * ((-x[6]**2 - x[7]**2 + x[8]**2 + x[9]**2) * thrust - self.gravity)
- dx[6] = self.dt * 0.5*(angular_velocity[0] * x[9] + angular_velocity[2] * x[7] - angular_velocity[1] * x[8])
- dx[7] = self.dt * 0.5*(angular_velocity[1] * x[9] - angular_velocity[2] * x[6] + angular_velocity[0] * x[8])
- dx[8] = self.dt * 0.5*(angular_velocity[2] * x[9] + angular_velocity[1] * x[6] - angular_velocity[0] * x[7])
- dx[9] = self.dt * 0.5*(-angular_velocity[0] * x[6] - angular_velocity[1] * x[7] - angular_velocity[2] * x[8])
-
- x += dx
- # Normalize quaternion.
- x[6:10] = x[6:10] / np.linalg.norm(x[6:10], 2)
+ # Update velocity from thrust.
+ dx[3] = self.dt * 2.0 * (x[6] * x[8] + x[7] * x[9]) * thrust
+ dx[4] = self.dt * 2.0 * (x[7] * x[8] - x[6] * x[9]) * thrust
+ dx[5] = self.dt * ((-x[6]**2 - x[7]**2 + x[8]**2 + x[9]**2) * thrust - self.gravity)
+ dx[6] = self.dt * 0.5*(angular_velocity[0] * x[9] + angular_velocity[2] * x[7] - angular_velocity[1] * x[8])
+ dx[7] = self.dt * 0.5*(angular_velocity[1] * x[9] - angular_velocity[2] * x[6] + angular_velocity[0] * x[8])
+ dx[8] = self.dt * 0.5*(angular_velocity[2] * x[9] + angular_velocity[1] * x[6] - angular_velocity[0] * x[7])
+ dx[9] = self.dt * 0.5*(-angular_velocity[0] * x[6] - angular_velocity[1] * x[7] - angular_velocity[2] * x[8])
+ x += dx
+ # Normalize quaternion.
+ x[6:10] = x[6:10] / np.linalg.norm(x[6:10], 2)
return x
-class StateEstimatorNode(object):
- def __init__(self):
- rospy.init_node('state_estimator')
-
- self.parent_frame = 'init_pose'
- self.frame = 'state_estimate'
- self.thrust = self._initial_thrust()
+class IREstimator(object):
+ def __init__(self):
+ self.init_position, self.init_orientation = self.get_init_pose()
+ self.ir_sub = rospy.Subscriber('/uav/camera/left/ir_beacons', IRMarkerArray, self.ir_subscriber, queue_size=1)
+ self.gate_sub = rospy.Subscriber('/simcontrol/target_gate', String, self._gate_callback)
+ self.taget_gate_name = None
+ self.tf = tf.TransformBroadcaster()
+ self.gate_markers = {}
+ self.gate_mean = {}
+ self.gate_normal = {}
+ self.gate_is_perturbed = {}
+ self.prev_closest_landmark = self.init_position
+ self.gate_names = rospy.get_param("/gate_names") if rospy.has_param('/gate_names') else None
+ self.last_orientation = self.init_orientation
+ self.state_pub = rospy.Publisher('/simcontrol/state_estimate', State, queue_size=1)
+ self.takeoff_pub = rospy.Publisher('/simcontrol/takeoff', Bool, queue_size=1)
+ self.tf_estimator = TFStateEstimator()
+ self.prev_time = rospy.Time.now()
+ self.prev_time_int = rospy.Time.now()
+ self.last_xdot = np.float32([0, 0, 0])
+ self.wait_i = 0
+ self.last_position_ir = self.init_position
+ self.last_position = np.copy(self.init_position)
+ self.target_gate_name = None
+ x = self._initial_pose()
+ self.dynamics = Dynamics(initial_state=x)
+ self.imu_enabled = True
+ self.latest_thrust = None
self.thrust_sub = rospy.topics.Subscriber(
name='/uav/input/rateThrust',
data_class=RateThrust,
callback=self._thrust_callback)
- self.state_pub = rospy.topics.Publisher("simcontrol/state_estimate",
- State,
- queue_size=1)
-
- self.broadcaster = tf.TransformBroadcaster()
- self.reset_sub = rospy.topics.Subscriber(
- name='simcontrol/state_estimate/reset',
- data_class=State,
- callback=self._reset)
-
- x = self._initial_pose()
- self._publish_frame(x)
-
- self.dynamics = Dynamics(initial_state=x,
- sampling_time=1.0/120.0)
-
- self.tf = tf.TransformListener(True, rospy.Duration(0.1))
-
- def step(self):
- u = np.array(self.thrust[0:6])[:, None]
- x = self.dynamics.step(u)
- self._publish_frame(x)
+ self.imu_topic = '/uav/sensors/imu'
+ self.imu_subscriber = rospy.Subscriber(self.imu_topic, Imu, self._imu_callback)
+ self.imu_acc_xddot = np.float32([0, 0, 0])
+ self.imu_acc_omega = np.float32([0, 0, 0])
+ self.imu_count = 0
+ self.prev_imu_time = rospy.Time.now()
+ self.x_min = np.float32([-20, -45, 0])
+ self.x_max = np.float32([21, 55, 19])
+ self.reference_waypoints = None
+ self.reference_trajectory = None
+ self.gate_waypoint_distance = 2.0
+ self.get_gate_info()
- def _reset(self, state):
- """
- Will set the state to the sent state.
- """
- x = np.array([
- state.pose.position.x,
- state.pose.position.y,
- state.pose.position.z,
- state.linear_velocity.x,
- state.linear_velocity.y,
- state.linear_velocity.z,
- state.pose.orientation.x,
- state.pose.orientation.y,
- state.pose.orientation.z,
- state.pose.orientation.w])[:, None]
- self.dynamics.reset(x)
- self.broadcaster.sendTransform((state.pose.position.x, state.pose.position.y,
- state.pose.position.z),
- (state.pose.orientation.x, state.pose.orientation.y,
- state.pose.orientation.z, state.pose.orientation.w),
- rospy.Time.now(),
- self.frame,
- self.parent_frame)
+ def _thrust_callback(self, thrust_msg):
+ self.latest_thrust = np.array([
+ thrust_msg.angular_rates.x,
+ thrust_msg.angular_rates.y,
+ thrust_msg.angular_rates.z,
+ thrust_msg.thrust.x,
+ thrust_msg.thrust.y,
+ thrust_msg.thrust.z
+ ])
def _initial_pose(self):
# Initial pose in [x y z w]
@@ -143,39 +188,245 @@ def _initial_pose(self):
x[6:] = initial_pose[3:]
return x
- def _initial_thrust(self):
- return np.array([0.0, 0.0, 0.0,
- 0.0, 0.0, GRAVITY
- ])[:, None]
def _gate_callback(self, msg):
    """Record the currently targeted gate.

    :param msg: message whose ``.data`` field carries the gate name
        (presumably a std_msgs/String); read later by
        distance_to_trajectory to select the active trajectory segment.
    """
    self.target_gate_name = msg.data
- def _thrust_callback(self, thrust_msg):
- self.thrust = np.array([
- thrust_msg.angular_rates.x,
- thrust_msg.angular_rates.y,
- thrust_msg.angular_rates.z,
- thrust_msg.thrust.x,
- thrust_msg.thrust.y,
- thrust_msg.thrust.z
- ])
def body_to_world(self, q_b, v):
    """Rotate a body-frame vector v into the world frame.

    Applies the quaternion sandwich product q_b * (0, v) * conj(q_b)
    and returns the vector part as a float32 array.
    """
    pure = np.quaternion(0.0, v[0], v[1], v[2])
    rotated = q_b * pure * q_b.conjugate()
    return np.float32([rotated.x, rotated.y, rotated.z])
+
def _imu_callback(self, msg):
    """Accumulate raw IMU samples; every 100th sample, average the
    window, run the IMU state update, and reset the accumulators."""
    w = msg.angular_velocity
    a = msg.linear_acceleration
    self.imu_acc_omega += np.float32([w.x, w.y, w.z])
    self.imu_acc_xddot += np.float32([a.x, a.y, a.z])
    self.imu_count += 1
    if self.imu_count % 100 != 0:
        return
    # Window complete: turn the sums into means, integrate, then reset.
    self.imu_acc_omega /= 100
    self.imu_acc_xddot /= 100
    self._imu_update()
    self.imu_acc_omega = np.float32([0, 0, 0])
    self.imu_acc_xddot = np.float32([0, 0, 0])
+
def get_init_pose(self):
    """Fetch the UAV's initial pose from the ROS parameter server.

    Reads ``/uav/flightgoggles_uav_dynamics/init_pose``, expected as a
    7-element list laid out [x, y, z, qx, qy, qz, qw].

    Returns:
        (position, orientation): a float32 [x, y, z] array and an
        np.quaternion built scalar-first (w = init_pose[6]). Falls back
        to the origin with the identity quaternion when the parameter
        is not set.
    """
    if not rospy.has_param('/uav/flightgoggles_uav_dynamics/init_pose'):
        return np.float32([0, 0, 0]), np.quaternion(1.0, 0.0, 0.0, 0.0)
    else:
        init_pose = rospy.get_param('/uav/flightgoggles_uav_dynamics/init_pose')
        x = np.float32([init_pose[0], init_pose[1], init_pose[2]])
        q = np.quaternion(init_pose[6], init_pose[3], init_pose[4], init_pose[5])
        return x, q
+
def linear_interpolate(self, p, n):
    """Densify a waypoint polyline by sampling each segment n times.

    For every segment the start point plus n-1 evenly spaced
    intermediate points are emitted.

    :param p: (m, d) array of waypoints
    :param n: samples per segment
    :return: (res, count) where ``res`` is an (n*m, d) buffer whose
        first ``count`` rows are valid (count = n*(m-1)).
        NOTE(review): the final waypoint p[-1] is never emitted —
        confirm callers expect an open-ended polyline.
    """
    res = np.zeros((n * p.shape[0], p.shape[1]))
    ii = 0
    for i in range(p.shape[0] - 1):
        res[ii, :] = p[i, :]
        trans = p[i + 1, :] - p[i, :]
        # (Removed an unused segment-length computation from the original.)
        for j in range(1, n):
            ii += 1
            res[ii, :] = p[i, :] + trans * (1.0 * j / n)
        ii += 1
    return res, ii
+
def distance_to_trajectory(self, x):
    """Minimum distance from x to the reference-trajectory segment
    between the previous gate and the current target gate.

    Each waypoint segment holds 100 interpolated samples, so target
    gate index t maps to trajectory rows [200*t, 200*t + 100).
    Returns 1e9 when the segment lies outside the trajectory.
    """
    target = self.gate_names.index(self.target_gate_name)
    lo = 200 * target
    hi = min(lo + 100, self.reference_trajectory.shape[0])
    closest = 1e9
    for i in range(lo, hi):
        if i == 0:
            continue  # row 0 is the initial pose, not a trajectory sample
        closest = min(closest, np.linalg.norm(self.reference_trajectory[i, :] - x))
    return closest
+
def get_gate_info(self):
    """Load nominal gate geometry from the parameter server and build
    the reference trajectory through all gates.

    Populates:
      - self.gate_markers: per-gate dict of the 4 corner points, keyed '1'..'4'
      - self.gate_normal:  unit normal of each gate plane
      - self.gate_is_perturbed: True when any perturbation bound exceeds 0.1
      - self.gate_mean:    gate center (mean of the 4 corners)
      - self.reference_waypoints / self.reference_trajectory

    NOTE(review): assumes the dicts above were created in __init__ and
    that gate IDs run 1..23 — confirm against the course configuration.
    """
    for gate in range(1, 24):
        nominal_location = np.float32(rospy.get_param('/uav/Gate%d/nominal_location' % gate))
        self.gate_markers['Gate%d' % gate] = {}
        for i in range(1, 5):
            self.gate_markers['Gate%d' % gate][str(i)] = nominal_location[i-1, :]
        # Plane normal from two corner edges, then normalized.
        # NOTE(review): the leading minus fixes the normal's direction —
        # presumably so it points along the intended flight direction; verify.
        self.gate_normal['Gate%d' % gate] = -np.cross(nominal_location[1,:] - nominal_location[0,:], nominal_location[2,:] - nominal_location[0,:])
        self.gate_normal['Gate%d' % gate] /= np.linalg.norm(self.gate_normal['Gate%d' % gate])
        perturbation_bound = np.float32(rospy.get_param('/uav/Gate%d/perturbation_bound' % gate))
        self.gate_is_perturbed['Gate%d' % gate] = np.any(perturbation_bound > 0.1)
        self.gate_mean['Gate%d' % gate] = np.mean(nominal_location, axis=0)
    # Generate reference trajectory: the initial position, then one approach
    # and one exit waypoint per gate, offset gate_waypoint_distance along the
    # gate normal on either side of the gate center.
    p = np.zeros((len(self.gate_names)*2+1, 3))
    p[0, :] = self.init_position
    for i, g in enumerate(self.gate_names):
        point_a = self.gate_mean[g] - self.gate_normal[g] * self.gate_waypoint_distance
        point_b = self.gate_mean[g] + self.gate_normal[g] * self.gate_waypoint_distance
        # Order the pair so the point closer to the previous waypoint is approached first.
        if np.linalg.norm(point_a - p[i*2, :]) < np.linalg.norm(point_b - p[i*2, :]):
            p[i*2+1, :] = point_a
            p[i*2+2, :] = point_b
        else:
            p[i*2+1, :] = point_b
            p[i*2+2, :] = point_a

    n = 100  # interpolated samples per waypoint segment
    ev, total = self.linear_interpolate(p, n)
    self.reference_waypoints = p
    self.reference_trajectory = ev[:total, :]
+
def _imu_update(self):
    """Fold the averaged IMU window into the orientation and velocity
    estimates.

    Called by _imu_callback once per 100 samples, with
    self.imu_acc_omega / self.imu_acc_xddot holding the window means.
    No-op unless IMU integration is enabled.
    """
    if self.imu_enabled:
        now = rospy.Time.now()
        time_elapsed = now - self.prev_imu_time
        self.prev_imu_time = now
        nsec_elapsed = time_elapsed.to_nsec()
        sec_elapsed = nsec_elapsed / 1e9
        omega = self.imu_acc_omega
        # NOTE(review): body rates are integrated directly as per-axis
        # Euler-angle rates, ignoring the body-rate-to-Euler-rate mapping —
        # only valid for small angles/time steps; confirm acceptable.
        r, p, y = math_utils.rpy(self.last_orientation)
        r_updated = omega[0] * sec_elapsed + r
        p_updated = omega[1] * sec_elapsed + p
        y_updated = omega[2] * sec_elapsed + y
        updated_q = math_utils.q_from_rpy(r_updated, p_updated, y_updated)
        body_xddot = self.imu_acc_xddot
        # Rotate the mean body acceleration into the world frame and apply
        # the gravity compensation term before integrating into velocity.
        xddot = self.body_to_world(self.last_orientation, body_xddot) + gravity_vector
        xdot = sec_elapsed * xddot + self.last_xdot  # Integrate from xddot
        self.last_xdot = xdot
        # Orientation is committed last so body_to_world above used the
        # pre-update attitude for this window.
        self.last_orientation = updated_q
+
def pub_state(self, x, xdot, xddot, q):
    """Publish the current state estimate on self.state_pub.

    :param x: world-frame position [x, y, z]
    :param xdot: world-frame linear velocity [vx, vy, vz]
    :param xddot: unused — accepted only for signature compatibility
        with callers; the State message carries no acceleration here
    :param q: orientation quaternion exposing .x/.y/.z/.w fields
    """
    msg = State()
    msg.header.stamp = rospy.Time.now()
    msg.header.frame_id = 'world'
    msg.pose.position = Vector3(x[0], x[1], x[2])
    msg.pose.orientation = Quaternion(q.x, q.y, q.z, q.w)
    msg.linear_velocity = Vector3(xdot[0], xdot[1], xdot[2])
    self.state_pub.publish(msg)
- self.broadcaster.sendTransform((x[0], x[1], x[2]),
- (x[6], x[7], x[8], x[9]),
- rospy.Time.now(),
- self.frame,
- self.parent_frame)
+
def integrate_position(self):
    """Dead-reckon the position forward using the last velocity estimate.

    Skips the update entirely when no time has elapsed since the last
    integration step (avoids a zero-dt no-op and keeps the timestamp).
    """
    now = rospy.Time.now()
    dt_ns = (now - self.prev_time_int).to_nsec()
    if dt_ns <= 0:
        return
    self.prev_time_int = now
    self.last_position = self.last_position + (dt_ns / 1e9) * self.last_xdot
+
def _build_state(self):
    """Pack the estimator's pose fields into a flat 10-vector
    [x, y, z, vx, vy, vz, qx, qy, qz, qw] (inverse of _set_from_state)."""
    p = self.last_position
    v = self.last_xdot
    ori = self.last_orientation
    return np.float32(
        [p[0], p[1], p[2], v[0], v[1], v[2], ori.x, ori.y, ori.z, ori.w])
+
def _set_from_state(self, x):
    """Load pose fields from a flat 10-vector
    [x, y, z, vx, vy, vz, qx, qy, qz, qw] (inverse of _build_state).

    np.quaternion takes the scalar part first, hence x[9] (w) leads.
    """
    self.last_position = np.float32(x[0:3])
    self.last_xdot = np.float32(x[3:6])
    self.last_orientation = np.quaternion(x[9], x[6], x[7], x[8])
+
def step(self):
    """Advance the state estimate by one control cycle and publish it.

    With the IMU enabled, dead-reckon position from the last velocity;
    otherwise propagate the packed state through the dynamics model
    using the latest thrust command (if one has arrived).
    """
    if self.imu_enabled:
        self.integrate_position()
    elif self.latest_thrust is not None:
        self._set_from_state(self.dynamics.f(self._build_state(), self.latest_thrust))
    xi = self.last_position
    xdoti = self.last_xdot
    qi = self.last_orientation
    x, xdot, xddot, q = self.tf_estimator.state_estimate()  # This is GT
    # NOTE(review): ground truth is only used for this drift printout —
    # debug output; consider removing or routing through rospy logging.
    print("Pos drift:", np.linalg.norm(xi - x))
    # NOTE(review): xdoti is passed for both the velocity and the
    # acceleration slots; pub_state ignores its xddot argument, but this
    # looks like a typo — confirm intent.
    self.pub_state(xi, xdoti, xdoti, qi)
+
def sample(self, iterator1, iterator2, k):
    """
    Draw k paired elements uniformly at random from two parallel
    sequences using reservoir sampling (Vitter's Algorithm R).

    :param iterator1: an indexable sequence
    :param iterator2: an indexable sequence parallel to iterator1
    :param k: the number of items to sample
    :return: (result1, result2) lists of k elements; result1[i] and
        result2[i] always come from the same source index, so the
        pairing is preserved
    """
    # fill the reservoir to start
    result1 = [iterator1[i] for i in range(k)]
    result2 = [iterator2[i] for i in range(k)]

    # Algorithm R: element i (i >= k) replaces a random reservoir slot
    # with probability k / (i + 1). The original loop started at index 0,
    # so the first k elements were considered twice and the sample was
    # biased; starting at k restores uniformity.
    for i in range(k, len(iterator1)):
        s = random.randint(0, i)
        if s < k:
            result1[s] = iterator1[i]
            result2[s] = iterator2[i]

    return result1, result2
+
def pos_sanity(self, x):
    """Check if position is inside of race hall (strictly within the
    axis-aligned bounds self.x_min / self.x_max)."""
    above_floor = np.all(x > self.x_min)
    below_ceiling = np.all(x < self.x_max)
    return above_floor and below_ceiling
+
def closest_gate(self, x):
    """Return the center (mean corner position) of the gate nearest to
    x, or None when no gates are known.

    Note: this returns the gate's position, not its name, despite what
    the variable naming in the original suggested.
    """
    best_center = None
    best_dist = 1e9
    for center in self.gate_mean.values():
        dist = np.linalg.norm(x - center)
        if dist < best_dist:
            best_dist = dist
            best_center = center
    return best_center
+
def ir_subscriber(self, msg):
    """Fuse IR marker observations into a position/orientation fix.

    For every gate with all four corner markers in view, solve a PnP
    problem for the vehicle pose, sanity-check it against the hall
    bounds, reject jumps relative to a recent IR fix, and keep the
    candidate closest to the reference trajectory. A finite-difference
    velocity is derived from consecutive accepted fixes.
    """
    # Trigger takeoff once enough IR frames have been observed.
    self.wait_i += 1
    if self.wait_i > 40:
        takeoff_msg = Bool(data=True)
        self.takeoff_pub.publish(takeoff_msg)

    # Group observed markers by the gate (landmark) they belong to.
    # (Removed the original's unused aggregate lists and marker counter.)
    image_points = defaultdict(list)
    object_points = defaultdict(list)
    for marker in msg.markers:
        image_points[marker.landmarkID.data].append(np.float32([marker.x, marker.y]))
        object_points[marker.landmarkID.data].append(self.gate_markers[marker.landmarkID.data][marker.markerID.data])

    p_results = []
    q_results = []
    for gate in image_points:
        if len(image_points[gate]) == 4:  # If all 4 points are in view, solve a relative pose to the gate
            # NOTE(review): the `success` flag from solve_pnp is not checked —
            # confirm failed solves cannot yield an in-bounds position.
            success, position, orientation = solve_pnp(object_points[gate], image_points[gate], self.last_position_ir, self.last_orientation)
            if not self.pos_sanity(position):
                continue
            p_results.append(position)
            q_results.append(orientation)

    if len(p_results) == 0:
        return

    use_pos = None
    use_q = None
    best_dist = 1e9
    # Hoisted: the recency window is the same for every candidate.
    recent_fix = (rospy.Time.now() - self.prev_time).to_sec() < 0.1
    for i in range(len(p_results)):
        # If we have a recent IR measurement, this one cannot be too far off
        if recent_fix and np.linalg.norm(p_results[i] - self.last_position_ir) > 1.0:
            continue
        # Choose the solution closest to the reference trajectory.
        # BUG FIX: the threshold was never tightened after a match, so the
        # LAST valid candidate won instead of the closest one.
        dist = self.distance_to_trajectory(p_results[i])
        if dist < best_dist:
            best_dist = dist
            use_pos = p_results[i]
            use_q = q_results[i]

    time_now = rospy.Time.now()
    nsec_elapsed = (time_now - self.prev_time).to_nsec()
    if nsec_elapsed > 0 and use_pos is not None:
        true_rate = 1.0 / nsec_elapsed * 1e9
        # Finite-difference velocity from consecutive IR fixes.
        xdot = true_rate * (use_pos - self.last_position_ir)
        x = use_pos
        q = use_q
        print('update')  # NOTE(review): debug output — consider rospy logging
        self.prev_time = time_now
        self.prev_time_int = time_now
        self.last_xdot = xdot
        self.last_position_ir = x
        self.last_position = x
        self.prev_closest_landmark = self.closest_gate(x)
        self.last_orientation = q
+
+
if __name__ == "__main__":
try:
- node = StateEstimatorNode()
+ rospy.init_node('ir_node')
+ node = IREstimator()
rate = rospy.Rate(RATE)
while not rospy.is_shutdown():
node.step()