#!/usr/bin/env python
# -*- coding: utf-8 -*-
# (C) 02-27-2018 by Krishneel

import os
import sys
import math
import random
import time

import numpy as np
import cv2 as cv
from sklearn.neighbors import KDTree


def info():
    print("======")
    print('Pseudo-script outlining operations required to obtain the x, y position of a robot')


class Odometry(object):
    def __init__(self, focal_length, center, im_size, wheel_rad, num_wheels):
        self.__focal_length = focal_length
        self.__center = center
        self.__im_size = im_size
        self.__wheel_radius = wheel_rad
        self.__num_wheels = num_wheels

        self.__prev_pts = None
        self.__kdtree = None
        self.__rotation = np.identity(3, dtype=np.float32)
        ##! accumulated camera translation as a 3x1 vector
        self.__translation = np.zeros((3, 1), dtype=np.float32)

        ##! assuming z in the camera link is forward facing
        angle = math.pi / 2.0
        self.__cam2base = np.array([[1, 0, 0],
                                    [0, np.cos(angle), -np.sin(angle)],
                                    [0, np.sin(angle), np.cos(angle)]], dtype=np.float32)
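
        ##! For reference, a minimal illustration (not used anywhere else): with angle = pi / 2
        ##! the matrix above evaluates, up to floating point error, to
        ##!     [[1, 0,  0],
        ##!      [0, 0, -1],
        ##!      [0, 1,  0]]
        ##! i.e. a 90 degree rotation about the camera x-axis.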
""" | |
function to return the position | |
""" | |
def get_position(self, curr_pts): | |
if self.__prev_pts is None: | |
self.__prev_pts = curr_pts | |
return | |
current_time = int(round(time.time() * 1000)) | |
##! distance travelled | |
distance_traveled = self.get_distance_traveled(rotations) | |
print 'distance traveled: ', distance_traveled | |
##! find the correspondence between image (t-1, t) | |
match_points1, match_points2 = self.points_correspondence(prev_pts, curr_pts, thresh) | |
##! estimate the pose between images (t-1, t) | |
rot, trans = self.get_pose(match_points1, match_points2) | |
##! trajactory | |
self.__translation = self.__translation + self.__rotation.dot(trans) | |
self.__rotation = rot.dot(self.__rotation) | |
##! transform the rotation from camera link to the robot base link | |
##! in the base_link z-axis is upwards | |
transformed_rot = rot.dot(self.__cam2base) | |
##! get the yaw angles (ez) | |
ex, ey, ez = self.convertRotationMatrixToEulerAngles(transformed_rot) | |
x = distance_traveled * np.sin(ez) | |
y = distance_traveled * np.cos(ez) | |
##! copy current to previous | |
self.__prev_pts = curr_pts | |
return (x, y) | |
""" | |
function to return the distance travelled by the rover | |
distance is averaged over all the wheels | |
""" | |
def get_distance_traveled(self, rotations): | |
distance = 0.0 | |
for w in xrange(0, self.__num_wheels, 1): | |
distance += (2 * math.pi * self.__wheel_radius) | |
return distance / float(self.__num_wheels) | |
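
    ##! Worked example (illustrative only, using the default values from main below):
    ##! with wheel_rad = 0.30 m and rotations = 1, each wheel contributes
    ##! 2 * pi * 0.30 ~= 1.885 m, so the averaged distance is also ~1.885 m.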
""" | |
function to find correspondance between points in prev and current images | |
""" | |
def points_correspondence(self, prev_pts, curr_pts, thresh = 5): | |
#! first find the consistant points in two images | |
#! use icp to find points forming similar geometrical layout | |
self.__kdtree = KDTree(curr_pts, leaf_size = 10, metric='minkowski') | |
trans = iterative_matching(prev_pts, curr_pts, tolerance = 1e-6) | |
#! transform prev_pts using [trans|rot] matrix | |
trans_prev_pts = np.ndarray((prev_pts.shape[0], prev_pts.shape[1]), dtype = np.float32) | |
for index, point in enumerate(prev_pts): | |
x, y = point | |
trans_prev_pts[index] = [trans[0, 0] * x + trans[0, 1] * y + trans[0, 2], \ | |
trans[1, 0] * x + trans[1, 1] * y + trans[1, 2]] | |
#! find the correspondence using nearest neighbor search | |
distances, indices = self.__kdtree.query(trans_prev_pts, k=1, return_distance=True) | |
match_points1 = [] | |
match_points2 = [] | |
for index, (dist, idx) in enumerate(zip(distances, indices)): | |
if (dist[0] < thresh): | |
match_points1.append(trans_prev_pts[index]) | |
match_points2.append(idx[0]) | |
#! -> RANSAC(match_points1, match_points2) remove outliers? | |
return np.array(match_points1), np.array(match_points2) | |
""" | |
function to compute the pose using image info | |
""" | |
def get_pose(self, match_points1, match_points2): | |
if match_points1.shape[0] != match_points2.shape[0] and \ | |
len(match_points1.shape[0]) < 5: ##! use 5 point algorithm | |
print 'get_pose [ERROR]: input array size are not equal' | |
return | |
#! using opencv's 5point alg | |
ess_mat, mask = cv.findEssentialMat(match_points2, match_points1, self.__focal_lenght, \ | |
self.__center, cv.RANSAC) | |
#! decompose essential matrix into [R|T] using SVD | |
pts, rot, trans, mask = cv.recoverPose(ess_mat, match_points2, match_points1, \ | |
self.__focal_lenght, self.__center) | |
return rot, trans | |
""" | |
function to match points in prev image on the the current image | |
- same idea as ICP where I assume that the geometrical context of the scene defined by | |
the feature point does not change | |
""" | |
def iterative_matching(self, prev_pts, curr_pts, max_iter = 50, rate = 0.9): | |
trans_mat = np.array([[np.cos(0), -np.sin(0), 0], [np.sin(0), np.cos(0), 0], [0, 0, 1]]) | |
prev_pts = cv.transform(prev_pts, trans_mat) | |
max_dist = sys.maxint | |
ratio = max(np.max(curr_pts[:, 0]) - np.min(curr_pts[:, 0]), \ | |
np.max(curr_pts[:, 1]) - np.min(curr_pts[:, 1])) | |
for i in xrange(0, max_iter, 1): | |
distances, indices = kdtree.query(prev_pts, k=1, return_distance=True) | |
dist = [] | |
src_points = [] | |
match_points = [] | |
for index, (d, idx) in enumerate(zip(distances, indices)): | |
if d[0] < max_dist * rate: | |
dist.append(d[0]) | |
pt = src[0][idx][0] | |
src_points.append(prev_pts[index]) | |
match_points.append(pt) | |
match_points = np.array(match_points) | |
src_points = np.array(src_points) | |
if src_points.shape[0] > 5 and match_points.shape[0] > 5: | |
rigid_trans = cv2.estimateRigidTransform(src_points, match_points, True) | |
if rigid_trans is not None: | |
prev_pts = cv2.transform(prev_pts, rigid_trans) | |
trans_mat = np.dot( | |
np.vstack((rigid_trans, [0,0,1])), \ | |
trans_mat) | |
if self.is_converge(rigid_trans, ratio): | |
break | |
max_dist = np.max(dist) | |
return trans_mat | |

    def is_converge(self, rigid_trans, scale, rate=1e-5):
        delta_angle = rate
        delta_scale = scale * rate

        min_cos = 1 - delta_angle
        max_cos = 1 + delta_angle
        min_sin = -delta_angle
        max_sin = delta_angle
        min_move = -delta_scale
        max_move = delta_scale

        return min_cos < rigid_trans[0, 0] < max_cos and \
            min_cos < rigid_trans[1, 1] < max_cos and \
            min_sin < -rigid_trans[1, 0] < max_sin and \
            min_sin < rigid_trans[0, 1] < max_sin and \
            min_move < rigid_trans[0, 2] < max_move and \
            min_move < rigid_trans[1, 2] < max_move
""" | |
function to convert rotation matrix to axis angles | |
""" | |
def convertRotationMatrixToEulerAngles(rot_mat) : | |
if not self.valid_rotation_matrix(rot_mat): | |
return | |
x = math.atan2(rot_mat[2,1] , rot_mat[2,2]) | |
y = math.atan2(-rot_mat[2,0], sy) | |
z = math.atan2(rot_mat[1,0], rot_mat[0,0]) | |
return np.array([x, y, z]) | |
def valid_rotation_matrix(self, rotation_matrix, thresh = 0.000001): | |
val = np.dot(np.transpose(rotation_matrix), rotation_matrix) | |
dist = np.linalg.norm(val - np.identity(3, dtype = np.float32)) | |
return dist < thresh | |
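

##! Optional sanity check, a minimal sketch: `check_yaw_recovery` is a hypothetical helper
##! that is not called anywhere in this script. It builds a pure z-axis rotation and verifies
##! that convertRotationMatrixToEulerAngles recovers the same yaw angle.
##! usage: check_yaw_recovery(Odometry(18.0, (250, 250), (500, 500), 0.30, 4))
def check_yaw_recovery(odom, yaw=0.3):
    rot_z = np.array([[np.cos(yaw), -np.sin(yaw), 0.0],
                      [np.sin(yaw), np.cos(yaw), 0.0],
                      [0.0, 0.0, 1.0]])
    ex, ey, ez = odom.convertRotationMatrixToEulerAngles(rot_z)
    return abs(ez - yaw) < 1e-5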


def main(argv):
    #! inputs <- (current time, rotations, radius, focal_length, curr_points)
    rotations = 1
    radius = 0.30
    focal_length = 18.0
    im_size = (500, 500)
    center = (im_size[0] / 2, im_size[1] / 2)  # principal point
    num_wheels = 4

    odom = Odometry(focal_length, center, im_size, radius, num_wheels)

    ##! current keypoints (star, etc.)
    curr_pts = np.random.randint(im_size[0], size=(100, 2)).astype(np.float32)
    while True:
        ##! get the position of the robot
        x, y = odom.get_position(curr_pts, rotations)
        ##! just some random points for the next frame
        r = random.randint(0, 10)
        curr_pts += r if random.randint(0, 1) else -r


if __name__ == "__main__":
    info()
    main(sys.argv)