Demo entry 1469742

dfa

   

Submitted by anonymous on Mar 18, 2015 at 23:46
Language: Matlab. Code size: 7.4 kB.

% Stereo visual odometry demo: estimate the camera trajectory from a stereo
% image sequence using SURF features, triangulation and RANSAC pose fitting.
close all; clc; clearvars;  % clearvars instead of 'clear all': same variable reset without clearing breakpoints/compiled functions

%% load stereo parameters
addpath('estimateRigidTransform/estimateRigidTransform/');
load('CameraCalibration/stereoParams.mat');

% %% load the images
load('../../cmu_16662_p2/sensor_data/hand_carry.mat');
% Prefix every image name with the sensor_data directory (cell-array strcat).
right_image_names = strcat('../../cmu_16662_p2/sensor_data/', right_image_names);
left_image_names  = strcat('../../cmu_16662_p2/sensor_data/', left_image_names);

%% simulation variables
start_image = 1;  % first frame index (the loop below currently starts at 1)
step = 1;         % frame increment between successive pose estimates

%% calculate camera matrices
% Intrinsic matrix K: focal lengths fx, fy and principal point (cx, cy) in pixels.
IntrinsicMatrix = [164.2550 0       214.5240;
                   0        164.255 119.4333;
                   0        0       1];

baseline = .1621;  % stereo baseline (translation along x between the two cameras)

% Projection matrices P = K*[R|t]; the left camera is the reference frame.
cameraMatrix1 = IntrinsicMatrix*[eye(3) zeros(3,1)];
cameraMatrix2 = IntrinsicMatrix*[eye(3), [baseline; 0; 0]];

% cameraParameters expects the transposed (row-vector/post-multiply) convention.
cameraParams = cameraParameters('IntrinsicMatrix', IntrinsicMatrix');

%% initialize odometry to be at the origin (robot starts at origin)
pose(:,:,1) = eye(4);  % 4x4 homogeneous pose per processed frame
T = [];                % per-step relative transformations
ind_pose = 1;

%% initialize previous image used for display
show = false;  % set true to visualize matches and reprojection diagnostics

%% RANSAC Parameters
ransac_points = 6;   % points per minimal sample; must be > 5 for extrinsics
num_iters = 100;     % number of RANSAC iterations
tol = 1.0;           % reprojection-error tolerance in pixels
max_inliers = -1;    % best inlier count seen so far
inliers_index = [];

%% Step through the entire picture set
% For each pair of consecutive frames: match SURF features between the
% stereo pair, triangulate 3-D points, match them into the next left
% image, and estimate the relative camera motion with RANSAC + extrinsics.
for im_num = 1:step:length(right_image_names)-step
    im_num  % progress indicator (intentionally unsuppressed)

    %% images to use for this iteration
    Ir1 = imread(right_image_names{im_num});
    Il1 = imread(left_image_names{im_num});
    Il2 = imread(left_image_names{im_num+step});

    Ir1 = im2double(Ir1);
    Il1 = im2double(Il1);
    Il2 = im2double(Il2);

    Ir1 = rgb2gray(Ir1);
    Il1 = rgb2gray(Il1);
    Il2 = rgb2gray(Il2);

    %% get points and features for right image
    points_r = detectSURFFeatures(Ir1);
    [features_r, valid_points_r] = extractFeatures(Ir1, points_r);

    %% get points and features for left image
    points_l1 = detectSURFFeatures(Il1);
    [features_l1, valid_points_l1] = extractFeatures(Il1, points_l1);

    %% get points and features for next left image
    points_l2 = detectSURFFeatures(Il2);
    [features_l2, valid_points_l2] = extractFeatures(Il2, points_l2);

    %% match features of the left and right image and get matched points
    indexPairs = matchFeatures(features_l1, features_r);
    matched_points_r = valid_points_r(indexPairs(:, 2), :);
    matched_points_l1 = valid_points_l1(indexPairs(:, 1), :);
    features_l1 = features_l1(indexPairs(:, 1), :);

    %% match features from two successive images to find point correspondence
    indexPairs = matchFeatures(features_l1, features_l2);
    matched_points_r = matched_points_r(indexPairs(:, 1), :);   % previous image points (right)
    matched_points_l1 = matched_points_l1(indexPairs(:, 1), :); % previous image points (left)
    matched_points_l2 = valid_points_l2(indexPairs(:, 2), :);   % current image points (left)

    %% get the world points of the current pair of images
    worldPoints = triangulateFeatures(matched_points_r.Location, matched_points_l1.Location, cameraMatrix2, cameraMatrix1);

    %% check the world points
    if show
        reprojectionError = checkWorldPoints(worldPoints, matched_points_l1.Location, cameraMatrix1, show);
        figure(3);
        plot3(worldPoints(:,1), worldPoints(:,2), worldPoints(:,3), 'ro');
        zlabel('z'); ylabel('y'); xlabel('x');
    end

    %% show current corresponding points between images
    if show
        figure(1);
        subplot(2,1,1);
        showMatchedFeatures(Il1, Il2, matched_points_l1.Location, matched_points_l2.Location);
    end

    %% re-initialize RANSAC PARAMS
    max_inliers = -1;
    Transformation = eye(4);
    inliers_index = [];
    inliers = [];

    % Triangulation can produce points at infinity (parallel rays); flag them.
    if any(isinf(worldPoints(:)))
        worldPoints
        pause
    end

    %% RANSAC
    % NOTE: use size(...,1) for the point count. length() returns the
    % largest dimension, which is wrong for an Nx3 matrix when N < 3 and
    % was the original source of sampling errors on sparse frames.
    num_pts = size(worldPoints, 1);
    if num_pts < ransac_points
        % Not enough correspondences for a minimal sample; assume no motion
        % for this step rather than erroring inside randperm.
        warning('Frame %d: only %d 3-D points (< %d); assuming identity motion.', ...
                im_num, num_pts, ransac_points);
    else
        for i = 1:num_iters
            %% re-initialize inliers
            inliers = [];

            %% randomly pick a minimal sample of world points
            index = randperm(num_pts, ransac_points);
            world_pts = worldPoints(index,:);
            image_pts_l2 = matched_points_l2(index,:);

            %% calculate Rotation and Translation
            [rotationMatrix, translationVector] = extrinsics(double(image_pts_l2.Location), world_pts, cameraParams);
            % Flip the solution if the camera ended up behind the scene
            % (sign of R(3,3) disambiguates the extrinsics solution).
            translationVector = translationVector*sign(rotationMatrix(3,3));
            rotationMatrix = rotationMatrix*sign(rotationMatrix(3,3));
            % extrinsics returns the row-vector convention; transpose into
            % a standard 4x4 homogeneous transformation.
            H = [rotationMatrix', translationVector'; zeros(1,3), 1];

            %% minimize reprojection error on the second image (calc inliers)
            projectedPoints = H*[worldPoints, ones(size(worldPoints,1),1)]';
            projectedImagePoints = cameraMatrix1 * (projectedPoints(1:4,:)./repmat(projectedPoints(4,:),4,1));
            reprojectedPoints = projectedImagePoints(1:2,:)./repmat(projectedImagePoints(3,:),2,1);

            % Euclidean pixel distance between observed and reprojected points.
            d = matched_points_l2.Location - reprojectedPoints';
            d = sqrt(d(:,1).^2 + d(:,2).^2);

            inliers = abs(d) < tol;

            %% keep the hypothesis with the most inliers
            if(sum(inliers) > max_inliers)
                Transformation = H;
                max_inliers = sum(inliers);
                inliers_index = inliers;
            end
        end
    end

    %% visualize corresponding points after outlier rejection
    if show
        subplot(2,1,2);
        inlier1 = matched_points_l1.Location;
        inlier1 = inlier1(inliers_index,:);
        inlier2 = matched_points_l2.Location;
        inlier2 = inlier2(inliers_index,:);
        showMatchedFeatures(Il1, Il2, inlier1, inlier2);
        hold off
        pause
    end

    %% adjust odometry for the robot
    % Compose the relative motion onto the accumulated pose chain.
    pose(:,:,ind_pose+1) = Transformation * pose(:,:,ind_pose);
    T(:,:,ind_pose) = Transformation;
    ind_pose = ind_pose + 1;

    % Live top-down (x-z) trajectory plot plus the current image pair.
    figure(10)
    x = pose(1,4,:);
    z = pose(3,4,:);
    x = x(:);
    z = z(:);
    plot(x,z);
    figure(11)
    subplot(1,2,1)
    imshow(Il1)
    subplot(1,2,2)
    imshow(Il2)

end

%% replot the position
% Pull the translation column out of every accumulated 4x4 pose.
% Each pose(k,4,:) slice is 1x1xN; squeeze collapses it to an Nx1 vector.
x = squeeze(pose(1,4,:));
y = squeeze(pose(2,4,:));
z = squeeze(pose(3,4,:));
%
% figure();
% plot(x,y);
% hold off

This snippet took 0.01 seconds to highlight.

Back to the Entry List or Home.

Delete this entry (admin only).