-
Notifications
You must be signed in to change notification settings - Fork 1
/
KLT_SfM_Medusa.m
149 lines (133 loc) · 4.98 KB
/
KLT_SfM_Medusa.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
% KLT feature tracking over a user-selected ROI of the Medusa sequence.
% Reads video frames, lets the user draw a rectangular ROI on the first
% frame of the tracking window, detects Min-Eigenvalue corners inside it,
% and tracks them with a KLT point tracker for frames [startFrame, endFrame].
% Tracked (x, y) coordinates are accumulated per-frame into
% featurePointsX / featurePointsY; rows of lost points stay zero.
close all; clear; clc;   % 'clear' (not 'clear all') avoids flushing compiled caches
videoFileReader = vision.VideoFileReader('Dataset/medusa.mp4');
% Bidirectional error constraint makes tracking more robust to noise/clutter.
pointTracker = vision.PointTracker('MaxBidirectionalError', 2);
startFrame = 140; endFrame = 180; tFrames = 300;
noFrames = 1; detectFlag = 0;
while noFrames <= tFrames
    videoFrame = step(videoFileReader);
    if (noFrames >= startFrame && noFrames <= endFrame)
        if (detectFlag == 0)
            % First frame of the window: user selects the ROI interactively.
            figure(1); imshow(videoFrame); drawnow; title('Video Frame');
            ROI = getrect;
            ROIFrame = imcrop(videoFrame, ROI);
            % Resize the ROI to a fixed square (frame height on both sides)
            % so the tracker sees a constant-size image every frame.
            ROIFrame = imresize(ROIFrame, [size(videoFrame,1) size(videoFrame,1)]);
            % Very low MinQuality keeps many weak corners for a dense set.
            points = detectMinEigenFeatures(rgb2gray(ROIFrame), 'MinQuality', 0.002);
            points = points.Location;
            initialize(pointTracker, points, ROIFrame);
            oldPoints = points;
            % One column per frame in [startFrame, endFrame] INCLUSIVE,
            % hence the +1: the original under-allocated by one column and
            % forced MATLAB to grow both arrays on the last frame.
            featurePointsX = zeros(size(points,1), (endFrame-startFrame)+1);
            featurePointsY = zeros(size(points,1), (endFrame-startFrame)+1);
            ROIFrame = insertMarker(ROIFrame, points, '+', 'Color', 'white');
            figure; imshow(ROIFrame); drawnow;
            detectFlag = 1;
            % NOTE: deliberately fall through so the detection frame is
            % also tracked/recorded below in this same iteration.
        end
        if detectFlag == 1
            % Track the points; some may be lost frame to frame.
            ROIFrame = imcrop(videoFrame, ROI);
            ROIFrame = imresize(ROIFrame, [size(videoFrame,1) size(videoFrame,1)]);
            videoFrame = ROIFrame;
            [points, isFound] = step(pointTracker, videoFrame);
            visiblePoints = points(isFound, :);
            oldInliers = oldPoints(isFound, :);
            if size(visiblePoints, 1) >= 2   % need at least 2 points
                videoFrame = insertMarker(videoFrame, visiblePoints, '+', ...
                    'Color', 'white');
                % Store coordinates in this frame's column; untracked rows
                % remain 0 and are filtered out later via nnz.
                featurePointsX(isFound, (noFrames-startFrame)+1) = points(isFound,1);
                featurePointsY(isFound, (noFrames-startFrame)+1) = points(isFound,2);
                size(visiblePoints)   % unsuppressed on purpose: progress readout
            end
            figure(2); imshow(videoFrame); drawnow;
        end
    end
    noFrames = noFrames + 1   % unsuppressed on purpose: frame-count readout
end
% Keep only features successfully tracked in EVERY frame of the window.
% A row must be complete in BOTH X and Y so featureU/featureV stay
% row-aligned: the original filtered X and Y with independent conditions,
% which could leave the two matrices with different row counts and
% corrupt the measurement matrix built from them downstream.
% (Also removed the dead 'i = i+1' — MATLAB resets the for-loop counter
% each iteration, so that statement had no effect.)
featureU = [];
featureV = [];
nCols = (endFrame - startFrame) + 1;   % frames tracked, inclusive window
for i = 1:size(points,1)
    if nnz(featurePointsX(i,:)) == nCols && nnz(featurePointsY(i,:)) == nCols
        featureU = [featureU; featurePointsX(i,:)]; %#ok<AGROW>
        featureV = [featureV; featurePointsY(i,:)]; %#ok<AGROW>
        i %#ok<NOPTS> % unsuppressed on purpose: surviving feature index readout
    end
end
% Registration + factorization (Tomasi-Kanade): subtract the per-frame
% centroid from each coordinate track, stack the centered tracks into the
% measurement matrix W = [U'; V'], and take its rank-3 SVD.
meanU = mean(featureU);
meanV = mean(featureV);
nF = endFrame - startFrame;           % number of columns carried forward
objFeatureU = bsxfun(@minus, featureU(:,1:nF), meanU(1,1:nF));
objFeatureV = bsxfun(@minus, featureV(:,1:nF), meanV(1,1:nF));
objW = [objFeatureU'; objFeatureV'];
[O1, S, O2T] = svd(objW);
O2 = O2T';
% Truncate to rank 3 and split the singular values evenly between the
% motion factor (objR) and the shape factor (objS).
rootS = S(1:3,1:3)^0.5;
objR = O1(:,1:3) * rootS;
objS = rootS * O2(1:3,:);
% Step 3: Metric constraints — upgrade the affine factorization to a
% metric one via the corrective matrix Q returned by metricConstraints
% (project-local function, defined elsewhere).
Q = metricConstraints(objR);
% (iv) Recover motion R and shape S [eq. 3.14].
R = objR * Q;
S = Q \ objS;          % mldivide is faster and more accurate than inv(Q)*objS
% (v) Align the first camera reference system with the world frame:
% build R0 from the first camera's image axes and rotate R/S into it.
F = endFrame - startFrame;
i1 = R(1,:)';          % first camera x-axis (row 1 of the motion matrix)
i1 = i1 / norm(i1);
j1 = R(F+1,:)';        % first camera y-axis (row F+1 pairs with row 1)
j1 = j1 / norm(j1);
k1 = cross(i1, j1);
k1 = k1 / norm(k1);
% NOTE(review): R0 is only a rotation if i1 and j1 are exactly
% perpendicular after the metric upgrade — confirm, or re-orthogonalize
% (e.g. Gram-Schmidt) if the alignment looks skewed.
R0 = [i1 j1 k1];
R = R * R0;
S = R0 \ S;            % replaces inv(R0)*S; same result, better conditioning
% Visualize the recovered 3-D shape: scatter the raw points, then overlay
% a cubic-interpolated surface sampled on a regular 500x500 grid, and
% finally re-plot the points on top of the mesh.
figure; plot3(S(1,:), S(2,:), S(3,:), '*');
gridX = linspace(min(S(1,:)), max(S(1,:)), 500);
gridY = linspace(min(S(2,:)), max(S(2,:)), 500);
[X, Y] = meshgrid(gridX, gridY);
Z = griddata(S(1,:), S(2,:), S(3,:), X, Y, 'cubic');
mesh(X, Y, Z);
axis tight; hold on;
plot3(S(1,:), S(2,:), S(3,:), '.', 'MarkerSize', 15);
% Release the tracker's system-object resources.
release(pointTracker);