#pragma once
#include "window_pair.h"
namespace MopedNS {
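
    // Debug/visualization step for the SfM feature tracker. For each image it
    // shows the previous and current frames side by side, with an arrow from
    // each tracked keypoint's old position to its new position. The "display"
    // parameter controls verbosity: 0 = off, 1 = log track counts,
    // 2 = also show the tracking windows.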
    class SFM_TRACKING_DISPLAY : public MopedAlg {

        int display;
        IplImage *prevprev;
        IplImage *prev;

    public:

        SFM_TRACKING_DISPLAY(int display)
            : display(display),
              prevprev(NULL),
              prev(NULL) {}

        void getConfig(map<string,string> &config) const {
            GET_CONFIG(display);
        }

        void setConfig(map<string,string> &config) {
            SET_CONFIG(display);
        }
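
        // Render the tracks for image i: convert the input to an IplImage,
        // pair it with the previous frame, and draw one arrow per tracked
        // feature. The first call only caches the frame and returns.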
        void processOne(FrameData &frameData, int i) {

            Image *image = frameData.images[i].get();

            // Expand the grayscale input into a 3-channel BGR IplImage.
            IplImage *img0 = cvCreateImage(cvSize(image->width, image->height),
                                           IPL_DEPTH_8U, 3);
            for (int y = 0; y < image->height; y++) {
                for (int x = 0; x < image->width; x++) {
                    img0->imageData[y*img0->widthStep + 3*x + 0] = image->data[y*image->width + x];
                    img0->imageData[y*img0->widthStep + 3*x + 1] = image->data[y*image->width + x];
                    img0->imageData[y*img0->widthStep + 3*x + 2] = image->data[y*image->width + x];
                }
            }

            // Collapse back to a single-channel image for display.
            IplImage *img = cvCreateImage(cvSize(image->width, image->height),
                                          IPL_DEPTH_8U, 1);
            cvCvtColor(img0, img, CV_BGR2GRAY);
            cvReleaseImage(&img0); // release the temporary to avoid leaking it

            // No previous frame yet: cache this one and wait for the next call.
            if (!prev) {
                prev = img;
                return;
            }
            // Show the two frames side by side (with optional annotations).
            WindowPair window_pair(prev, img, "Tracking");

            vector<KeyPoint> &key_points         = frameData.trackedFeatures[i];
            vector<int>      &old_points_indices = frameData.trackingPrevIndices[i];
            vector<Point2f>  &new_points         = frameData.trackingNewPoints[i];

            // Draw an arrow from each tracked feature's previous position to
            // its new one. The loop variable is j so it does not shadow the
            // image index i.
            for (int j = 0; j < (int) old_points_indices.size(); ++j) {
                window_pair.DrawArrow(key_points[old_points_indices[j]].pt,
                                      new_points[j], CV_RGB(255, 0, 0));
            }

            // Rotate the frame history; release the oldest frame so it does
            // not leak. window_pair only references prev and img, so this is
            // safe to do before Show().
            if (prevprev)
                cvReleaseImage(&prevprev);
            prevprev = prev;
            prev = img;

            window_pair.Show();
        }
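
        // Pipeline entry point. With display >= 1, log the tracked feature
        // counts; with display >= 2, additionally open a tracking window for
        // each image via processOne().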
        void process(FrameData &frameData) {

            if (display < 1) return;

            clog << "FEATURES TRACKED: ";
            foreach(tf, frameData.trackedFeatures)
                clog << tf.first << " : " << tf.second.size() << " ";
            clog << endl;

            if (display < 2) return;

            for (int i = 0; i < (int) frameData.images.size(); i++) {
                processOne(frameData, i);
                // Older per-image display code, disabled but kept for reference:
                /*
                string windowName = _stepName + " #" + toString(i) + ":" + frameData.images[i]->name;
                cvNamedWindow(windowName.c_str(), CV_WINDOW_AUTOSIZE);
                IplImage* img = cvCreateImage(cvSize(frameData.images[i]->width, frameData.images[i]->height), IPL_DEPTH_8U, 3);
                for (int y = 0; y < frameData.images[i]->height; y++) {
                    for (int x = 0; x < frameData.images[i]->width; x++) {
                        img->imageData[y*img->widthStep+3*x + 0] = frameData.images[i]->data[y*frameData.images[i]->width + x];
                        img->imageData[y*img->widthStep+3*x + 1] = frameData.images[i]->data[y*frameData.images[i]->width + x];
                        img->imageData[y*img->widthStep+3*x + 2] = frameData.images[i]->data[y*frameData.images[i]->width + x];
                    }
                }
                foreach(featureKind, frameData.detectedFeatures) {
                    int objectHash = 0;
                    for (unsigned int x = 0; x < featureKind.first.size(); x++)
                        objectHash = objectHash ^ featureKind.first[x];
                    CvScalar color = objectColors[objectHash % 256];
                    foreach(corresp, featureKind.second) {
                        if (corresp.imageIdx == i)
                            cvCircle(img, cvPoint(corresp.coord2D[0], corresp.coord2D[1]), 2, color, CV_FILLED, CV_AA);
                    }
                }
                cvShowImage(windowName.c_str(), img);
                cvReleaseImage(&img);
                }
                cvWaitKey(10);
                */
            }
        }
    };
}