#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_MODULE_MBT)

#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
#include <type_traits>

#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpImageDraw.h>
#include <visp3/core/vpFont.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayD3D.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/mbt/vpMbGenericTracker.h>

#define GETOPTARGS "i:dsclt:e:DmCh"
void usage(const char *name, const char *badparam)
Regression test for vpGenericTracker.\n\
  %s [-i <test image path>] [-c] [-d] [-s] [-h] [-l] \n\
 [-t <tracker type>] [-e <last frame index>] [-D] [-m] [-C]\n", name);

  -i <input image path> \n\
     Set image input path.\n\
     These images come from ViSP-images-x.y.z.tar.gz available \n\
     on the ViSP website.\n\
     Setting the VISP_INPUT_IMAGE_PATH environment\n\
     variable produces the same behavior as using\n\
     Turn off the display.\n\
     If the display is turned off, tracking results are saved in a video folder.\n\
     Disable the mouse click. Useful to automate the \n\
     execution of this program without human intervention.\n\
     Set tracker type (<1 (Edge)>, <2 (KLT)>, <3 (both)>) for the color sensor.\n\
     Use the scanline for visibility tests.\n\
  -e <last frame index>\n\
     Specify the index of the last frame. Once reached, the tracking is stopped.\n\
     Set a tracking mask.\n\
     Print the help.\n\n");
    fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
bool getOptions(int argc, const char **argv, std::string &ipath, bool &click_allowed, bool &display, bool &save,
                bool &useScanline, int &trackerType, int &lastFrame, bool &use_depth, bool &use_mask,
                bool &use_color_image)
      click_allowed = false;
      trackerType = atoi(optarg_);
      lastFrame = atoi(optarg_);
      use_color_image = true;
      usage(argv[0], NULL);
      usage(argv[0], optarg_);
  if ((c == 1) || (c == -1)) {
    usage(argv[0], NULL);
    std::cerr << "ERROR: " << std::endl;
    std::cerr << "  Bad argument " << optarg_ << std::endl << std::endl;
template <typename Type>
bool read_data(const std::string &input_directory, int cpt, const vpCameraParameters &cam_depth,
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
  static_assert(std::is_same<Type, unsigned char>::value || std::is_same<Type, vpRGBa>::value,
                "Template function supports only unsigned char and vpRGBa images!");
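  // Build the per-frame color image, depth map and ground-truth pose filenames from the frame counter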
  sprintf(buffer, std::string(input_directory + "/Images/Image_%04d.pgm").c_str(), cpt);
  std::string image_filename = buffer;

  sprintf(buffer, std::string(input_directory + "/Depth/Depth_%04d.bin").c_str(), cpt);
  std::string depth_filename = buffer;

  sprintf(buffer, std::string(input_directory + "/CameraPose/Camera_%03d.txt").c_str(), cpt);
  std::string pose_filename = buffer;
  unsigned int depth_width = 0, depth_height = 0;
  std::ifstream file_depth(depth_filename.c_str(), std::ios::in | std::ios::binary);
  if (!file_depth.is_open())

  I_depth.resize(depth_height, depth_width);
  pointcloud.resize(depth_height*depth_width);
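  // Convert each raw depth value to a depth Z via depth_scale and store the corresponding 3D point in the point cloud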
  const float depth_scale = 0.000030518f;
  for (unsigned int i = 0; i < I_depth.getHeight(); i++) {
    for (unsigned int j = 0; j < I_depth.getWidth(); j++) {
      double x = 0.0, y = 0.0, Z = I_depth[i][j] * depth_scale;
      pointcloud[i*I_depth.getWidth()+j] = pt3d;
  std::ifstream file_pose(pose_filename.c_str());
  if (!file_pose.is_open()) {
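  // Read the ground-truth camera pose stored as a 4x4 homogeneous matrix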
  for (unsigned int i = 0; i < 4; i++) {
    for (unsigned int j = 0; j < 4; j++) {
      file_pose >> cMo[i][j];
template <typename Type>
bool run(const std::string &input_directory, bool opt_click_allowed, bool opt_display, bool useScanline,
         int trackerType_image, int opt_lastFrame, bool use_depth, bool use_mask, bool save) {
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
  static_assert(std::is_same<Type, unsigned char>::value || std::is_same<Type, vpRGBa>::value,
                "Template function supports only unsigned char and vpRGBa images!");
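  // Pick a display class among the available GUI backends (X11, GDI, OpenCV, Direct3D, GTK)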
#if defined VISP_HAVE_X11
#elif defined VISP_HAVE_GDI
#elif defined VISP_HAVE_OPENCV
#elif defined VISP_HAVE_D3D9
#elif defined VISP_HAVE_GTK
  std::vector<int> tracker_type(2);
  tracker_type[0] = trackerType_image;
#if defined(VISP_HAVE_PUGIXML)
  tracker.loadConfigFile(input_directory + "/Config/chateau.xml", input_directory + "/Config/chateau_depth.xml");
  tracker.setCameraParameters(cam_color, cam_depth);
  tracker.setMovingEdge(me);

#if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
  tracker.setKltMaskBorder(5);
  tracker.setKltOpencv(klt);
  tracker.setDepthNormalPclPlaneEstimationMethod(2);
  tracker.setDepthNormalPclPlaneEstimationRansacMaxIter(200);
  tracker.setDepthNormalPclPlaneEstimationRansacThreshold(0.001);
  tracker.setDepthNormalSamplingStep(2, 2);

  tracker.setDepthDenseSamplingStep(4, 4);

  tracker.setNearClippingDistance(0.01);
  tracker.setFarClippingDistance(2.0);
#ifdef VISP_HAVE_COIN3D
  tracker.loadModel(input_directory + "/Models/chateau.wrl", input_directory + "/Models/chateau.cao");
  tracker.loadModel(input_directory + "/Models/chateau.cao", input_directory + "/Models/chateau.cao");
  tracker.loadModel(input_directory + "/Models/cube.cao", false, T);
  tracker.getCameraParameters(cam_color, cam_depth);
  tracker.setDisplayFeatures(true);
  tracker.setScanLineVisibilityTest(useScanline);
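  // Pose error thresholds (translation, rotation) used to validate the estimated pose against the ground truth, per tracker configuration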
  std::map<int, std::pair<double, double> > map_thresh;
#ifdef VISP_HAVE_COIN3D
      = useScanline ? std::pair<double, double>(0.005, 3.9) : std::pair<double, double>(0.007, 2.9);
#if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
      = useScanline ? std::pair<double, double>(0.006, 1.9) : std::pair<double, double>(0.005, 1.3);
      = useScanline ? std::pair<double, double>(0.005, 3.2) : std::pair<double, double>(0.006, 2.8);
      = useScanline ? std::pair<double, double>(0.003, 1.7) : std::pair<double, double>(0.002, 0.8);
#if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
      = std::pair<double, double>(0.002, 0.3);
      = useScanline ? std::pair<double, double>(0.002, 1.8) : std::pair<double, double>(0.002, 0.7);
      = useScanline ? std::pair<double, double>(0.007, 2.3) : std::pair<double, double>(0.007, 2.1);
#if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
      = useScanline ? std::pair<double, double>(0.006, 1.7) : std::pair<double, double>(0.005, 1.4);
      = useScanline ? std::pair<double, double>(0.004, 1.2) : std::pair<double, double>(0.004, 1.0);
      = useScanline ? std::pair<double, double>(0.002, 0.7) : std::pair<double, double>(0.001, 0.4);
#if defined(VISP_HAVE_MODULE_KLT) && (defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x020100))
      = std::pair<double, double>(0.002, 0.3);
      = useScanline ? std::pair<double, double>(0.001, 0.5) : std::pair<double, double>(0.001, 0.4);
  std::vector<vpColVector> pointcloud;
  if (!read_data(input_directory, cpt_frame, cam_depth, I, I_depth_raw, pointcloud, cMo_truth)) {
    std::cerr << "Cannot read first frame!" << std::endl;

  const double roi_step = 7.0;
  const double roi_step2 = 6.0;
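  // Restrict the tracking mask to the central part of the image, roughly between 1/7 and 6/7 of its size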
  for (unsigned int i = (unsigned int)(I.getRows()/roi_step); i < (unsigned int)(I.getRows()*roi_step2/roi_step); i++) {
    for (unsigned int j = (unsigned int)(I.getCols()/roi_step); j < (unsigned int)(I.getCols()*roi_step2/roi_step); j++) {

  tracker.setMask(mask);
#ifdef VISP_HAVE_DISPLAY
  display1.init(I, 0, 0, "Image");

  depth_M_color[0][3] = -0.05;
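  // depth_M_color is the transformation between the color and depth camera frames (5 cm offset along x);
  // register it for "Camera2" and initialize the tracker from the ground-truth pose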
  tracker.setCameraTransformationMatrix("Camera2", depth_M_color);
  tracker.initFromPose(I, cMo_truth);
  bool click = false, quit = false;
  std::vector<double> vec_err_t, vec_err_tu;
  std::vector<double> time_vec;
  while (read_data(input_directory, cpt_frame, cam_depth, I, I_depth_raw, pointcloud, cMo_truth) && !quit
         && (opt_lastFrame > 0 ? (int)cpt_frame <= opt_lastFrame : true)) {
      convert(I, resultsColor);
      convert(I_depth, resultsDepth);
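    // Gather the per-camera inputs: the color image for "Camera1", the point cloud and its dimensions for "Camera2"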
    std::map<std::string, const vpImage<Type> *> mapOfImages;
    mapOfImages["Camera1"] = &I;
    std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds;
    mapOfPointclouds["Camera2"] = &pointcloud;
    std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
    mapOfWidths["Camera2"] = 0;
    mapOfHeights["Camera2"] = 0;
    mapOfWidths["Camera2"] = I_depth.getWidth();
    mapOfHeights["Camera2"] = I_depth.getHeight();
    tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
    time_vec.push_back(t);
      tracker.display(I, I_depth, cMo, depth_M_color*cMo, cam_color, cam_depth, vpColor::red, 3);
      std::stringstream ss;
      ss << "Frame: " << cpt_frame;
      ss << "Nb features: " << tracker.getError().getRows();
      std::map<std::string, std::vector<std::vector<double> > > mapOfModels;
      std::map<std::string, unsigned int> mapOfW;
      std::map<std::string, unsigned int> mapOfH;
      mapOfH["Camera1"] = I_depth.getWidth();
      std::map<std::string, vpHomogeneousMatrix> mapOfcMos;
      mapOfcMos["Camera1"] = cMo;
      mapOfcMos["Camera2"] = depth_M_color*cMo;
      std::map<std::string, vpCameraParameters> mapOfCams;
      mapOfCams["Camera1"] = cam_color;
      mapOfCams["Camera2"] = cam_depth;
      tracker.getModelForDisplay(mapOfModels, mapOfW, mapOfH, mapOfcMos, mapOfCams);
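      // Iterate over the model primitives returned for each camera (the first value of a primitive encodes its type)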
      for (std::map<std::string, std::vector<std::vector<double> > >::const_iterator it = mapOfModels.begin();
           it != mapOfModels.end(); ++it) {
        for (size_t i = 0; i < it->second.size(); i++) {
          if (std::fabs(it->second[i][0]) <= std::numeric_limits<double>::epsilon()) {
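      // Also retrieve the tracked features of each camera for display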
      std::map<std::string, std::vector<std::vector<double> > > mapOfFeatures;
      tracker.getFeaturesForDisplay(mapOfFeatures);
      for (std::map<std::string, std::vector<std::vector<double> > >::const_iterator it = mapOfFeatures.begin();
           it != mapOfFeatures.end(); ++it) {
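        // it->second[i][0] distinguishes the feature type (0: moving edge, 1: KLT); for moving edges,
        // it->second[i][3] gives the site state used to select the display color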
        for (size_t i = 0; i < it->second.size(); i++) {
          if (std::fabs(it->second[i][0]) <= std::numeric_limits<double>::epsilon()) {
            if (std::fabs(it->second[i][3]) <= std::numeric_limits<double>::epsilon()) {
            } else if (std::fabs(it->second[i][3] - 1) <= std::numeric_limits<double>::epsilon()) {
            } else if (std::fabs(it->second[i][3] - 2) <= std::numeric_limits<double>::epsilon()) {
            } else if (std::fabs(it->second[i][3] - 3) <= std::numeric_limits<double>::epsilon()) {
            } else if (std::fabs(it->second[i][3] - 4) <= std::numeric_limits<double>::epsilon()) {
          } else if (std::fabs(it->second[i][0] - 1) <= std::numeric_limits<double>::epsilon()) {
      std::ostringstream oss;
      oss << "Tracking time: " << t << " ms";
    for (unsigned int i = 0; i < 3; i++) {
      t_est[i] = pose_est[i];
      t_truth[i] = pose_truth[i];
      tu_est[i] = pose_est[i+3];
      tu_truth[i] = pose_truth[i+3];

    vpColVector t_err = t_truth-t_est, tu_err = tu_truth-tu_est;
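    // Store the per-frame translation and rotation errors and check them against the thresholds for this tracker configuration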
    vec_err_t.push_back( t_err2 );
    vec_err_tu.push_back( tu_err2 );
    if ( !use_mask && (t_err2 > t_thresh || tu_err2 > tu_thresh) ) {
      std::cerr << "Estimated pose exceeds the threshold (t_thresh = " << t_thresh
                << " ; tu_thresh = " << tu_thresh << ")!" << std::endl;
      std::cout << "t_err: " << t_err2 << " ; tu_err: " << tu_err2 << std::endl;
        std::ostringstream oss;
        oss << "results/image_%04d.png";
        sprintf(buffer, oss.str().c_str(), cpt_frame);
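        // Stack the depth view to the right of the color view before saving the result image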
        results.insert(resultsDepth, vpImagePoint(0, resultsColor.getWidth()));
    if (opt_display && opt_click_allowed) {
  if (!time_vec.empty())

  if (!vec_err_t.empty())
    std::cout << "Max translation error: " << *std::max_element(vec_err_t.begin(), vec_err_t.end()) << std::endl;

  if (!vec_err_tu.empty())
    std::cout << "Max thetau error: " << *std::max_element(vec_err_tu.begin(), vec_err_tu.end()) << std::endl;
#if defined(VISP_HAVE_COIN3D) && (COIN_MAJOR_VERSION >= 2)
int main(int argc, const char *argv[])
  std::string env_ipath;
  std::string opt_ipath = "";
  bool opt_click_allowed = true;
  bool opt_display = true;
  bool opt_save = false;
  bool useScanline = false;
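  // Presumably to keep the test duration reasonable, only the first frames are processed on MIPS targets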
#if defined(__mips__) || defined(__mips) || defined(mips) || defined(__MIPS__)
  int opt_lastFrame = 5;
  int opt_lastFrame = -1;
  bool use_depth = false;
  bool use_mask = false;
  bool use_color_image = false;
  if (!getOptions(argc, argv, opt_ipath, opt_click_allowed, opt_display, opt_save,
                  useScanline, trackerType_image, opt_lastFrame, use_depth,
                  use_mask, use_color_image)) {
  std::cout << "trackerType_image: " << trackerType_image << std::endl;
  std::cout << "useScanline: " << useScanline << std::endl;
  std::cout << "use_depth: " << use_depth << std::endl;
  std::cout << "use_mask: " << use_mask << std::endl;
  std::cout << "use_color_image: " << use_color_image << std::endl;
#ifdef VISP_HAVE_COIN3D
  std::cout << "COIN3D available." << std::endl;
#if !defined(VISP_HAVE_MODULE_KLT) || (!defined(VISP_HAVE_OPENCV) || (VISP_HAVE_OPENCV_VERSION < 0x020100))
  if (trackerType_image & 2) {
    std::cout << "KLT features cannot be used: ViSP is not built with "
                 "KLT module or OpenCV is not available.\nTest is not run."
    if (opt_ipath.empty() && env_ipath.empty()) {
      usage(argv[0], NULL);
      std::cerr << std::endl << "ERROR:" << std::endl;
      std::cerr << "  Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH " << std::endl
                << "  environment variable to specify the location of the " << std::endl
                << "  image path where test images are located." << std::endl
    std::string input_directory = vpIoTools::createFilePath(!opt_ipath.empty() ? opt_ipath : env_ipath, "mbt-depth/Castle-simu");
      std::cerr << "ViSP-images does not contain the folder: " << input_directory << "!" << std::endl;

    if (use_color_image) {
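      // -C selects the vpRGBa instantiation of run(); otherwise the grayscale (unsigned char) one is used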
      return run<vpRGBa>(input_directory, opt_click_allowed, opt_display, useScanline,
                         trackerType_image, opt_lastFrame, use_depth, use_mask, opt_save);

      return run<unsigned char>(input_directory, opt_click_allowed, opt_display, useScanline,
                                trackerType_image, opt_lastFrame, use_depth, use_mask, opt_save);
    std::cout << "Caught an exception: " << e << std::endl;
  std::cout << "Enable MBT module (VISP_HAVE_MODULE_MBT) to launch this test." << std::endl;