Visual Servoing Platform version 3.2.0
tutorial-mb-generic-tracker-live.cpp
#include <visp3/core/vpConfig.h>
#ifdef VISP_HAVE_MODULE_SENSOR
#include <visp3/sensor/vpV4l2Grabber.h>
#include <visp3/sensor/vp1394CMUGrabber.h>
#include <visp3/sensor/vp1394TwoGrabber.h>
#include <visp3/sensor/vpFlyCaptureGrabber.h>
#include <visp3/sensor/vpRealSense2.h>
#endif
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/vision/vpKeyPoint.h>
#include <visp3/mbt/vpMbGenericTracker.h>
//#undef VISP_HAVE_V4L2
//#undef VISP_HAVE_DC1394
//#undef VISP_HAVE_CMU1394
//#undef VISP_HAVE_FLYCAPTURE
//#undef VISP_HAVE_REALSENSE2
//#undef VISP_HAVE_OPENCV
int main(int argc, char **argv)
{
#if defined(VISP_HAVE_OPENCV) && \
(defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || (VISP_HAVE_OPENCV_VERSION >= 0x020100) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2) )
try {
std::string opt_modelname = "teabox";
int opt_tracker = 2;
int opt_device = 0; // For OpenCV and V4l2 grabber to set the camera device
double opt_proj_error_threshold = 20.;
bool opt_use_ogre = false;
bool opt_use_scanline = false;
bool opt_display_projection_error = false;
bool opt_learn = false;
bool opt_auto_init = false;
std::string opt_learning_data = "learning/data-learned.bin";
std::string opt_intrinsic_file = "";
std::string opt_camera_name = "";
for (int i = 0; i < argc; i++) {
if (std::string(argv[i]) == "--model") {
opt_modelname = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--tracker") {
opt_tracker = atoi(argv[i + 1]);
}
else if (std::string(argv[i]) == "--camera_device" && i + 1 < argc) {
opt_device = atoi(argv[i + 1]);
}
else if (std::string(argv[i]) == "--max_proj_error") {
opt_proj_error_threshold = atof(argv[i + 1]);
} else if (std::string(argv[i]) == "--use_ogre") {
opt_use_ogre = true;
} else if (std::string(argv[i]) == "--use_scanline") {
opt_use_scanline = true;
} else if (std::string(argv[i]) == "--learn") {
opt_learn = true;
} else if (std::string(argv[i]) == "--learning_data" && i+1 < argc) {
opt_learning_data = argv[i+1];
} else if (std::string(argv[i]) == "--auto_init") {
opt_auto_init = true;
} else if (std::string(argv[i]) == "--display_proj_error") {
opt_display_projection_error = true;
} else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
opt_intrinsic_file = std::string(argv[i + 1]);
} else if (std::string(argv[i]) == "--camera_name" && i + 1 < argc) {
opt_camera_name = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
std::cout << "\nUsage: " << argv[0]
<< " [--camera_device <camera device> (default: 0)]"
<< " [--intrinsic <intrinsic file> (default: empty)]"
<< " [--camera_name <camera name>] (default: empty)"
<< " [--model <model name> (default: teabox)]"
<< " [--tracker <0=egde|1=keypoint|2=hybrid> (default: 2)]"
<< " [--use_ogre] [--use_scanline]"
<< " [--max_proj_error <allowed projection error> (default: 20)]"
<< " [--learn] [--auto_init] [--learning_data <data-learned.bin> (default: learning/data-learned.bin)]"
<< " [--display_proj_error]"
<< " [--help] [-h]\n"
<< std::endl;
return 0;
}
}
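// Example invocation (paths are illustrative only and depend on where the model files live):
//   ./tutorial-mb-generic-tracker-live --model model/teabox/teabox.cao --tracker 2 --use_scanline
// A first run with --learn records keypoints on the tracked object; a later run with --auto_init
// reuses learning/data-learned.bin to initialize the pose without any user click.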
std::string parentname = vpIoTools::getParent(opt_modelname);
std::string objectname = vpIoTools::getNameWE(opt_modelname);
if (!parentname.empty())
objectname = parentname + "/" + objectname;
std::cout << "Tracker requested config files: " << objectname << ".[init, cao]" << std::endl;
std::cout << "Tracker optional config files: " << objectname << ".[ppm]" << std::endl;
std::cout << "Tracked features: " << std::endl;
std::cout << " Use edges : " << (opt_tracker == 0 || opt_tracker == 2) << std::endl;
std::cout << " Use klt : " << (opt_tracker == 1 || opt_tracker == 2) << std::endl;
std::cout << "Tracker options: " << std::endl;
std::cout << " Use ogre : " << opt_use_ogre << std::endl;
std::cout << " Use scanline: " << opt_use_scanline << std::endl;
std::cout << " Proj. error : " << opt_proj_error_threshold << std::endl;
std::cout << " Display proj. error: " << opt_display_projection_error << std::endl;
std::cout << "Config files: " << std::endl;
std::cout << " Config file: " << "\"" << objectname + ".xml" << "\"" << std::endl;
std::cout << " Model file : " << "\"" << objectname + ".cao" << "\"" << std::endl;
std::cout << " Init file : " << "\"" << objectname + ".init" << "\"" << std::endl;
std::cout << "Learning options : " << std::endl;
std::cout << " Learn : " << opt_learn << std::endl;
std::cout << " Auto init : " << opt_auto_init << std::endl;
std::cout << " Learning data: " << opt_learning_data << std::endl;
vpImage<unsigned char> I;
vpCameraParameters cam;
cam.initPersProjWithoutDistortion(839, 839, 325, 243);
#ifdef VISP_HAVE_XML2
vpXmlParserCamera parser;
if (!opt_intrinsic_file.empty() && !opt_camera_name.empty())
parser.parse(cam, opt_intrinsic_file, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
#endif
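// Pick the frame grabber from the available third parties, in this order of preference:
// V4L2, DC1394, CMU1394, FlyCapture, RealSense2 and, as a last resort, OpenCV cv::VideoCapture.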
#if defined(VISP_HAVE_V4L2)
vpV4l2Grabber g;
std::ostringstream device;
device << "/dev/video" << opt_device;
std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
g.setDevice(device.str());
g.setScale(1);
g.open(I);
#elif defined(VISP_HAVE_DC1394)
(void)opt_device; // To avoid non used warning
std::cout << "Use DC1394 grabber" << std::endl;
g.open(I);
#elif defined(VISP_HAVE_CMU1394)
(void)opt_device; // To avoid non used warning
std::cout << "Use CMU1394 grabber" << std::endl;
g.open(I);
#elif defined(VISP_HAVE_FLYCAPTURE)
(void)opt_device; // To avoid non used warning
std::cout << "Use FlyCapture grabber" << std::endl;
g.open(I);
#elif defined(VISP_HAVE_REALSENSE2)
(void)opt_device; // To avoid non used warning
std::cout << "Use Realsense 2 grabber" << std::endl;
rs2::config config;
config.disable_stream(RS2_STREAM_DEPTH);
config.disable_stream(RS2_STREAM_INFRARED);
config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
g.open(config);
g.acquire(I);
std::cout << "Read camera parameters from Realsense device" << std::endl;
#elif defined(VISP_HAVE_OPENCV)
std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
cv::VideoCapture g(opt_device); // Open the default camera
if (!g.isOpened()) { // Check if we succeeded
std::cout << "Failed to open the camera" << std::endl;
return -1;
}
cv::Mat frame;
g >> frame; // get a new frame from camera
vpImageConvert::convert(frame, I);
#endif
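// Create a display adapted to the platform: X11 on unix-like systems, GDI on Windows,
// otherwise the OpenCV-based display.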
vpDisplay *display = NULL;
#if defined(VISP_HAVE_X11)
display = new vpDisplayX;
#elif defined(VISP_HAVE_GDI)
display = new vpDisplayGDI;
#else
display = new vpDisplayOpenCV;
#endif
display->init(I, 100, 100, "Model-based tracker");
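// Grab and show images until the user clicks, so that the object can be placed in the field of view.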
while (true) {
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
g >> frame;
vpImageConvert::convert(frame, I);
#endif
vpDisplay::display(I);
vpDisplay::displayText(I, 20, 20, "Click when ready.", vpColor::red);
vpDisplay::flush(I);
if (vpDisplay::getClick(I, false)) {
break;
}
}
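// Build the tracker: 0 = moving-edges only, 1 = KLT keypoints only, 2 = hybrid (edges + KLT).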
vpMbGenericTracker tracker;
tracker.setProjectionErrorComputation(true); // To detect tracking failure
if (opt_tracker == 0)
tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER);
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV)
else if (opt_tracker == 1)
tracker.setTrackerType(vpMbGenericTracker::KLT_TRACKER);
else
tracker.setTrackerType(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
#else
else {
# if !defined(VISP_HAVE_MODULE_KLT)
std::cout << "klt and hybrid model-based tracker are not available since visp_klt module is not available. "
"In CMakeGUI turn visp_klt module ON, configure and build ViSP again."
<< std::endl;
# else
std::cout << "Hybrid tracking is impossible since OpenCV is not enabled. "
<< "Install OpenCV, configure and build ViSP again to run this tutorial."
<< std::endl;
# endif
return EXIT_SUCCESS;
}
#endif
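// Tracker settings come either from <object>.xml when that file exists, or from the
// hard-coded moving-edge and KLT parameters below.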
bool usexml = false;
#ifdef VISP_HAVE_XML2
if (vpIoTools::checkFilename(objectname + ".xml")) {
tracker.loadConfigFile(objectname + ".xml");
usexml = true;
}
#endif
if (!usexml) {
if (opt_tracker == 0 || opt_tracker == 2) {
vpMe me;
me.setMaskSize(5);
me.setMaskNumber(180);
me.setRange(8);
me.setThreshold(10000);
me.setMu1(0.5);
me.setMu2(0.5);
tracker.setMovingEdge(me);
}
#ifdef VISP_HAVE_MODULE_KLT
if (opt_tracker == 1 || opt_tracker == 2) {
vpKltOpencv klt_settings;
klt_settings.setMaxFeatures(300);
klt_settings.setWindowSize(5);
klt_settings.setQuality(0.015);
klt_settings.setMinDistance(8);
klt_settings.setHarrisFreeParameter(0.01);
klt_settings.setBlockSize(3);
klt_settings.setPyramidLevels(3);
tracker.setKltOpencv(klt_settings);
tracker.setKltMaskBorder(5);
}
#endif
}
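// Load the CAD model, enable feature display and the requested visibility tests.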
tracker.setCameraParameters(cam);
tracker.loadModel(objectname + ".cao");
tracker.setDisplayFeatures(true);
tracker.setOgreVisibilityTest(opt_use_ogre);
tracker.setScanLineVisibilityTest(opt_use_scanline);
tracker.setProjectionErrorDisplay(opt_display_projection_error);
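// Keypoint detector/extractor/matcher used for learning and automatic initialization:
// SIFT with brute-force matching when OpenCV nonfree/xfeatures2d is available, FAST + ORB otherwise.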
#if (defined(VISP_HAVE_OPENCV_NONFREE) || defined(VISP_HAVE_OPENCV_XFEATURES2D))
std::string detectorName = "SIFT";
std::string extractorName = "SIFT";
std::string matcherName = "BruteForce";
#else
std::string detectorName = "FAST";
std::string extractorName = "ORB";
std::string matcherName = "BruteForce-Hamming";
#endif
vpKeyPoint keypoint;
if (opt_learn || opt_auto_init) {
keypoint.setDetector(detectorName);
keypoint.setExtractor(extractorName);
keypoint.setMatcher(matcherName);
#if !(defined(VISP_HAVE_OPENCV_NONFREE) || defined(VISP_HAVE_OPENCV_XFEATURES2D))
# if (VISP_HAVE_OPENCV_VERSION < 0x030000)
keypoint.setDetectorParameter("ORB", "nLevels", 1);
# else
cv::Ptr<cv::ORB> orb_detector = keypoint.getDetector("ORB").dynamicCast<cv::ORB>();
if (orb_detector) {
orb_detector->setNLevels(1);
}
# endif
#endif
}
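// Automatic initialization needs a learning file produced by a previous run with --learn;
// otherwise the initial pose is given by clicking the points defined in <object>.init.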
vpHomogeneousMatrix cMo;
if (opt_auto_init) {
if (!vpIoTools::checkFilename(opt_learning_data)) {
std::cout << "Cannot enable auto detection. Learning file \"" << opt_learning_data << "\" doesn't exist" << std::endl;
return EXIT_FAILURE;
}
keypoint.loadLearningData(opt_learning_data, true);
}
else {
tracker.initClick(I, objectname + ".init", true);
}
bool learn_position = false;
bool run_auto_init = false;
if (opt_auto_init) {
run_auto_init = true;
}
//To be able to display keypoints matching with test-detection-rs2
int learn_id = 1;
bool quit = false;
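// Main loop: grab an image, (re)initialize if requested, track, check the projection error,
// display the result and handle user clicks.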
while (!quit) {
double t_begin = vpTime::measureTimeMs();
bool tracking_failed = false;
#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
g.acquire(I);
#elif defined(VISP_HAVE_OPENCV)
g >> frame;
vpImageConvert::convert(frame, I);
#endif
vpDisplay::display(I);
// Run auto initialization from learned data
if (run_auto_init) {
if (keypoint.matchPoint(I, cam, cMo)) {
std::cout << "Auto init succeed" << std::endl;
tracker.initFromPose(I, cMo);
} else {
vpDisplay::flush(I);
continue;
}
}
// Run the tracker
try {
if (run_auto_init) {
// Turn display features off just after auto init to not display wrong moving-edge if the tracker fails
tracker.setDisplayFeatures(false);
run_auto_init = false;
}
tracker.track(I);
} catch (const vpException &e) {
std::cout << "Tracker exception: " << e.getStringMessage() << std::endl;
tracking_failed = true;
if (opt_auto_init) {
std::cout << "Tracker needs to restart (tracking exception)" << std::endl;
run_auto_init = true;
}
}
if (!tracking_failed) {
double proj_error = 0;
if (tracker.getTrackerType() & vpMbGenericTracker::EDGE_TRACKER) {
// Check tracking errors
proj_error = tracker.getProjectionError();
}
else {
tracker.getPose(cMo);
tracker.getCameraParameters(cam);
proj_error = tracker.computeCurrentProjectionError(I, cMo, cam);
}
if (proj_error > opt_proj_error_threshold) {
std::cout << "Tracker needs to restart (projection error detected: " << proj_error << ")" << std::endl;
if (opt_auto_init) {
run_auto_init = true;
}
tracking_failed = true;
}
}
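// When tracking succeeded, display the model, the object frame and the estimated pose.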
if (! tracking_failed) {
tracker.setDisplayFeatures(true);
tracker.getPose(cMo);
tracker.getCameraParameters(cam);
tracker.display(I, cMo, cam, vpColor::green, 2, false);
vpDisplay::displayFrame(I, cMo, cam, 0.025, vpColor::none, 3);
{ // Display estimated pose in [m] and [deg]
vpPoseVector pose(cMo);
std::stringstream ss;
ss << "Translation: " << std::setprecision(5) << pose[0] << " " << pose[1] << " " << pose[2] << " [m]";
vpDisplay::displayText(I, 80, 20, ss.str(), vpColor::green);
ss.str(""); // erase ss
ss << "Rotation tu: " << std::setprecision(4) << vpMath::deg(pose[3]) << " " << vpMath::deg(pose[4]) << " " << vpMath::deg(pose[5]) << " [deg]";
vpDisplay::displayText(I, 100, 20, ss.str(), vpColor::green);
}
}
if (learn_position) {
// Detect keypoints on the current image
std::vector<cv::KeyPoint> trainKeyPoints;
keypoint.detect(I, trainKeyPoints);
// Keep only keypoints on the cube
std::vector<vpPolygon> polygons;
std::vector<std::vector<vpPoint> > roisPt;
std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > pair = tracker.getPolygonFaces();
polygons = pair.first;
roisPt = pair.second;
// Compute the 3D coordinates
std::vector<cv::Point3f> points3f;
vpKeyPoint::compute3DForPointsInPolygons(cMo, cam, trainKeyPoints, polygons, roisPt, points3f);
// Build the reference keypoints
keypoint.buildReference(I, trainKeyPoints, points3f, true, learn_id++);
// Display learned data
for (std::vector<cv::KeyPoint>::const_iterator it = trainKeyPoints.begin(); it != trainKeyPoints.end(); ++it) {
vpDisplay::displayCross(I, (int)it->pt.y, (int)it->pt.x, 10, vpColor::yellow, 3);
}
learn_position = false;
std::cout << "Data learned" << std::endl;
}
std::stringstream ss;
ss << "Loop time: " << vpTime::measureTimeMs() - t_begin << " ms";
vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
if (opt_learn)
vpDisplay::displayText(I, 35, 20, "Left click: learn Right click: quit", vpColor::red);
else if (opt_auto_init)
vpDisplay::displayText(I, 35, 20, "Left click: auto_init Right click: quit", vpColor::red);
else
vpDisplay::displayText(I, 35, 20, "Right click: quit", vpColor::red);
vpDisplay::flush(I);
vpMouseButton::vpMouseButtonType button;
if (vpDisplay::getClick(I, button, false)) {
if (button == vpMouseButton::button3) {
quit = true;
} else if (button == vpMouseButton::button1 && opt_learn) {
learn_position = true;
} else if (button == vpMouseButton::button1 && opt_auto_init && !opt_learn) {
run_auto_init = true;
}
}
}
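// Save the learned keypoints so that a later run with --auto_init can reuse them.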
if (opt_learn) {
std::cout << "Save learning file: " << opt_learning_data << std::endl;
keypoint.saveLearningData(opt_learning_data, true, true);
}
delete display;
} catch (const vpException &e) {
std::cout << "Catch a ViSP exception: " << e << std::endl;
}
#elif defined(VISP_HAVE_OPENCV)
(void) argc;
(void) argv;
std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, Realsense2), configure and build ViSP again to use this example" << std::endl;
#else
(void) argc;
(void) argv;
std::cout << "Install OpenCV 3rd party, configure and build ViSP again to use this example" << std::endl;
#endif
}