Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
tutorial-apriltag-detector-live.cpp
1
2#include <iostream>
3
4#include <visp3/core/vpConfig.h>
5
7// If openCV available, priority to OpenCV capture, otherwise the user has to modify the code uncommenting/commenting
8// one of the following lines
9#if defined(VISP_HAVE_OPENCV) && (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
10 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
11#undef VISP_HAVE_V4L2
12#undef VISP_HAVE_DC1394
13#undef VISP_HAVE_CMU1394
14#undef VISP_HAVE_FLYCAPTURE
15#undef VISP_HAVE_REALSENSE2
16// #undef HAVE_OPENCV_HIGHGUI
17// #undef HAVE_OPENCV_VIDEOIO
18#else
19// Use the first grabber that is available. Uncomment/comment the following lines to disable usage of a grabber
20// #undef VISP_HAVE_V4L2
21// #undef VISP_HAVE_DC1394
22// #undef VISP_HAVE_CMU1394
23// #undef VISP_HAVE_FLYCAPTURE
24// #undef VISP_HAVE_REALSENSE2
25#undef HAVE_OPENCV_HIGHGUI
26#undef HAVE_OPENCV_VIDEOIO
27#endif
29
31#if defined(VISP_HAVE_APRILTAG) && \
32 (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
33 defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2) || defined(VISP_HAVE_OPENCV) && \
34 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
35 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO))))
36
38
39#ifdef VISP_HAVE_MODULE_SENSOR
40#include <visp3/sensor/vp1394CMUGrabber.h>
41#include <visp3/sensor/vp1394TwoGrabber.h>
42#include <visp3/sensor/vpFlyCaptureGrabber.h>
43#include <visp3/sensor/vpRealSense2.h>
44#include <visp3/sensor/vpV4l2Grabber.h>
45#endif
47#include <visp3/detection/vpDetectorAprilTag.h>
49#include <visp3/core/vpXmlParserCamera.h>
50#include <visp3/gui/vpDisplayFactory.h>
51
52#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)
53#include <opencv2/highgui/highgui.hpp> // for cv::VideoCapture
54#elif defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)
55#include <opencv2/videoio/videoio.hpp>
56#endif
57
void usage(const char **argv, int error);

/*!
 * Print the command line synopsis and the description of every supported
 * option on std::cout.
 *
 * \param argv : Command line arguments; argv[0] is used as the program name
 * and, when \e error is non-zero, argv[error] is reported as the unsupported
 * parameter.
 * \param error : Index of the faulty command line argument, or 0 when the
 * helper was requested explicitly (in that case no error message is printed).
 */
void usage(const char **argv, int error)
{
  std::cout << "Synopsis" << std::endl
    << " " << argv[0]
    << " [--camera-device <id>]"
    << " [--tag-size <size>]"
    << " [--tag-family <family>]"
    << " [--tag-decision-margin-threshold <threshold>]"
    << " [--tag-hamming-distance-threshold <threshold>]"
    << " [--tag-z-aligned]"
    << " [--tag-quad-decimate <factor>]"
    << " [--tag-n-threads <number>]"
    << " [--tag-pose-method <method>]"
#if defined(VISP_HAVE_PUGIXML)
    << " [--intrinsic <xmlfile>]"
    << " [--camera-name <name>]"
#endif
#if defined(VISP_HAVE_DISPLAY)
    << " [--display-tag]"
    << " [--display-off]"
    << " [--color <id>]"
    << " [--thickness <thickness>]" // FIX: closing ']' was missing
#endif
    << " [--verbose, -v]" // FIX: option handled by main() and described below, but was missing from the synopsis
    << " [--help, -h]" << std::endl
    << std::endl;
  std::cout << "Description" << std::endl
    << " Compute the pose of an Apriltag in images acquired with a camera." << std::endl
    << std::endl
    << " --camera-device <id>" << std::endl
    << " Camera device id." << std::endl
    << " Default: 0" << std::endl
    << std::endl
    << " --tag-size <size>" << std::endl
    << " Apriltag size in [m]." << std::endl
    // FIX: the help text said 0.03 while main() initializes opt_tag_size to 0.053
    << " Default: 0.053" << std::endl
    << std::endl
    << " --tag-family <family>" << std::endl
    << " Apriltag family. Supported values are:" << std::endl
    << " 0: TAG_36h11" << std::endl
    << " 1: TAG_36h10 (DEPRECATED)" << std::endl
    << " 2: TAG_36ARTOOLKIT (DEPRECATED)" << std::endl
    << " 3: TAG_25h9" << std::endl
    << " 4: TAG_25h7 (DEPRECATED)" << std::endl
    << " 5: TAG_16h5" << std::endl
    << " 6: TAG_CIRCLE21h7" << std::endl
    << " 7: TAG_CIRCLE49h12" << std::endl
    << " 8: TAG_CUSTOM48h12" << std::endl
    << " 9: TAG_STANDARD41h12" << std::endl
    << " 10: TAG_STANDARD52h13" << std::endl
    << " 11: TAG_ARUCO_4x4_50" << std::endl
    << " 12: TAG_ARUCO_4x4_100" << std::endl
    << " 13: TAG_ARUCO_4x4_250" << std::endl
    << " 14: TAG_ARUCO_4x4_1000" << std::endl
    << " 15: TAG_ARUCO_5x5_50" << std::endl
    << " 16: TAG_ARUCO_5x5_100" << std::endl
    << " 17: TAG_ARUCO_5x5_250" << std::endl
    << " 18: TAG_ARUCO_5x5_1000" << std::endl
    << " 19: TAG_ARUCO_6x6_50" << std::endl
    << " 20: TAG_ARUCO_6x6_100" << std::endl
    << " 21: TAG_ARUCO_6x6_250" << std::endl
    << " 22: TAG_ARUCO_6x6_1000" << std::endl
    << " 23: TAG_ARUCO_7x7_50" << std::endl
    << " 24: TAG_ARUCO_7x7_100" << std::endl
    << " 25: TAG_ARUCO_7x7_250" << std::endl
    << " 26: TAG_ARUCO_7x7_1000" << std::endl
    << " 27: TAG_ARUCO_MIP_36h12" << std::endl
    << " Default: 0 (36h11)" << std::endl
    << std::endl
    << " --tag-decision-margin-threshold <threshold>" << std::endl
    << " Threshold used to discard low-confident detections. A typical value is " << std::endl
    << " around 100. The higher this value, the more false positives will be filtered" << std::endl
    << " out. When this value is set to -1, false positives are not filtered out." << std::endl
    << " Default: 50" << std::endl
    << std::endl
    << " --tag-hamming-distance-threshold <threshold>" << std::endl
    << " Threshold used to discard low-confident detections with corrected bits." << std::endl
    << " A typical value is between 0 and 3. The lower this value, the more false" << std::endl
    << " positives will be filtered out." << std::endl
    // FIX: the help text said 0 while main() initializes opt_tag_hamming_distance_threshold to 2
    << " Default: 2" << std::endl
    << std::endl
    << " --tag-quad-decimate <factor>" << std::endl
    << " Decimation factor used to detect a tag. " << std::endl
    << " Default: 1" << std::endl
    << std::endl
    << " --tag-n-threads <number>" << std::endl
    << " Number of threads used to detect a tag." << std::endl
    << " Default: 1" << std::endl
    << std::endl
    << " --tag-z-aligned" << std::endl
    << " When enabled, tag z-axis and camera z-axis are aligned." << std::endl
    << " Default: false" << std::endl
    << std::endl
    << " --tag-pose-method <method>" << std::endl
    << " Algorithm used to compute the tag pose from its 4 corners." << std::endl
    << " Possible values are:" << std::endl
    << " 0: HOMOGRAPHY" << std::endl
    << " 1: HOMOGRAPHY_VIRTUAL_VS" << std::endl
    << " 2: DEMENTHON_VIRTUAL_VS" << std::endl
    << " 3: LAGRANGE_VIRTUAL_VS" << std::endl
    << " 4: BEST_RESIDUAL_VIRTUAL_VS" << std::endl
    << " 5: HOMOGRAPHY_ORTHOGONAL_ITERATION" << std::endl
    << " Default: 1 (HOMOGRAPHY_VIRTUAL_VS)" << std::endl
    << std::endl
#if defined(VISP_HAVE_PUGIXML)
    << " --intrinsic <xmlfile>" << std::endl
    << " Camera intrinsic parameters file in xml format." << std::endl
    << " Default: empty" << std::endl
    << std::endl
    << " --camera-name <name>" << std::endl
    << " Camera name in the intrinsic parameters file in xml format." << std::endl
    << " Default: empty" << std::endl
    << std::endl
#endif
#if defined(VISP_HAVE_DISPLAY)
    << " --display-tag" << std::endl
    << " Flag used to enable displaying the edges of a tag." << std::endl
    << " Default: disabled" << std::endl
    << std::endl
    << " --display-off" << std::endl
    << " Flag used to turn display off." << std::endl
    << " Default: enabled" << std::endl
    << std::endl
    << " --color <id>" << std::endl
    << " Color id used to display the frame over each tag." << std::endl
    << " Possible values are:" << std::endl
    << " -1: R-G-B colors for X, Y, Z axis respectively" << std::endl
    << " 0: all axis in black" << std::endl
    << " 1: all axis in white" << std::endl
    << " ..." << std::endl
    << " Default: -1" << std::endl
    << std::endl
    << " --thickness <thickness>" << std::endl
    << " Thickness of the drawings in overlay." << std::endl
    << " Default: 2" << std::endl
    << std::endl
#endif
    << " --verbose, -v" << std::endl
    << " Enable verbosity." << std::endl
    << std::endl
    << " --help, -h" << std::endl
    << " Print this helper message." << std::endl
    << std::endl;

  // Report the offending argument only when an error index was given.
  if (error) {
    std::cout << "Error" << std::endl
      << " "
      << "Unsupported parameter " << argv[error] << std::endl;
  }
}
209
// Entry point of the ViSP AprilTag live-detection tutorial: grabs images from
// the first available camera framework, detects AprilTags in each frame and
// displays/prints their estimated pose.
// NOTE(review): the number embedded at the start of each line and the jumps in
// that numbering are Doxygen-extraction artifacts. The lines that carried
// snippet anchors (declarations of the tag family, the pose estimation method,
// the camera parameters, the XML parser, the image, the grabbers, the display
// creation and the elapsed-time computation) were dropped by the extraction
// and must be restored from the original tutorial source before compiling.
210int main(int argc, const char **argv)
211{
212#ifdef ENABLE_VISP_NAMESPACE
213 using namespace VISP_NAMESPACE_NAME;
214#endif
215
216 int opt_device = 0; // For OpenCV and V4l2 grabber to set the camera device
// NOTE(review): lines 217-218 were dropped here — presumably the declarations
// of opt_tag_family (default likely TAG_36h11, per the help text) and
// opt_tag_pose_estimation_method used below; confirm against the original.
219 double opt_tag_size = 0.053;
220 float opt_tag_quad_decimate = 1.0;
221 float opt_tag_decision_margin_threshold = 50;
222 int opt_tag_hamming_distance_threshold = 2;
223 int opt_tag_nThreads = 1;
224 std::string intrinsic_file = "";
225 std::string camera_name = "";
226 bool opt_display_tag = false;
227 int opt_color_id = -1;
228 unsigned int thickness = 2;
229 bool opt_tag_z_align_frame = false;
230 bool opt_verbose = false;
231
// Without any GUI third party the display stays off and a warning is printed.
232#if !(defined(VISP_HAVE_DISPLAY))
233 bool display_off = true;
234 std::cout << "Warning: There is no 3rd party (X11, GDI or openCV) to display images..." << std::endl;
235#else
236 bool display_off = false;
237#endif
238
240
// Parse the command line; any unknown option prints the helper and exits with
// EXIT_FAILURE (see the final else branch).
241 for (int i = 1; i < argc; ++i) {
242 if (std::string(argv[i]) == "--camera-device" && i + 1 < argc) {
243 opt_device = atoi(argv[++i]);
244 }
245 else if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
246 opt_tag_size = atof(argv[++i]);
247 }
248 else if (std::string(argv[i]) == "--tag-family" && i + 1 < argc) {
249 opt_tag_family = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[++i]);
250 }
251 else if (std::string(argv[i]) == "--tag-quad-decimate" && i + 1 < argc) {
252 opt_tag_quad_decimate = static_cast<float>(atof(argv[++i]));
253 }
254 else if (std::string(argv[i]) == "--tag-n-threads" && i + 1 < argc) {
255 opt_tag_nThreads = atoi(argv[++i]);
256 }
257 else if (std::string(argv[i]) == "--tag-z-aligned") {
258 opt_tag_z_align_frame = true;
259 }
260 else if (std::string(argv[i]) == "--tag-pose-method" && i + 1 < argc) {
261 opt_tag_pose_estimation_method = (vpDetectorAprilTag::vpPoseEstimationMethod)atoi(argv[++i]);
262 }
263 else if (std::string(argv[i]) == "--tag-decision-margin-threshold" && i + 1 < argc) {
264 opt_tag_decision_margin_threshold = static_cast<float>(atof(argv[++i]));
265 }
266 else if (std::string(argv[i]) == "--tag-hamming-distance-threshold" && i + 1 < argc) {
267 opt_tag_hamming_distance_threshold = atoi(argv[++i]);
268 }
269#if defined(VISP_HAVE_PUGIXML)
270 else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
271 intrinsic_file = std::string(argv[++i]);
272 }
273 else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
274 camera_name = std::string(argv[++i]);
275 }
276#endif
277#if defined(VISP_HAVE_DISPLAY)
278 else if (std::string(argv[i]) == "--display-tag") {
279 opt_display_tag = true;
280 }
281 else if (std::string(argv[i]) == "--display-off") {
282 display_off = true;
283 }
284 else if (std::string(argv[i]) == "--color" && i + 1 < argc) {
285 opt_color_id = atoi(argv[++i]);
286 }
287 else if (std::string(argv[i]) == "--thickness" && i + 1 < argc) {
288 thickness = static_cast<unsigned int>(atoi(argv[++i]));
289 }
290#endif
291 else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
292 opt_verbose = true;
293 }
294 else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
295 usage(argv, 0);
296 return EXIT_SUCCESS;
297 }
298 else {
299 usage(argv, i);
300 return EXIT_FAILURE;
301 }
302 }
303
// Display handle: smart pointer with C++11 and later, raw owning pointer
// otherwise (explicitly deleted at the end of main).
304#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
305 std::shared_ptr<vpDisplay> display;
306#else
307 vpDisplay *display = nullptr;
308#endif
309
310 try {
// NOTE(review): line 311 (presumably the vpCameraParameters 'cam' declaration)
// was dropped here; default intrinsics are hard-coded below.
312 cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, 312.1889954, 243.4373779);
313#if defined(VISP_HAVE_PUGIXML)
// Override the defaults from the XML file when both the file and the camera
// name were given. NOTE(review): the vpXmlParserCamera 'parser' declaration
// (line 314) was dropped by the extraction.
315 if (!intrinsic_file.empty() && !camera_name.empty())
316 parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
317#endif
318
// Grabber configuration: the first available framework is used. NOTE(review):
// each branch relied on a dropped declaration line (the image 'I' around line
// 319, and the grabber 'g' at the head of every branch except Realsense).
320#if defined(VISP_HAVE_V4L2)
322 std::ostringstream device;
323 device << "/dev/video" << opt_device;
324 std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
325 g.setDevice(device.str());
326 g.setScale(1);
327 g.open(I);
328#elif defined(VISP_HAVE_DC1394)
329 (void)opt_device; // To avoid non used warning
330 std::cout << "Use DC1394 grabber" << std::endl;
332 g.open(I);
333#elif defined(VISP_HAVE_CMU1394)
334 (void)opt_device; // To avoid non used warning
335 std::cout << "Use CMU1394 grabber" << std::endl;
337 g.open(I);
338#elif defined(VISP_HAVE_FLYCAPTURE)
339 (void)opt_device; // To avoid non used warning
340 std::cout << "Use FlyCapture grabber" << std::endl;
342 g.open(I);
343#elif defined(VISP_HAVE_REALSENSE2)
344 (void)opt_device; // To avoid non used warning
345 std::cout << "Use Realsense 2 grabber" << std::endl;
346 vpRealSense2 g;
347 rs2::config config;
348 config.disable_stream(RS2_STREAM_DEPTH);
349 config.disable_stream(RS2_STREAM_INFRARED);
350 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
351 g.open(config);
352 g.acquire(I);
353
354 std::cout << "Read camera parameters from Realsense device" << std::endl;
// NOTE(review): line 355, presumably overwriting 'cam' from the Realsense
// device (getCameraParameters per the cross references), was dropped here.
356#elif defined(VISP_HAVE_OPENCV) && \
357 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
358 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
359 std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
360 cv::VideoCapture g(opt_device); // Open the default camera
361 if (!g.isOpened()) { // Check if we succeeded
362 std::cout << "Failed to open the camera" << std::endl;
363 return EXIT_FAILURE;
364 }
365 cv::Mat frame;
366 g >> frame; // get a new frame from camera
367 vpImageConvert::convert(frame, I);
368#endif
370
// Print the effective intrinsics and detector settings for the user.
371 std::cout << cam << std::endl;
372 std::cout << "Tag detector settings" << std::endl;
373 std::cout << " Tag size [m] : " << opt_tag_size << std::endl;
374 std::cout << " Tag family : " << opt_tag_family << std::endl;
375 std::cout << " Quad decimate : " << opt_tag_quad_decimate << std::endl;
376 std::cout << " Decision margin threshold : " << opt_tag_decision_margin_threshold << std::endl;
377 std::cout << " Hamming distance threshold: " << opt_tag_hamming_distance_threshold << std::endl;
378 std::cout << " Num threads : " << opt_tag_nThreads << std::endl;
379 std::cout << " Z aligned : " << opt_tag_z_align_frame << std::endl;
380 std::cout << " Pose estimation : " << opt_tag_pose_estimation_method << std::endl;
381
382 if (!display_off) {
// NOTE(review): the display-creation lines (384/386, presumably
// vpDisplayFactory::createDisplay() / allocateDisplay() per the cross
// references) were dropped from this block.
383#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
385#else
387#endif
388 }
389
// Build and configure the tag detector for the requested family.
391 vpDetectorAprilTag detector(opt_tag_family);
393
395 detector.setAprilTagQuadDecimate(opt_tag_quad_decimate);
396 detector.setAprilTagPoseEstimationMethod(opt_tag_pose_estimation_method);
397 detector.setAprilTagNbThreads(opt_tag_nThreads);
398 detector.setDisplayTag(opt_display_tag, opt_color_id < 0 ? vpColor::none : vpColor::getColor(opt_color_id), thickness);
399 detector.setZAlignedWithCameraAxis(opt_tag_z_align_frame);
400 detector.setAprilTagDecisionMarginThreshold(opt_tag_decision_margin_threshold);
401 detector.setAprilTagHammingDistanceThreshold(opt_tag_hamming_distance_threshold);
403
// Acquisition/detection loop: runs until the user clicks in the display.
404 std::vector<double> time_vec;
405 bool quit = false;
406 while (!quit) {
408#if defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_DC1394) || defined(VISP_HAVE_CMU1394) || \
409 defined(VISP_HAVE_FLYCAPTURE) || defined(VISP_HAVE_REALSENSE2)
410 g.acquire(I);
411#elif defined(VISP_HAVE_OPENCV) && \
412 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
413 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)))
414 g >> frame;
415 vpImageConvert::convert(frame, I);
416#endif
418 if (opt_verbose) {
419 std::cout << "-- Process new image --" << std::endl;
420 }
421
423
// Detect the tags and estimate one pose (cMo) per detection.
// NOTE(review): lines 428-429 were dropped after the detect() call — as shown
// 't' would be an absolute timestamp; presumably the missing code computed the
// elapsed time (measureTimeMs() - t) before pushing it into time_vec.
424 double t = vpTime::measureTimeMs();
426 std::vector<vpHomogeneousMatrix> cMo_vec;
427 detector.detect(I, opt_tag_size, cam, cMo_vec);
430 time_vec.push_back(t);
431
432 std::stringstream ss;
433 ss << "Detection time: " << t << " ms for " << detector.getNbObjects() << " tags";
434 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
435
// Overlay the estimated frame of each detected tag.
437 for (size_t i = 0; i < cMo_vec.size(); ++i) {
438 vpDisplay::displayFrame(I, cMo_vec[i], cam, opt_tag_size / 2, vpColor::none, 3);
439 }
441
443 std::vector< std::vector<vpImagePoint> > tags_corners = detector.getTagsCorners();
444 detector.displayTags(I, tags_corners);
446
// Print each tag id next to its center of gravity.
448 std::vector<int> tags_id = detector.getTagsId();
449 for (size_t i = 0; i < tags_id.size(); ++i) {
450 std::stringstream ss;
451 ss << "id=" << tags_id[i];
452 vpDisplay::displayText(I, detector.getCog(i) + vpImagePoint(-10, 10), ss.str(), vpColor::blue);
453 }
455 vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
457 if (vpDisplay::getClick(I, false)) {
458 quit = true;
459 }
460
// Verbose mode: dump per-tag confidence metrics and the full pose matrix.
461 if (opt_verbose) {
462 std::vector<float> tag_decision_margins = detector.getTagsDecisionMargin();
463 std::vector<int> tag_hamming_distances = detector.getTagsHammingDistance();
464 for (size_t i = 0; i < tags_id.size(); ++i) {
465 std::string message = detector.getMessage(i);
466 std::stringstream ss;
467 ss << "Found " << message << std::endl
468 << "- with decision margin: " << tag_decision_margins[i]
469 << " and hamming distance: " << tag_hamming_distances[i] << std::endl
470 << "- and cMo:\n" << cMo_vec[i] << std::endl;
471 std::cout << ss.str() << std::endl;
472 }
473 }
474 }
475
// Timing statistics over the whole session.
476 std::cout << "Benchmark computation time" << std::endl;
477 std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
478 << " ; " << vpMath::getMedian(time_vec) << " ms"
479 << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;
480 }
// NOTE(review): exceptions are only reported; the program still exits with
// EXIT_SUCCESS below — confirm this is intended for a tutorial.
481 catch (const vpException &e) {
482 std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
483 }
484
// Pre-C++11 builds own the display through a raw pointer; release it here.
485#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
486 if (display != nullptr) {
487 delete display;
488 }
489#endif
490
491 return EXIT_SUCCESS;
492}
493
494#else
495
/*!
 * Fallback entry point, compiled when AprilTag support and/or a frame
 * grabbing third party is missing: it only tells the user what to install.
 */
int main()
{
#ifndef VISP_HAVE_APRILTAG
  const char *hint = "Enable Apriltag support, configure and build ViSP to run this tutorial";
  std::cout << hint << std::endl;
#else
  std::cout << "Install a 3rd party dedicated to frame grabbing (dc1394, cmu1394, v4l2, OpenCV, FlyCapture, "
            << "Realsense2), configure and build ViSP again to use this example"
            << std::endl;
#endif
  return EXIT_SUCCESS;
}
507
508#endif
Firewire cameras video capture based on CMU 1394 Digital Camera SDK.
void open(vpImage< unsigned char > &I)
Class for firewire ieee1394 video devices using libdc1394-2.x api.
void open(vpImage< unsigned char > &I)
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
static vpColor getColor(const unsigned int &i)
Definition vpColor.h:300
static const vpColor red
Definition vpColor.h:198
static const vpColor none
Definition vpColor.h:210
static const vpColor blue
Definition vpColor.h:204
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
void open(vpImage< unsigned char > &I)
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:131
static double getMedian(const std::vector< double > &v)
Definition vpMath.cpp:343
static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
Definition vpMath.cpp:374
static double getMean(const std::vector< double > &v)
Definition vpMath.cpp:323
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
Class that is a wrapper over the Video4Linux2 (V4L2) driver.
void open(vpImage< unsigned char > &I)
void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
void setDevice(const std::string &devname)
XML parser to load and save intrinsic camera parameters.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()