Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
mbot-apriltag-ibvs.cpp
1
2#include <visp3/core/vpConfig.h>
3#include <visp3/core/vpMomentAreaNormalized.h>
4#include <visp3/core/vpMomentBasic.h>
5#include <visp3/core/vpMomentCentered.h>
6#include <visp3/core/vpMomentDatabase.h>
7#include <visp3/core/vpMomentGravityCenter.h>
8#include <visp3/core/vpMomentGravityCenterNormalized.h>
9#include <visp3/core/vpMomentObject.h>
10#include <visp3/core/vpPixelMeterConversion.h>
11#include <visp3/core/vpPoint.h>
12#include <visp3/core/vpSerial.h>
13#include <visp3/core/vpXmlParserCamera.h>
14#include <visp3/detection/vpDetectorAprilTag.h>
15#include <visp3/gui/vpDisplayFactory.h>
16#include <visp3/io/vpImageIo.h>
17#include <visp3/robot/vpUnicycle.h>
18#include <visp3/sensor/vpV4l2Grabber.h>
19#include <visp3/visual_features/vpFeatureMomentAreaNormalized.h>
20#include <visp3/visual_features/vpFeatureMomentGravityCenterNormalized.h>
21#include <visp3/vs/vpServo.h>
22
/*!
 * Print the command line synopsis and the description of every supported
 * option to stdout, optionally followed by an error message.
 *
 * \param argv : Command line arguments as received by main(); argv[0] gives
 *   the program name and argv[error] the offending parameter when error != 0.
 * \param error : 0 to print the helper message only, otherwise the index in
 *   argv of the unsupported parameter reported in the trailing Error section.
 */
void usage(const char **argv, int error)
{
  // Note: "--tag-pose-method" was removed from the synopsis since main() does
  // not parse it; passing it would hit the "Unsupported parameter" branch.
  std::cout << "Synopsis" << std::endl
    << " " << argv[0]
    << " [--camera-device <id>]"
    << " [--tag-size <size>]"
    << " [--tag-family <family>]"
    << " [--tag-decision-margin-threshold <threshold>]"
    << " [--tag-hamming-distance-threshold <threshold>]"
    << " [--tag-quad-decimate <factor>]"
    << " [--tag-n-threads <number>]"
#if defined(VISP_HAVE_PUGIXML)
    << " [--intrinsic <xmlfile>]"
    << " [--camera-name <name>]"
#endif
#if defined(VISP_HAVE_DISPLAY)
    << " [--display-tag]"
    << " [--display-on]"
    << " [--save-image]"
#endif
    << " [--serial-off]"
    << " [--help, -h]" << std::endl
    << std::endl;
  // Defaults below match the initializers in main().
  std::cout << "Description" << std::endl
    << " Image-based visual servoing using an Apriltag." << std::endl
    << std::endl
    << " --camera-device <id>" << std::endl
    << " Camera device id." << std::endl
    << " Default: 0" << std::endl
    << std::endl
    << " --tag-size <size>" << std::endl
    << " Apriltag size in [m]." << std::endl
    << " Default: 0.065" << std::endl
    << std::endl
    << " --tag-family <family>" << std::endl
    << " Apriltag family. Supported values are:" << std::endl
    << " 0: TAG_36h11" << std::endl
    << " 1: TAG_36h10 (DEPRECATED)" << std::endl
    << " 2: TAG_36ARTOOLKIT (DEPRECATED)" << std::endl
    << " 3: TAG_25h9" << std::endl
    << " 4: TAG_25h7 (DEPRECATED)" << std::endl
    << " 5: TAG_16h5" << std::endl
    << " 6: TAG_CIRCLE21h7" << std::endl
    << " 7: TAG_CIRCLE49h12" << std::endl
    << " 8: TAG_CUSTOM48h12" << std::endl
    << " 9: TAG_STANDARD41h12" << std::endl
    << " 10: TAG_STANDARD52h13" << std::endl
    << " 11: TAG_ARUCO_4x4_50" << std::endl
    << " 12: TAG_ARUCO_4x4_100" << std::endl
    << " 13: TAG_ARUCO_4x4_250" << std::endl
    << " 14: TAG_ARUCO_4x4_1000" << std::endl
    << " 15: TAG_ARUCO_5x5_50" << std::endl
    << " 16: TAG_ARUCO_5x5_100" << std::endl
    << " 17: TAG_ARUCO_5x5_250" << std::endl
    << " 18: TAG_ARUCO_5x5_1000" << std::endl
    << " 19: TAG_ARUCO_6x6_50" << std::endl
    << " 20: TAG_ARUCO_6x6_100" << std::endl
    << " 21: TAG_ARUCO_6x6_250" << std::endl
    << " 22: TAG_ARUCO_6x6_1000" << std::endl
    << " 23: TAG_ARUCO_7x7_50" << std::endl
    << " 24: TAG_ARUCO_7x7_100" << std::endl
    << " 25: TAG_ARUCO_7x7_250" << std::endl
    << " 26: TAG_ARUCO_7x7_1000" << std::endl
    << " 27: TAG_ARUCO_MIP_36h12" << std::endl
    << " Default: 0 (36h11)" << std::endl
    << std::endl
    << " --tag-decision-margin-threshold <threshold>" << std::endl
    << " Threshold used to discard low-confident detections. A typical value is " << std::endl
    << " around 100. The higher this value, the more false positives will be filtered" << std::endl
    << " out. When this value is set to -1, false positives are not filtered out." << std::endl
    << " Default: 50" << std::endl
    << std::endl
    << " --tag-hamming-distance-threshold <threshold>" << std::endl
    << " Threshold used to discard low-confident detections with corrected bits." << std::endl
    << " A typical value is between 0 and 3. The lower this value, the more false" << std::endl
    << " positives will be filtered out." << std::endl
    << " Default: 2" << std::endl
    << std::endl
    << " --tag-quad-decimate <factor>" << std::endl
    << " Decimation factor used to detect a tag. " << std::endl
    << " Default: 4" << std::endl
    << std::endl
    << " --tag-n-threads <number>" << std::endl
    << " Number of threads used to detect a tag." << std::endl
    << " Default: 2" << std::endl
    << std::endl
#if defined(VISP_HAVE_PUGIXML)
    << " --intrinsic <xmlfile>" << std::endl
    << " Camera intrinsic parameters file in xml format." << std::endl
    << " Default: empty" << std::endl
    << std::endl
    << " --camera-name <name>" << std::endl
    << " Camera name in the intrinsic parameters file in xml format." << std::endl
    << " Default: empty" << std::endl
    << std::endl
#endif
#if defined(VISP_HAVE_DISPLAY)
    << " --display-tag" << std::endl
    << " Flag used to enable displaying the edges of a tag." << std::endl
    << " Default: disabled" << std::endl
    << std::endl
    << " --display-on" << std::endl
    << " Flag used to turn display on." << std::endl
    << " Default: disabled" << std::endl
    << std::endl
    << " --save-image" << std::endl
    << " Flag used to save images with overlay drawings." << std::endl
    << " Default: disabled" << std::endl
    << std::endl
#endif
    << " --serial-off" << std::endl
    << " Flag used to disable serial link." << std::endl
    << " Default: enabled" << std::endl
    << std::endl
    << " --help, -h" << std::endl
    << " Print this helper message." << std::endl
    << std::endl;

  if (error) {
    std::cout << "Error" << std::endl
      << " "
      << "Unsupported parameter " << argv[error] << std::endl;
  }
}
148
// Entry point: image-based visual servoing (IBVS) of a differential-drive
// mBot (Me Auriga board) on an AprilTag. Image-moment features (normalized
// gravity center xn and normalized area an) drive a vpServo control law;
// frames are grabbed over V4L2 and the computed wheel speeds are sent to the
// robot as "MOTOR_RPM=..." commands over a serial link.
//
// NOTE(review): this listing comes from a documentation dump. Gaps in the
// embedded line numbering show that the hyperlinked declarations were lost
// during extraction (e.g. opt_tag_family, vpImage I, vpV4l2Grabber g,
// vpCameraParameters cam, vpXmlParserCamera parser, vpServo task,
// vpRotationMatrix cRe, vpHomogeneousMatrix cMe, the display allocation and
// vpDisplay::display/flush calls). Restore them from the original tutorial
// before building.
149int main(int argc, const char **argv)
150{
151#if defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_V4L2)
152#ifdef ENABLE_VISP_NAMESPACE
153 using namespace VISP_NAMESPACE_NAME;
154#endif
155
// Default option values, overridable from the command line below.
// (Missing extracted line 157 declared opt_tag_family, used below.)
156 int device = 0;
158 double opt_tag_size = 0.065;
159 float opt_tag_quad_decimate = 4.0;
160 float opt_tag_decision_margin_threshold = 50;
161 float opt_tag_hamming_distance_threshold = 2;
162 int opt_tag_nThreads = 2;
163 std::string intrinsic_file = "";
164 std::string camera_name = "";
165 bool display_on = false;
166 bool serial_off = false;
167#if defined(VISP_HAVE_DISPLAY)
168 bool display_tag = false;
169 bool save_image = false; // Only possible if display_on = true
170#endif
171
// Command line parsing: any unknown option prints the usage and exits with
// failure status.
172 for (int i = 1; i < argc; i++) {
173 if (std::string(argv[i]) == "--camera-device" && i + 1 < argc) {
174 device = std::atoi(argv[++i]);
175 }
176 else if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
177 opt_tag_size = std::atof(argv[++i]);
178 }
179 else if (std::string(argv[i]) == "--tag-family" && i + 1 < argc) {
180 opt_tag_family = (vpDetectorAprilTag::vpAprilTagFamily)std::atoi(argv[++i]);
181 }
182 else if (std::string(argv[i]) == "--tag-decision-margin-threshold" && i + 1 < argc) {
183 opt_tag_decision_margin_threshold = static_cast<float>(atof(argv[++i]));
184 }
185 else if (std::string(argv[i]) == "--tag-hamming-distance-threshold" && i + 1 < argc) {
186 opt_tag_hamming_distance_threshold = atoi(argv[++i]);
187 }
188 else if (std::string(argv[i]) == "--tag-quad-decimate" && i + 1 < argc) {
189 opt_tag_quad_decimate = static_cast<float>(atof(argv[++i]));
190 }
191 else if (std::string(argv[i]) == "--tag-n-threads" && i + 1 < argc) {
192 opt_tag_nThreads = std::atoi(argv[++i]);
193 }
194#if defined(VISP_HAVE_PUGIXML)
195 else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
196 intrinsic_file = std::string(argv[++i]);
197 }
198 else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
199 camera_name = std::string(argv[++i]);
200 }
201#endif
202#if defined(VISP_HAVE_DISPLAY)
203 else if (std::string(argv[i]) == "--display-tag") {
204 display_tag = true;
205 }
206 else if (std::string(argv[i]) == "--display-on") {
207 display_on = true;
208 }
209 else if (std::string(argv[i]) == "--save-image") {
210 save_image = true;
211 }
212#endif
213 else if (std::string(argv[i]) == "--serial-off") {
214 serial_off = true;
215 }
216 else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
217 usage(argv, 0);
218 return EXIT_SUCCESS;
219 }
220 else {
221 usage(argv, i);
222 return EXIT_FAILURE;
223 }
224 }
225
226 // Me Auriga led ring
227 // if serial com ok: led 1 green
228 // if exception: led 1 red
229 // if tag detected: led 2 green, else led 2 red
230 // if motor left: led 3 blue
231 // if motor right: led 4 blue
232
// Open the serial link to the robot unless disabled; LED commands give a
// visual status on the Me Auriga ring (see the legend above).
233 vpSerial *serial = nullptr;
234 if (!serial_off) {
235 serial = new vpSerial("/dev/ttyAMA0", 115200);
236
237 serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
238 serial->write("LED_RING=1,0,10,0\n"); // Switch on led 1 to green: serial ok
239 }
240
241 try {
243
// NOTE(review): the declarations of the image I and the V4L2 grabber g
// (extracted lines 242/244) are missing from this dump.
245 std::ostringstream device_name;
246 device_name << "/dev/video" << device;
247 g.setDevice(device_name.str());
248 g.setScale(1);
249 g.acquire(I);
250
251 vpDisplay *d = nullptr;
253#ifdef VISP_HAVE_DISPLAY
254 if (display_on) {
// NOTE(review): the display allocation (extracted line 255) is missing here.
256 }
257#endif
258
// Default intrinsics; overridden below when an XML calibration file is given.
// NOTE(review): the vpCameraParameters cam declaration (line 259) is missing.
260 cam.initPersProjWithoutDistortion(615.1674805, 615.1675415, I.getWidth() / 2., I.getHeight() / 2.);
261
262#if defined(VISP_HAVE_PUGIXML)
264 if (!intrinsic_file.empty() && !camera_name.empty()) {
265 parser.parse(cam, intrinsic_file, camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
266 }
267#endif
268
269 std::cout << cam << std::endl;
270 std::cout << "Tag detector settings" << std::endl;
271 std::cout << " Tag size [m] : " << opt_tag_size << std::endl;
272 std::cout << " Tag family : " << opt_tag_family << std::endl;
273 std::cout << " Quad decimate : " << opt_tag_quad_decimate << std::endl;
274 std::cout << " Decision margin threshold : " << opt_tag_decision_margin_threshold << std::endl;
275 std::cout << " Hamming distance threshold: " << opt_tag_hamming_distance_threshold << std::endl;
276 std::cout << " Num threads : " << opt_tag_nThreads << std::endl;
277
278 vpDetectorAprilTag detector(opt_tag_family);
279
280 detector.setAprilTagQuadDecimate(opt_tag_quad_decimate);
281 detector.setAprilTagNbThreads(opt_tag_nThreads);
282 detector.setAprilTagDecisionMarginThreshold(opt_tag_decision_margin_threshold);
283 detector.setAprilTagHammingDistanceThreshold(opt_tag_hamming_distance_threshold);
284#ifdef VISP_HAVE_DISPLAY
285 detector.setDisplayTag(display_tag);
286#endif
287
// Adaptive servo gain: higher gain without display since the loop runs faster.
289 vpAdaptiveGain lambda;
290 if (display_on) {
291 lambda.initStandard(2.5, 0.4, 30); // lambda(0)=2.5, lambda(oo)=0.4 and lambda'(0)=30
292 }
293 else {
294 lambda.initStandard(4, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
295 }
296
// NOTE(review): the vpServo task declaration and task.setServo(...) call
// (extracted line 298) are missing from this dump.
297 vpUnicycle robot;
299 task.setInteractionMatrixType(vpServo::CURRENT);
300 task.setLambda(lambda);
// Constant rotation between camera and robot frames (cRe declaration on the
// missing extracted line 301). The values presumably map the robot frame
// (x forward, z up) into the camera frame — TODO confirm against the
// original tutorial.
302 cRe[0][0] = 0;
303 cRe[0][1] = -1;
304 cRe[0][2] = 0;
305 cRe[1][0] = 0;
306 cRe[1][1] = 0;
307 cRe[1][2] = -1;
308 cRe[2][0] = 1;
309 cRe[2][1] = 0;
310 cRe[2][2] = 0;
311
// NOTE(review): the vpHomogeneousMatrix cMe construction (line 312) is missing.
313 vpVelocityTwistMatrix cVe(cMe);
314 task.set_cVe(cVe);
315
// Unicycle robot Jacobian: 2 dof — rows 0 and 5 select linear velocity along
// x and angular velocity around z respectively.
316 vpMatrix eJe(6, 2, 0);
317 eJe[0][0] = eJe[5][1] = 1.0;
318
319 std::cout << "eJe: \n" << eJe << std::endl;
320
321 // Desired distance to the target
322 double Z_d = 0.4;
323
324 // Define the desired polygon corresponding the the AprilTag CLOCKWISE
325 double X[4] = { opt_tag_size / 2., +opt_tag_size / 2., -opt_tag_size / 2., -opt_tag_size / 2. };
326 double Y[4] = { opt_tag_size / 2., -opt_tag_size / 2., -opt_tag_size / 2., +opt_tag_size / 2. };
327 std::vector<vpPoint> vec_P, vec_P_d;
328
// Project the desired tag corners at distance Z_d to build the desired
// image points.
329 for (int i = 0; i < 4; i++) {
330 vpPoint P_d(X[i], Y[i], 0);
331 vpHomogeneousMatrix cdMo(0, 0, Z_d, 0, 0, 0);
332 P_d.track(cdMo); //
333 vec_P_d.push_back(P_d);
334 }
335
// Moment machinery: one set for the current image, one (_d suffix) for the
// desired configuration.
336 vpMomentObject m_obj(3), m_obj_d(3);
337 vpMomentDatabase mdb, mdb_d;
338 vpMomentBasic mb_d; // Here only to get the desired area m00
339 vpMomentGravityCenter mg, mg_d;
340 vpMomentCentered mc, mc_d;
341 vpMomentAreaNormalized man(0, Z_d),
342 man_d(0, Z_d); // Declare normalized area. Desired area parameter will be updated below with m00
343 vpMomentGravityCenterNormalized mgn, mgn_d; // Declare normalized gravity center
344
345 // Desired moments
346 m_obj_d.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
347 m_obj_d.fromVector(vec_P_d); // Initialize the object with the points coordinates
348
349 mb_d.linkTo(mdb_d); // Add basic moments to database
350 mg_d.linkTo(mdb_d); // Add gravity center to database
351 mc_d.linkTo(mdb_d); // Add centered moments to database
352 man_d.linkTo(mdb_d); // Add area normalized to database
353 mgn_d.linkTo(mdb_d); // Add gravity center normalized to database
354 mdb_d.updateAll(m_obj_d); // All of the moments must be updated, not just an_d
355 mg_d.compute(); // Compute gravity center moment
356 mc_d.compute(); // Compute centered moments AFTER gravity center
357
// Desired area: m20+m02 for a discrete object, m00 for a dense polygon.
358 double area = 0;
359 if (m_obj_d.getType() == vpMomentObject::DISCRETE)
360 area = mb_d.get(2, 0) + mb_d.get(0, 2);
361 else
362 area = mb_d.get(0, 0);
363 // Update an moment with the desired area
364 man_d.setDesiredArea(area);
365
366 man_d.compute(); // Compute area normalized moment AFTER centered moments
367 mgn_d.compute(); // Compute gravity center normalized moment AFTER area normalized moment
368
369 // Desired plane
370 double A = 0.0;
371 double B = 0.0;
372 double C = 1.0 / Z_d;
373
374 // Construct area normalized features
375 vpFeatureMomentGravityCenterNormalized s_mgn(mdb, A, B, C), s_mgn_d(mdb_d, A, B, C);
376 vpFeatureMomentAreaNormalized s_man(mdb, A, B, C), s_man_d(mdb_d, A, B, C);
377
378 // Add the features
379 task.addFeature(s_mgn, s_mgn_d, vpFeatureMomentGravityCenterNormalized::selectXn());
380 task.addFeature(s_man, s_man_d);
381
382 // Update desired gravity center normalized feature
383 s_mgn_d.update(A, B, C);
384 s_mgn_d.compute_interaction();
385 // Update desired area normalized feature
386 s_man_d.update(A, B, C);
387 s_man_d.compute_interaction();
388
// Main servo loop: grab, detect, compute the control law, send wheel speeds.
389 std::vector<double> time_vec;
390 for (;;) {
391 g.acquire(I);
392
393#ifdef VISP_HAVE_DISPLAY
// NOTE(review): vpDisplay::display(I) (extracted line 394) is missing here.
395#endif
396
397 double t = vpTime::measureTimeMs();
398 std::vector<vpHomogeneousMatrix> cMo_vec;
399 detector.detect(I, opt_tag_size, cam, cMo_vec);
// NOTE(review): the elapsed-time computation (extracted line 400,
// t = vpTime::measureTimeMs() - t) is missing; t pushed below is meant to be
// the detection time in [ms].
401 time_vec.push_back(t);
402
403 {
404 std::stringstream ss;
405 ss << "Detection time: " << t << " ms";
406#ifdef VISP_HAVE_DISPLAY
407 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
408#endif
409 }
410
// Servo only when exactly one tag is visible; otherwise stop the robot.
411 if (detector.getNbObjects() == 1) {
412 if (!serial_off) {
413 serial->write("LED_RING=2,0,10,0\n"); // Switch on led 2 to green: tag detected
414 }
415
416 // Update current points used to compute the moments
417 std::vector<vpImagePoint> vec_ip = detector.getPolygon(0);
418 vec_P.clear();
419 for (size_t i = 0; i < vec_ip.size(); i++) { // size = 4
420 double x = 0, y = 0;
421 vpPixelMeterConversion::convertPoint(cam, vec_ip[i], x, y);
422 vpPoint P;
423 P.set_x(x);
424 P.set_y(y);
425 vec_P.push_back(P);
426 }
427
428#ifdef VISP_HAVE_DISPLAY
429 // Display visual features
430 vpDisplay::displayPolygon(I, vec_ip, vpColor::green, 3); // Current polygon used to compure an moment
431 vpDisplay::displayCross(I, detector.getCog(0), 15, vpColor::green,
432 3); // Current polygon used to compure an moment
433 vpDisplay::displayLine(I, 0, cam.get_u0(), I.getHeight() - 1, cam.get_u0(), vpColor::red,
434 3); // Vertical line as desired x position
435#endif
436
437 // Current moments
438 m_obj.setType(vpMomentObject::DENSE_POLYGON); // Consider the AprilTag as a polygon
439 m_obj.fromVector(vec_P); // Initialize the object with the points coordinates
440
441 mg.linkTo(mdb); // Add gravity center to database
442 mc.linkTo(mdb); // Add centered moments to database
443 man.linkTo(mdb); // Add area normalized to database
444 mgn.linkTo(mdb); // Add gravity center normalized to database
445 mdb.updateAll(m_obj); // All of the moments must be updated, not just an_d
446 mg.compute(); // Compute gravity center moment
447 mc.compute(); // Compute centered moments AFTER gravity center
448 man.setDesiredArea(
449 area); // Since desired area was init at 0, because unknow at construction, need to be updated here
450 man.compute(); // Compute area normalized moment AFTER centered moment
451 mgn.compute(); // Compute gravity center normalized moment AFTER area normalized moment
452
453 s_mgn.update(A, B, C);
454 s_mgn.compute_interaction();
455 s_man.update(A, B, C);
456 s_man.compute_interaction();
457
458 task.set_cVe(cVe);
459 task.set_eJe(eJe);
460
461 // Compute the control law. Velocities are computed in the mobile robot reference frame
462 vpColVector v = task.computeControlLaw();
463
464 std::cout << "Send velocity to the mbot: " << v[0] << " m/s " << vpMath::deg(v[1]) << " deg/s" << std::endl;
465
466 task.print();
// Differential-drive kinematics: convert (v, w) into left/right wheel speeds
// in [rad/s]. radius and L are presumably the wheel radius and the half
// wheel separation in [m] — TODO confirm against the robot's geometry.
467 double radius = 0.0325;
468 double L = 0.0725;
469 double motor_left = (-v[0] - L * v[1]) / radius;
470 double motor_right = (v[0] - L * v[1]) / radius;
471 std::cout << "motor left vel: " << motor_left << " motor right vel: " << motor_right << std::endl;
472 if (!serial_off) {
473 // serial->write("LED_RING=3,0,0,10\n"); // Switch on led 3 to blue: motor left servoed
474 // serial->write("LED_RING=4,0,0,10\n"); // Switch on led 4 to blue: motor right servoed
475 }
// Convert wheel speeds from [rad/s] to RPM (factor 30/pi) and send them.
476 std::stringstream ss;
477 double rpm_left = motor_left * 30. / M_PI;
478 double rpm_right = motor_right * 30. / M_PI;
479 ss << "MOTOR_RPM=" << vpMath::round(rpm_left) << "," << vpMath::round(rpm_right) << "\n";
480 std::cout << "Send: " << ss.str() << std::endl;
481 if (!serial_off) {
482 serial->write(ss.str());
483 }
484 }
485 else {
486 // stop the robot
487 if (!serial_off) {
488 serial->write("LED_RING=2,10,0,0\n"); // Switch on led 2 to red: tag not detected
489 // serial->write("LED_RING=3,0,0,0\n"); // Switch on led 3 to blue: motor left not servoed
490 // serial->write("LED_RING=4,0,0,0\n"); // Switch on led 4 to blue: motor right not servoed
491 serial->write("MOTOR_RPM=0,-0\n"); // Stop the robot
492 }
493 }
494
495#ifdef VISP_HAVE_DISPLAY
496 vpDisplay::displayText(I, 20, 20, "Click to quit.", vpColor::red);
// NOTE(review): vpDisplay::flush(I) (extracted line 497) is missing here.
498
499 if (display_on && save_image) {
// NOTE(review): the color image O declaration and vpDisplay::getImage(I, O)
// (extracted line 500) are missing here.
501 vpImageIo::write(O, "image.png");
502 }
503 if (vpDisplay::getClick(I, false)) {
504 break;
505 }
506#endif
507 }
508
509
510 if (!serial_off) {
511 serial->write("LED_RING=0,0,0,0\n"); // Switch off all led
512 }
513
514 std::cout << "Benchmark computation time" << std::endl;
515 std::cout << "Mean / Median / Std: " << vpMath::getMean(time_vec) << " ms"
516 << " ; " << vpMath::getMedian(time_vec) << " ms"
517 << " ; " << vpMath::getStdev(time_vec) << " ms" << std::endl;
518
519 if (display_on) {
520 delete d;
521 }
522 if (!serial_off) {
523 delete serial;
524 }
525 }
526 catch (const vpException &e) {
527 std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
528 if (!serial_off) {
529 serial->write("LED_RING=1,10,0,0\n"); // Switch on led 1 to red
530 }
531 }
532
533 return EXIT_SUCCESS;
534#else
// Fallback build when AprilTag and/or V4L2 third parties are not available.
535 (void)argc;
536 (void)argv;
537#ifndef VISP_HAVE_APRILTAG
538 std::cout << "ViSP is not build with Apriltag support" << std::endl;
539#endif
540#ifndef VISP_HAVE_V4L2
541 std::cout << "ViSP is not build with v4l2 support" << std::endl;
542#endif
543 std::cout << "Install missing 3rd parties, configure and build ViSP to run this tutorial" << std::endl;
544 return EXIT_SUCCESS;
545#endif
546}
Adaptive gain computation.
void initStandard(double gain_at_zero, double gain_at_infinity, double slope_at_zero)
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor green
Definition vpColor.h:201
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void getImage(const vpImage< unsigned char > &Is, vpImage< vpRGBa > &Id)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
static void displayPolygon(const vpImage< unsigned char > &I, const std::vector< vpImagePoint > &vip, const vpColor &color, unsigned int thickness=1, bool closed=true)
error that can be emitted by ViSP classes.
Definition vpException.h:60
Functionality computation for normalized surface moment feature. Computes the interaction matrix asso...
Functionality computation for centered and normalized moment feature. Computes the interaction matrix...
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void write(const vpImage< unsigned char > &I, const std::string &filename, int backend=IO_DEFAULT_BACKEND)
Definition of the vpImage class member functions.
Definition vpImage.h:131
static double getMedian(const std::vector< double > &v)
Definition vpMath.cpp:343
static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
Definition vpMath.cpp:374
static int round(double x)
Definition vpMath.h:413
static double getMean(const std::vector< double > &v)
Definition vpMath.cpp:323
static double deg(double rad)
Definition vpMath.h:119
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
Class handling the normalized surface moment that is invariant in scale and used to estimate depth.
This class defines the 2D basic moment . This class is a wrapper for vpMomentObject which allows to u...
const std::vector< double > & get() const
This class defines the double-indexed centered moment descriptor .
void compute() VP_OVERRIDE
This class allows to register all vpMoments so they can access each other according to their dependen...
virtual void updateAll(vpMomentObject &object)
Class describing 2D normalized gravity center moment.
Class describing 2D gravity center moment.
Class for generic objects.
void linkTo(vpMomentDatabase &moments)
Definition vpMoment.cpp:114
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:79
void set_x(double x)
Set the point x coordinate in the image plane.
Definition vpPoint.cpp:471
void set_y(double y)
Set the point y coordinate in the image plane.
Definition vpPoint.cpp:473
Implementation of a rotation matrix and operations on such kind of matrices.
void write(const std::string &s)
Definition vpSerial.cpp:332
@ EYEINHAND_L_cVe_eJe
Definition vpServo.h:183
@ CURRENT
Definition vpServo.h:217
Class that consider the case of a translation vector.
Generic functions for unicycle mobile robots.
Definition vpUnicycle.h:52
Class that is a wrapper over the Video4Linux2 (V4L2) driver.
void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
void setDevice(const std::string &devname)
void acquire(vpImage< unsigned char > &I)
XML parser to load and save intrinsic camera parameters.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()