Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
catchRBT.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2024 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * Test vpMbGenericTracker JSON parse / save.
32 */
33
39
40#include <visp3/core/vpConfig.h>
41
42#if defined(VISP_HAVE_CATCH2)
43
44#include <visp3/core/vpIoTools.h>
45#include <visp3/core/vpImageConvert.h>
46#include <visp3/rbt/vpRBTracker.h>
47
48#include <visp3/rbt/vpRBSilhouetteMeTracker.h>
49#include <visp3/rbt/vpRBSilhouetteCCDTracker.h>
50#include <visp3/rbt/vpRBKltTracker.h>
51#include <visp3/rbt/vpRBDenseDepthTracker.h>
52#include <visp3/ar/vpPanda3DFrameworkManager.h>
53
54#include "test_utils.h"
55
56#if defined(VISP_HAVE_NLOHMANN_JSON)
57#include VISP_NLOHMANN_JSON(json.hpp)
58#endif
59
60#define CATCH_CONFIG_RUNNER
61#include <catch_amalgamated.hpp>
62
63#ifdef ENABLE_VISP_NAMESPACE
64using namespace VISP_NAMESPACE_NAME;
65#endif
66
// Wavefront OBJ description of a 0.1 m cube centered on the origin
// (vertices at +/-0.05 on each axis), with per-face normals and UVs.
// Used as a minimal, self-contained 3D model for the tracker tests below
// (written to disk by createObjFile()).
const std::string objCube =
"o Cube\n"
"v -0.050000 -0.050000 0.050000\n"
"v -0.050000 0.050000 0.050000\n"
"v -0.050000 -0.050000 -0.050000\n"
"v -0.050000 0.050000 -0.050000\n"
"v 0.050000 -0.050000 0.050000\n"
"v 0.050000 0.050000 0.050000\n"
"v 0.050000 -0.050000 -0.050000\n"
"v 0.050000 0.050000 -0.050000\n"
"vn -1.0000 -0.0000 -0.0000\n"
"vn -0.0000 -0.0000 -1.0000\n"
"vn 1.0000 -0.0000 -0.0000\n"
"vn -0.0000 -0.0000 1.0000\n"
"vn -0.0000 -1.0000 -0.0000\n"
"vn -0.0000 1.0000 -0.0000\n"
"vt 0.375000 0.000000\n"
"vt 0.375000 1.000000\n"
"vt 0.125000 0.750000\n"
"vt 0.625000 0.000000\n"
"vt 0.625000 1.000000\n"
"vt 0.875000 0.750000\n"
"vt 0.125000 0.500000\n"
"vt 0.375000 0.250000\n"
"vt 0.625000 0.250000\n"
"vt 0.875000 0.500000\n"
"vt 0.375000 0.750000\n"
"vt 0.625000 0.750000\n"
"vt 0.375000 0.500000\n"
"vt 0.625000 0.500000\n"
"s 0\n"
"f 2/4/1 3/8/1 1/1/1\n"
"f 4/9/2 7/13/2 3/8/2\n"
"f 8/14/3 5/11/3 7/13/3\n"
"f 6/12/4 1/2/4 5/11/4\n"
"f 7/13/5 1/3/5 3/7/5\n"
"f 4/10/6 6/12/6 8/14/6\n"
"f 2/4/1 4/9/1 3/8/1\n"
"f 4/9/2 8/14/2 7/13/2\n"
"f 8/14/3 6/12/3 5/11/3\n"
"f 6/12/4 2/5/4 1/2/4\n"
"f 7/13/5 5/11/5 1/3/5\n"
"f 4/10/6 2/6/6 6/12/6\n";
110
// Global test option, set from the --no-display CLI flag in main().
bool opt_no_display = false; // If true, disable display or tests requiring display
112
113std::string createObjFile()
114{
115 const std::string tempDir = vpIoTools::makeTempDirectory("visp_test_rbt_obj");
116 const std::string objFile = vpIoTools::createFilePath(tempDir, "cube.obj");
117 std::ofstream f(objFile);
118 f << objCube;
119 f.close();
120
121 return objFile;
122}
123
124
// Unit tests for the setters/getters and JSON configuration of the
// moving-edge silhouette feature tracker.
SCENARIO("Instantiating a silhouette me tracker", "[rbt]")
{
  GIVEN("A base me tracker")
  {
    // NOTE(review): the declaration of `tracker` (original line 129) was lost
    // during extraction — presumably `vpRBSilhouetteMeTracker tracker;`.
    // Confirm against the original file.
    WHEN("Changing mask parameters")
    {
      THEN("Enabling mask is seen")
      {
        // Toggling the flag must be reflected by the getter.
        bool useMaskDefault = tracker.shouldUseMask();
        tracker.setShouldUseMask(!useMaskDefault);
        REQUIRE(useMaskDefault != tracker.shouldUseMask());
      }
      THEN("Changing mask min confidence with a correct value is Ok")
      {
        // Valid confidence is in [0, 1]: check both bounds and an interior value.
        tracker.setMinimumMaskConfidence(0.0);
        REQUIRE(tracker.getMinimumMaskConfidence() == 0.0);
        tracker.setMinimumMaskConfidence(1.0);
        REQUIRE(tracker.getMinimumMaskConfidence() == 1.0);
        tracker.setMinimumMaskConfidence(0.5);
        REQUIRE(tracker.getMinimumMaskConfidence() == 0.5);
      }
      THEN("Setting incorrect mask confidence value fails")
      {
        REQUIRE_THROWS(tracker.setMinimumMaskConfidence(-1.0));
      }
    }
    WHEN("Changing robust threshold")
    {
      THEN("Setting correct value works")
      {
        tracker.setMinRobustThreshold(0.5);
        REQUIRE(tracker.getMinRobustThreshold() == 0.5);
      }
      THEN("Setting negative value throws")
      {
        REQUIRE_THROWS(tracker.setMinRobustThreshold(-0.5));
      }
    }
    WHEN("Changing number of candidates")
    {
      THEN("Setting correct value works")
      {
        tracker.setNumCandidates(3);
        REQUIRE(tracker.getNumCandidates() == 3);
      }
      THEN("Setting incorrect value throws")
      {
        // At least one candidate point per site is required.
        REQUIRE_THROWS(tracker.setNumCandidates(0));
      }
    }
    WHEN("Changing convergence settings")
    {
      THEN("Setting correct single point value works")
      {
        tracker.setSinglePointConvergenceThreshold(1.0);
        REQUIRE(tracker.getSinglePointConvergenceThreshold() == 1.0);
      }
      THEN("Setting incorrect single point value throws")
      {
        REQUIRE_THROWS(tracker.setSinglePointConvergenceThreshold(-1.0));
      }
      THEN("Setting correct global value works")
      {
        // Global convergence ratio is a fraction of converged points, in [0, 1].
        tracker.setGlobalConvergenceMinimumRatio(0.0);
        REQUIRE(tracker.getGlobalConvergenceMinimumRatio() == 0.0);
        tracker.setGlobalConvergenceMinimumRatio(1.0);
        REQUIRE(tracker.getGlobalConvergenceMinimumRatio() == 1.0);
        tracker.setGlobalConvergenceMinimumRatio(0.5);
        REQUIRE(tracker.getGlobalConvergenceMinimumRatio() == 0.5);
      }
    }
#if defined(VISP_HAVE_NLOHMANN_JSON)
    WHEN("defining JSON parameters")
    {
      // A complete, valid JSON configuration for the moving-edge feature,
      // including the nested vpMe ("movingEdge") settings.
      nlohmann::json j = {
        {"type", "silhouetteMe"},
        { "numCandidates", 1 },
        { "weight", 0.5 },
        { "convergencePixelThreshold", 0.5 },
        { "convergenceRatio", 0.99},
        { "useMask", true},
        { "minMaskConfidence", 0.5},
        { "movingEdge", {
          {"maskSign", 0},
          {"maskSize" , 5},
          {"minSampleStep" , 4.0},
          {"mu" , {0.5, 0.5}},
          {"nMask" , 90},
          {"ntotalSample" , 0},
          {"pointsToTrack" , 200},
          {"range" , 5},
          {"sampleStep" , 4.0},
          {"strip" , 2},
          {"thresholdType" , "normalized"},
          {"threshold" , 20.0}
        }}
      };
      THEN("Loading correct settings works")
      {
        tracker.loadJsonConfiguration(j);
        REQUIRE(tracker.getNumCandidates() == 1);
        REQUIRE(tracker.shouldUseMask() == true);
        REQUIRE(tracker.getMinimumMaskConfidence() == 0.5);
        // Nested movingEdge values must be propagated to the vpMe object.
        REQUIRE(tracker.getMe().getMaskNumber() == 90);
        REQUIRE(tracker.getMe().getThreshold() == 20.0);
      }
      THEN("Setting incorrect candidate number throws")
      {
        j["numCandidates"] = 0;
        REQUIRE_THROWS(tracker.loadJsonConfiguration(j));
      }
      THEN("Setting incorrect mask confidence throws")
      {
        j["minMaskConfidence"] = 5.0; // outside [0, 1]
        REQUIRE_THROWS(tracker.loadJsonConfiguration(j));
      }
      // NOTE(review): "vlaue" is a typo for "value" in the two test names below;
      // harmless at runtime but worth fixing upstream.
      THEN("Setting incorrect single point convergence vlaue confidence throws")
      {
        j["convergencePixelThreshold"] = -1.0;
        REQUIRE_THROWS(tracker.loadJsonConfiguration(j));
      }
      THEN("Setting incorrect global convergence vlaue confidence throws")
      {
        j["convergenceRatio"] = 2.0; // ratio must be in [0, 1]
        REQUIRE_THROWS(tracker.loadJsonConfiguration(j));
      }
    }
#endif
  }
}
256
// Unit tests for the Contracting Curve Density (CCD) silhouette tracker:
// smoothing factor validation, CCD parameter propagation, JSON loading.
SCENARIO("Instantiating a silhouette CCD tracker", "[rbt]")
{
  GIVEN("A base ccd tracker")
  {
    // NOTE(review): the declaration of `tracker` (original line 261) was lost
    // during extraction — presumably `vpRBSilhouetteCCDTracker tracker;`.
    // Confirm against the original file.
    WHEN("Setting smoothing factor")
    {
      THEN("Setting value above 0 works")
      {
        tracker.setTemporalSmoothingFactor(0.5);
        REQUIRE(tracker.getTemporalSmoothingFactor() == 0.5);
      }
      THEN("Setting value below 0 throws")
      {
        REQUIRE_THROWS(tracker.setTemporalSmoothingFactor(-2.0));
      }
    }
    WHEN("Updating CCD parameters")
    {
      // Modify a copy of the current parameters and push it back.
      vpCCDParameters ccd = tracker.getCCDParameters();
      ccd.h += 4;
      ccd.delta_h += 2;
      tracker.setCCDParameters(ccd);
      THEN("Changes are propagated to tracker")
      {
        REQUIRE(tracker.getCCDParameters().h == ccd.h);
        REQUIRE(tracker.getCCDParameters().delta_h == ccd.delta_h);
      }
    }

#if defined(VISP_HAVE_NLOHMANN_JSON)
    WHEN("Defining associated json")
    {
      nlohmann::json j = {
        {"type", "silhouetteCCD"},
        {"weight", 0.01},
        {"temporalSmoothing", 0.1},
        {"convergenceThreshold", 0.1},
        {"ccd", {
          {"h", 64},
          {"delta_h", 16},
          // gamma is a 4-vector mapped onto gamma_1..gamma_4.
          {"gamma", { 0.1, 0.2, 0.3, 0.4 } }
        }}
      };
      THEN("Loading correct json works")
      {
        tracker.loadJsonConfiguration(j);
        REQUIRE(tracker.getTemporalSmoothingFactor() == 0.1);
        vpCCDParameters ccd = tracker.getCCDParameters();
        REQUIRE(ccd.h == 64);
        REQUIRE(ccd.delta_h == 16);
        REQUIRE((ccd.gamma_1 == 0.1 && ccd.gamma_2 == 0.2 && ccd.gamma_3 == 0.3 && ccd.gamma_4 == 0.4));
      }
      THEN("Loading invalid temporal smoothing factor throws")
      {
        j["temporalSmoothing"] = -3.14;
        REQUIRE_THROWS(tracker.loadJsonConfiguration(j));
      }
      THEN("Loading invalid ccd gamma throws")
      {
        // gamma must be an array of 4 values, not a scalar.
        j["ccd"]["gamma"] = -3.14;
        REQUIRE_THROWS(tracker.loadJsonConfiguration(j));
      }
    }
#endif
  }
}
324
// KLT feature tracker tests, compiled only when the KLT-based RB tracker is
// available (guard macro defined by vpRBKltTracker.h).
#if defined(VP_HAVE_RB_KLT_TRACKER)
SCENARIO("Instantiating KLT tracker")
// NOTE(review): the SCENARIO opening brace and the declaration of `tracker`
// (original lines 327-328) were lost during extraction — presumably
// `{` and `vpRBKltTracker tracker;`. Confirm against the original file.
  WHEN("Modifying basic settings")
  {
    tracker.setFilteringBorderSize(2);
    tracker.setFilteringMaxReprojectionError(0.024);
    tracker.setMinimumDistanceNewPoints(0.005);
    tracker.setMinimumNumberOfPoints(20);
    tracker.setShouldUseMask(true);
    tracker.setMinimumMaskConfidence(0.5);
    THEN("Every change is visible")
    {
      REQUIRE(tracker.getFilteringBorderSize() == 2);
      REQUIRE(tracker.getFilteringMaxReprojectionError() == 0.024);
      REQUIRE(tracker.getMinimumDistanceNewPoints() == 0.005);
      REQUIRE(tracker.getMinimumNumberOfPoints() == 20);
      REQUIRE(tracker.shouldUseMask());
      REQUIRE(tracker.getMinimumMaskConfidence() == 0.5);
    }
    THEN("Setting incorrect Mask confidence throws")
    {
      REQUIRE_THROWS(tracker.setMinimumMaskConfidence(-1.0));
    }
  }

#if defined(VISP_HAVE_NLOHMANN_JSON)
  WHEN("Defining associated json")
  {
    nlohmann::json j = {
      {"type", "klt"},
      {"weight", 0.01},
      {"minimumNumPoints", 25},
      {"newPointsMinPixelDistance", 5},
      {"maxReprojectionErrorPixels", 0.01},
      {"useMask", true},
      {"minMaskConfidence", 0.1},
      { "windowSize", 7 },
      { "quality", 0.01 },
      { "maxFeatures", 500 }
    };
    THEN("Loading correct json works")
    {
      tracker.loadJsonConfiguration(j);
      REQUIRE(tracker.getMinimumNumberOfPoints() == 25);
      REQUIRE(tracker.getMinimumDistanceNewPoints() == 5);
      REQUIRE(tracker.getFilteringMaxReprojectionError() == 0.01);
      REQUIRE(tracker.shouldUseMask() == true);
      REQUIRE(tracker.getMinimumMaskConfidence() == 0.1f);
      // windowSize/quality/maxFeatures are forwarded to the underlying vpKltOpencv.
      REQUIRE(tracker.getKltTracker().getWindowSize() == 7);
      REQUIRE(tracker.getKltTracker().getQuality() == 0.01);
      REQUIRE(tracker.getKltTracker().getMaxFeatures() == 500);
    }
    THEN("Loading invalid mask confidence throws")
    {
      j["minMaskConfidence"] = -3.14;
      REQUIRE_THROWS(tracker.loadJsonConfiguration(j));
    }
  }
#endif
}
#endif
388
// Unit tests for the dense depth tracker: step/confidence validation and
// JSON configuration loading.
SCENARIO("Instantiating depth tracker", "[rbt]")
{
  // NOTE(review): the declaration of `tracker` (original line 391) was lost
  // during extraction — presumably `vpRBDenseDepthTracker tracker;`.
  // Confirm against the original file.
  WHEN("Setting steps")
  {
    THEN("Setting positive value works")
    {
      tracker.setStep(4);
      REQUIRE(tracker.getStep() == 4);
    }
    THEN("Setting 0 step is invalid")
    {
      // A zero sampling step would never advance across the depth map.
      REQUIRE_THROWS(tracker.setStep(0));
    }
  }
  WHEN("Setting confidence")
  {
    THEN("Setting incorrect mask confidence value")
    {
      REQUIRE_THROWS(tracker.setMinimumMaskConfidence(-1.0));
    }
    THEN("Setting correct mask confidence value")
    {
      tracker.setMinimumMaskConfidence(0.8);
      // Getter returns float, hence the 0.8f comparison.
      REQUIRE(tracker.getMinimumMaskConfidence() == 0.8f);
    }
    THEN("Toggling mask works")
    {
      tracker.setShouldUseMask(true);
      REQUIRE(tracker.shouldUseMask());
    }
  }
#if defined(VISP_HAVE_NLOHMANN_JSON)
  WHEN("Defining associated json")
  {
    // NOTE(review): "type": "klt" looks like a copy-paste leftover from the KLT
    // scenario — presumably the depth tracker's own type tag was intended.
    // Apparently loadJsonConfiguration does not validate the tag here; verify upstream.
    nlohmann::json j = {
      {"type", "klt"},
      {"weight", 0.01},
      {"step", 16},
      {"useMask", true},
      {"minMaskConfidence", 0.1}
    };
    THEN("Loading correct json works")
    {
      tracker.loadJsonConfiguration(j);
      REQUIRE(tracker.getStep() == 16);
      REQUIRE(tracker.shouldUseMask());
      REQUIRE(tracker.getMinimumMaskConfidence() == 0.1f);
    }
    THEN("Loading invalid mask confidence throws")
    {
      j["minMaskConfidence"] = -3.14;
      REQUIRE_THROWS(tracker.loadJsonConfiguration(j));
    }
    THEN("Loading invalid step throws")
    {
      j["step"] = 0;
      REQUIRE_THROWS(tracker.loadJsonConfiguration(j));
    }
  }
#endif
}
451
// Tests for the top-level render-based tracker (vpRBTracker): optimization
// parameter validation, camera setup, full JSON configuration, and feature
// tracker registration.
SCENARIO("Instantiating a render-based tracker", "[rbt]")
{
  // NOTE(review): the declaration of `tracker` (original line 454) was lost
  // during extraction — presumably `vpRBTracker tracker;`. Confirm against
  // the original file (the verifyBase lambda below captures it by reference).

  WHEN("Setting optimization parameters")
  {
    THEN("Max num iter cannot be zero")
    {
      REQUIRE_THROWS(tracker.setMaxOptimizationIters(0));
    }
    THEN("Setting num iter is ok")
    {
      tracker.setMaxOptimizationIters(10);
      REQUIRE(tracker.getMaxOptimizationIters() == 10);
    }
    THEN("Gain cannot be negative")
    {
      REQUIRE_THROWS(tracker.setOptimizationGain(-0.5));
    }
    THEN("Positive gain is ok")
    {
      tracker.setOptimizationGain(0.5);
      REQUIRE(tracker.getOptimizationGain() == 0.5);
    }
    THEN("Initial mu cannot be negative")
    {
      REQUIRE_THROWS(tracker.setOptimizationInitialMu(-0.5));
    }
    // mu == 0 degenerates Levenberg-Marquardt into Gauss-Newton, which is allowed.
    THEN("Initial mu can be zero (gauss newton)")
    {
      tracker.setOptimizationInitialMu(0.0);
      REQUIRE(tracker.getOptimizationInitialMu() == 0.0);
    }
    THEN("Initial mu can be above zero")
    {
      tracker.setOptimizationInitialMu(0.1);
      REQUIRE(tracker.getOptimizationInitialMu() == 0.1);
    }

    THEN("Mu factor cannot be negative")
    {
      REQUIRE_THROWS(tracker.setOptimizationMuIterFactor(-0.5));
    }
    THEN("Mu factor can be zero")
    {
      tracker.setOptimizationMuIterFactor(0.0);
      REQUIRE(tracker.getOptimizationMuIterFactor() == 0.0);
    }
    THEN("Mu factor can be positive")
    {
      tracker.setOptimizationMuIterFactor(0.1);
      REQUIRE(tracker.getOptimizationMuIterFactor() == 0.1);
    }
  }

  WHEN("Setting camera parameters and resolution")
  {
    unsigned int h = 480, w = 640;
    vpCameraParameters cam(600, 600, 320, 240);
    THEN("Image height cannot be zero")
    {
      REQUIRE_THROWS(tracker.setCameraParameters(cam, 0, w));
    }
    THEN("Image width cannot be zero")
    {
      REQUIRE_THROWS(tracker.setCameraParameters(cam, h, 0));
    }
    THEN("Camera model cannot have distortion")
    {
      // The renderer only supports the undistorted perspective model.
      cam.initPersProjWithDistortion(600, 600, 320, 240, 0.01, 0.01);
      REQUIRE_THROWS(tracker.setCameraParameters(cam, h, w));
    }
    THEN("Loading with perspective model with no distortion and correct resolution is ok")
    {
      tracker.setCameraParameters(cam, h, w);
      REQUIRE(tracker.getCameraParameters() == cam);
      REQUIRE(tracker.getImageHeight() == h);
      REQUIRE(tracker.getImageWidth() == w);
    }
  }

#if defined(VISP_HAVE_NLOHMANN_JSON)
  WHEN("Loading JSON configuration")
  {
    // Full configuration: camera intrinsics/resolution, VVS optimization,
    // model path, silhouette extraction settings, and two feature trackers.
    const std::string jsonLiteral = R"JSON({
    "camera": {
      "intrinsics": {
        "model": "perspectiveWithoutDistortion",
        "px" : 302.573,
        "py" : 302.396,
        "u0" : 162.776,
        "v0" : 122.475
      },
      "height": 240,
      "width" : 320
    },
    "vvs": {
      "gain": 1.0,
      "maxIterations" : 10,
      "mu": 0.5,
      "muIterFactor": 0.1
    },
    "model" : "path/to/model.obj",
    "silhouetteExtractionSettings" : {
      "threshold": {
        "type": "relative",
        "value" : 0.1
      },
      "sampling" : {
        "type": "fixed",
        "samplingRate": 2,
        "numPoints" : 128,
        "reusePreviousPoints": true
      }
    },
    "features": [
      {
        "type": "silhouetteMe",
        "weight" : 0.5,
        "numCandidates" : 3,
        "convergencePixelThreshold" : 3,
        "convergenceRatio" : 0.99,
        "movingEdge" : {
          "maskSign": 0,
          "maskSize" : 5,
          "minSampleStep" : 4.0,
          "mu" : [
            0.5,
            0.5
          ] ,
          "nMask" : 90,
          "ntotalSample" : 0,
          "pointsToTrack" : 200,
          "range" : 5,
          "sampleStep" : 4.0,
          "strip" : 2,
          "thresholdType" : "normalized",
          "threshold" : 20.0
        }
      },
      {
        "type": "silhouetteColor",
        "weight" : 0.5,
        "convergenceThreshold" : 0.1,
        "temporalSmoothing" : 0.1,
        "ccd" : {
          "h": 4,
          "delta_h" : 1
        }
      }
    ],
    "verbose": {
      "enabled": true
    }
  })JSON";
    // Checks shared by every loading variant below: resolution, VVS settings
    // and silhouette extraction settings must match the JSON above.
    const auto verifyBase = [&tracker]() {
      REQUIRE((tracker.getImageHeight() == 240 && tracker.getImageWidth() == 320));
      REQUIRE((tracker.getOptimizationGain() == 1.0 && tracker.getMaxOptimizationIters() == 10));
      vpSilhouettePointsExtractionSettings silset = tracker.getSilhouetteExtractionParameters();
      REQUIRE((silset.thresholdIsRelative() && silset.getThreshold() == 0.1));
      REQUIRE((silset.getSampleStep() == 2 && silset.getMaxCandidates() == 128));
      REQUIRE((silset.preferPreviousPoints()));

      REQUIRE((tracker.getOptimizationGain() == 1.0 && tracker.getMaxOptimizationIters() == 10));
      REQUIRE((tracker.getOptimizationInitialMu() == 0.5 && tracker.getOptimizationMuIterFactor() == 0.1));
    };
    nlohmann::json j = nlohmann::json::parse(jsonLiteral);
    THEN("Loading configuration with trackers")
    {
      tracker.loadConfiguration(j);
      verifyBase();
      REQUIRE(tracker.getModelPath() == "path/to/model.obj");
      if (!opt_no_display) {
        AND_THEN("Initializing tracking fails since object does not exist")
        {
          REQUIRE_THROWS(tracker.startTracking());
        }
      }
    }
    THEN("Loading configuration without model also works")
    {
      // The model entry is optional at load time; it is only required when
      // tracking actually starts.
      j.erase("model");
      tracker.loadConfiguration(j);
      verifyBase();
      REQUIRE(tracker.getModelPath() == "");
      if (!opt_no_display) {
        AND_THEN("Initializing tracking fails since path is not specified")
        {
          REQUIRE_THROWS(tracker.startTracking());
        }
      }
    }
    THEN("Loading configuration with real 3D model also works")
    {
      std::string objFile = createObjFile();
      j["model"] = objFile;
      tracker.loadConfiguration(j);
      verifyBase();
      REQUIRE(tracker.getModelPath() == objFile);
      if (!opt_no_display) {
        AND_THEN("Initializing tracker works")
        {
          REQUIRE_NOTHROW(tracker.startTracking());
        }
      }
    }
  }

  WHEN("Adding trackers")
  {
    THEN("Adding nullptr is not allowed")
    {
      REQUIRE_THROWS(tracker.addTracker(nullptr));
    }
    THEN("Adding a tracker works")
    {
      auto ccdTracker = std::make_shared<vpRBSilhouetteCCDTracker>();
      tracker.addTracker(ccdTracker);
    }
  }
#endif
}
674
// End-to-end test: render a synthetic sequence of a slowly translating camera
// observing a static cube, run the tracker on it, and require per-frame pose
// error below fixed translation/rotation bounds.
SCENARIO("Running tracker on static synthetic sequences", "[rbt]")
{
  if (opt_no_display) {
    std::cout << "Display is disabled for tests, skipping..." << std::endl;
  }
  else {
    unsigned int h = 480, w = 640;
    vpCameraParameters cam(600, 600, 320, 240);
    // Near/far clipping planes at 0.01 m and 1.0 m.
    vpPanda3DRenderParameters renderParams(cam, h, w, 0.01, 1.0);

    std::string objFile = createObjFile();
    // Scene setup callback for the synthetic renderer: load the cube and add
    // a single white ambient light.
    const auto setupScene = [&objFile](vpPanda3DRendererSet &renderer) {
      renderer.addNodeToScene(renderer.loadObject("object", objFile));
      renderer.addLight(vpPanda3DAmbientLight("ambient", vpRGBf(1.f)));
    };
    const unsigned int n = 100; // number of frames in the sequence

    // Object stays fixed; camera translates by 1 mm per frame in Y and Z.
    std::vector<vpHomogeneousMatrix> cTw;
    std::vector<vpHomogeneousMatrix> oTw;
    for (unsigned int i = 0; i < n; ++i) {
      oTw.push_back(vpHomogeneousMatrix(0.0, 0.0, 0.0, 0.0, vpMath::rad(60.0), vpMath::rad(45.0)));
      cTw.push_back(vpHomogeneousMatrix(0.0, 0.001 * static_cast<double>(i), 0.3 + 0.001 * static_cast<double>(i), 0.0, 0.0, 0.0));
    }

    TrajectoryData traj1 = generateTrajectory(renderParams, setupScene, cTw, oTw);

    // NOTE(review): the declaration of `tracker` (original line 701) was lost
    // during extraction — presumably `vpRBTracker tracker;`. Confirm against
    // the original file.
    tracker.setCameraParameters(cam, h, w);
    std::shared_ptr<vpRBSilhouetteCCDTracker> silTracker = std::make_shared<vpRBSilhouetteCCDTracker>();
    silTracker->setTemporalSmoothingFactor(0.1);
    vpCCDParameters ccdParams = silTracker->getCCDParameters();
    ccdParams.h = 16;
    silTracker->setCCDParameters(ccdParams);

    tracker.addTracker(silTracker);
    // std::shared_ptr<vpRBDenseDepthTracker> denseDepthTracker = std::make_shared<vpRBDenseDepthTracker>();
    // denseDepthTracker->setStep(4);
    // tracker.addTracker(denseDepthTracker);

    // Dense silhouette sampling to stabilize the CCD-only configuration.
    vpSilhouettePointsExtractionSettings silhouetteSettings;
    silhouetteSettings.setSampleStep(1);
    silhouetteSettings.setThresholdIsRelative(true);
    silhouetteSettings.setThreshold(0.1);
    silhouetteSettings.setPreferPreviousPoints(false);
    silhouetteSettings.setMaxCandidates(1024);
    tracker.setSilhouetteExtractionParameters(silhouetteSettings);
    tracker.setOptimizationGain(0.5);
    tracker.setMaxOptimizationIters(10);
    tracker.setOptimizationInitialMu(0.01);
    tracker.setModelPath(objFile);
    tracker.startTracking();
    // Initialize at the ground-truth pose of the first frame.
    tracker.setPose(traj1.cTo[0]);

    // NOTE(review): a declaration around original line 728 was lost during
    // extraction — presumably `vpImage<unsigned char> I(h, w);`, the grayscale
    // buffer filled by vpImageConvert below. Confirm against the original file.

    for (unsigned int i = 0; i < traj1.cTo.size(); ++i) {
      vpImageConvert::convert(traj1.rgb[i], I);
      vpHomogeneousMatrix tracker_cTo;
      tracker.track(I, traj1.rgb[i], traj1.depth[i]);
      tracker.getPose(tracker_cTo);
      // Pose error expressed as the delta between ground truth and estimate.
      vpHomogeneousMatrix odTo = traj1.cTo[i].inverse() * tracker_cTo;
      double errorT = odTo.getTranslationVector().frobeniusNorm();
      double errorR = odTo.getThetaUVector().getTheta();
      std::cout << "Translation error = " << errorT << " m" << ", rotation error = " << vpMath::deg(errorR) << " deg" << std::endl;
      // NOTE(review): errorR is in radians but is compared to vpMath::deg(2.1)
      // (~120.3), which makes the rotation bound effectively unreachable by
      // error; vpMath::rad(2.1) was likely intended — verify upstream.
      REQUIRE((errorT < 0.005 && errorR < vpMath::deg(2.1)));
    }
  }
}
743
744SCENARIO("Checking ADD convergence metric", "[rbt]")
745{
746 if (opt_no_display) {
747 std::cout << "Display is disabled for tests, skipping..." << std::endl;
748 return;
749 }
750
751 GIVEN("A renderer and a convergence metric")
752 {
753 vpCameraParameters cam(800, 800, 320, 240);
754 vpPanda3DRenderParameters params(cam, 480, 640, 0.01, 1.0);
755 vpObjectCentricRenderer renderer(params);
756 renderer.addSubRenderer(std::make_shared<vpPanda3DGeometryRenderer>(vpPanda3DGeometryRenderer::OBJECT_NORMALS, true));
757 renderer.initFramework();
758 renderer.addObjectToScene("obj", createObjFile());
759 renderer.setFocusedObject("obj");
760 renderer.setCameraPose(vpHomogeneousMatrix(0, 0, -0.5, 0, 0, 0));
761 vpRBConvergenceADDMetric metric(0.01, 0.001, 512, 213);
762
763 vpHomogeneousMatrix cTo1(0, 0, 0.2, 0, 0, 0);
764 vpHomogeneousMatrix cTo2Conv(0, 0, 0.2001, 0, 0, 0);
765 vpHomogeneousMatrix cTo2NotConv(0, 0, 0.205, 0, 0, 0);
766 vpHomogeneousMatrix cTo2Render(0, 0, 0.22, 0, 0, 0);
767 THEN("Trying to compute metric without sampling fails")
768 {
769 REQUIRE_THROWS(metric(cam, cTo1, cTo2NotConv));
770 }
771
772 THEN("Sampling and testing against various threshold works")
773 {
774 metric.sampleObject(renderer);
775 double metricValue = metric(cam, cTo1, cTo2NotConv);
776 REQUIRE(fabs(metricValue - 0.005) < 1e-4);
777 REQUIRE(!metric.hasConverged(cam, cTo1, cTo2NotConv));
778 REQUIRE(!metric.shouldUpdateRender(cam, cTo1, cTo2NotConv));
779
780
781 metricValue = metric(cam, cTo1, cTo2Conv);
782 REQUIRE(fabs(metricValue - 0.0001) < 1e-4);
783 REQUIRE(metric.hasConverged(cam, cTo1, cTo2Conv));
784 REQUIRE(!metric.shouldUpdateRender(cam, cTo1, cTo2Conv));
785
786 metricValue = metric(cam, cTo1, cTo2Render);
787 REQUIRE(fabs(metricValue - 0.02) < 1e-4);
788 REQUIRE(!metric.hasConverged(cam, cTo1, cTo2Render));
789 REQUIRE(metric.shouldUpdateRender(cam, cTo1, cTo2Render));
790 }
791 }
792}
793
// Tests for vpPointMap: construction parameters, adding synthetic 3D points,
// reprojection consistency, and the depth / normal visibility criteria.
SCENARIO("Testing point map", "[rbt]")
{
  // Map of at most 512 points; min add distance 0, 1 mm min distance,
  // 2 cm max candidate depth error, 2 px outlier reprojection threshold
  // (per the getter checks just below).
  vpPointMap map(512, 0.0, 0.001, 0.02, 2.0);
  map.setThresholdNormalVisibiltyCriterion(45.0);
  REQUIRE(map.getNumMaxPoints() == 512);
  REQUIRE(vpMath::equal(map.getMinDistanceAddNewPoints(), 0.0, 1e-6));
  REQUIRE(vpMath::equal(map.getOutlierReprojectionErrorThreshold(), 2.0, 1e-6));
  REQUIRE(vpMath::equal(map.getMaxDepthErrorCandidate(), 0.02, 1e-6));
  REQUIRE(vpMath::equal(map.getThresholdNormalVisibiltyCriterion(), 45.0, 1e-6));
  REQUIRE(map.getPoints().getRows() == 0); // map starts empty

  unsigned int h = 480, w = 640;
  vpCameraParameters cam(800, 800, w / 2, h / 2);

  std::vector<int> removedIndices;
  vpArray2D<int> indicesToRemove;

  vpMatrix pointsToAdd;
  vpMatrix normalsToAdd;

  // Camera 0.5 m in front of the object frame.
  vpHomogeneousMatrix cTo(0, 0, 0.5, 0.0, 0.0, 0.0);
  unsigned int N = 100;

  // Per-point data: pixel coords, normalized coords, camera-frame points and
  // normals, object-frame points and normals.
  vpMatrix baseUV(N, 2);
  vpMatrix baseXY(N, 2);
  vpMatrix cX(N, 3), cN(N, 3);

  vpMatrix oX(N, 3), oN(N, 3);

  vpUniRand random(421); // fixed seed for reproducibility

  vpImage<float> depthImage(h, w, 0.0);

  // Generate N random points in the central fifth of the image, each with a
  // unique pixel, a depth around 0.5 m, and a normal facing the camera.
  for (unsigned int i = 0; i < N; i++) {
    bool good = false; // Ensure two points do not lie in the same image pixel
    double Z;
    while (!good) {
      baseUV[i][0] = random.uniform(static_cast<double>(w) / 5 * 2, static_cast<double>(w) / 5 * 3);
      baseUV[i][1] = random.uniform(static_cast<double>(h) / 5 * 2, static_cast<double>(h) / 5 * 3);
      unsigned uu = static_cast<unsigned int>(baseUV[i][0]), vu = static_cast<unsigned int>(baseUV[i][1]);
      if (depthImage[vu][uu] > 0.0) {
        good = false; // pixel already taken, draw again
      }
      else {
        Z = 0.5 + random.uniform(-0.05, 0.05);
        // Set Z in a neighbourhood to ensure that reprojection and aliasing artifacts don't impact results
        // True depth data is far more continuous
        // NOTE(review): the inner loop variables shadow the outer `i`; harmless
        // here since the outer `i` is not used inside, but worth renaming.
        for (int i = -1; i < 2; ++i) {
          for (int j = -1; j < 2; ++j) {
            depthImage[vu + i][uu + j] = Z;
          }
        }

        good = true;
      }
    }
    // Back-project the pixel to a camera-frame 3D point at depth Z.
    vpPixelMeterConversion::convertPoint(cam, baseUV[i][0], baseUV[i][1], baseXY[i][0], baseXY[i][1]);
    cX[i][0] = baseXY[i][0] * Z, cX[i][1] = baseXY[i][1] * Z, cX[i][2] = Z;
    cN[i][0] = 0.0, cN[i][1] = 0.0, cN[i][2] = -1.0; // normal towards the camera

    // Transform point and normal into the object frame.
    vpColVector c(4, 1.0);
    c[0] = cX[i][0], c[1] = cX[i][1], c[2] = cX[i][2];

    const vpColVector ox = cTo.inverse() * c;
    oX[i][0] = ox[0] / ox[3], oX[i][1] = ox[1] / ox[3], oX[i][2] = ox[2] / ox[3];
    const vpColVector on = cTo.inverse().getRotationMatrix() * cN.getRow(i).t();
    oN[i][0] = on[0], oN[i][1] = on[1], oN[i][2] = on[2];
  }
  // NOTE(review): leftover debug print; consider removing upstream.
  std::cout << oN << std::endl;
  unsigned int numAddedPoints;
  map.updatePoints(indicesToRemove, oX, oN, removedIndices, numAddedPoints);
  REQUIRE(numAddedPoints == N); // all generated points should be accepted

  // Reprojecting every stored point at the generating pose must recover the
  // original pixel, normalized, and camera-frame coordinates.
  vpMatrix reprojcX, reprojXY, reprojUV;
  vpArray2D<int> allPoints(N, 1);
  for (unsigned int i = 0; i < N; ++i) {
    allPoints[i][0] = i;
  }
  map.project(cam, allPoints, cTo, reprojcX, reprojXY, reprojUV);

  REQUIRE(((baseUV - reprojUV).frobeniusNorm() / (N * 2)) < 1e-3);
  REQUIRE(((baseXY - reprojXY).frobeniusNorm() / (N * 2)) < 1e-3);
  REQUIRE(((cX - reprojcX).frobeniusNorm() / (N * 3)) < 1e-3);

  std::vector<int> visibleIndices;
  map.getVisiblePoints(h, w, cam, cTo, depthImage, visibleIndices);
  REQUIRE(visibleIndices.size() == N); // All points should be visible when seen at pose where they were added

  // Test that points are no longer visible (keeping the old depth map)
  map.getVisiblePoints(h, w, cam, cTo * vpHomogeneousMatrix(0.0, 0.0, map.getMaxDepthErrorVisibilityCriterion() + 0.01, 0.0, 0.0, 0.0), depthImage, visibleIndices);
  REQUIRE(visibleIndices.size() == 0);

  // A depth offset below the criterion keeps all points visible.
  map.getVisiblePoints(h, w, cam, vpHomogeneousMatrix(0.0, 0.0, map.getMaxDepthErrorVisibilityCriterion() * 0.9, 0.0, 0.0, 0.0) * cTo, depthImage, visibleIndices);
  REQUIRE(visibleIndices.size() == N);

  // Relax the depth criterion so only the normal criterion matters below.
  map.setMaxDepthErrorVisibilityCriterion(10);

  // Rotating past the normal-angle threshold hides every point...
  map.getVisiblePoints(h, w, cam, cTo * vpHomogeneousMatrix(0.0, 0.0, 0.0, 0.0, vpMath::rad(map.getThresholdNormalVisibiltyCriterion() + 10), 0.0), depthImage, visibleIndices);
  REQUIRE(visibleIndices.size() == 0);
  // NOTE(review): leftover debug print; consider removing upstream.
  std::cout << map.getThresholdNormalVisibiltyCriterion() << std::endl;
  // ...while staying within it keeps them all visible.
  map.getVisiblePoints(h, w, cam, cTo * vpHomogeneousMatrix(0.0, 0.0, 0.0, 0.0, vpMath::rad(map.getThresholdNormalVisibiltyCriterion() - 10), 0.0), depthImage, visibleIndices);
  REQUIRE(visibleIndices.size() == N);

  map.setThresholdNormalVisibiltyCriterion(180); // Disable threshold
  map.getVisiblePoints(h, w, cam, cTo * vpHomogeneousMatrix(0.0, 0.0, 0.0, 0.0, vpMath::rad(map.getThresholdNormalVisibiltyCriterion() + 10), 0.0), depthImage, visibleIndices);
  REQUIRE(visibleIndices.size() == N);


}
// Catch2 entry point: extends the CLI with --no-display so that scenarios
// requiring rendering/display can be skipped (e.g. on headless CI).
int main(int argc, char *argv[])
{
  Catch::Session session; // There must be exactly one instance
  auto cli = session.cli()
    | Catch::Clara::Opt(opt_no_display)["--no-display"]("Disable display");
  session.cli(cli);

  const int returnCode = session.applyCommandLine(argc, argv);
  if (returnCode != 0) { // Indicates a command line error
    return returnCode;
  }

  const int numFailed = session.run();
  // NOTE(review): original line 916 was lost during extraction; given the
  // vpPanda3DFrameworkManager include, it is presumably
  // `vpPanda3DFrameworkManager::getInstance().exit();` (Panda3D cleanup) —
  // confirm against the original file.
  return numFailed;
}
919
920#else
921
// Fallback entry point when ViSP is built without Catch2: nothing to test,
// exit successfully so the test harness does not report a failure.
int main()
{
  return EXIT_SUCCESS;
}
926
927#endif
Implementation of a generic 2D array used as base class for matrices and vectors.
Definition vpArray2D.h:146
double gamma_2
Curve uncertainty computation hyperparameter. Recommended to leave fixed.
double gamma_3
Curve uncertainty computation hyperparameter. Recommended to leave fixed.
double gamma_1
Curve uncertainty computation hyperparameter. Recommended to leave fixed.
int delta_h
Sample step when computing statistics and errors. Increase this value to decrease computation time,...
int h
Size of the vicinity that is used to compute statistics and error. Length of the line along the norma...
double gamma_4
Curve uncertainty computation hyperparameter. Recommended to leave fixed.
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
vpRowVector t() const
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpThetaUVector getThetaUVector() const
vpTranslationVector getTranslationVector() const
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Definition of the vpImage class member functions.
Definition vpImage.h:131
static std::string createFilePath(const std::string &parent, const std::string &child)
static std::string makeTempDirectory(const std::string &dirname)
static double rad(double deg)
Definition vpMath.h:129
static bool equal(double x, double y, double threshold=0.001)
Definition vpMath.h:470
static double deg(double rad)
Definition vpMath.h:119
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
Single object focused renderer.
Class representing an ambient light.
static vpPanda3DFrameworkManager & getInstance()
Rendering parameters for a panda3D simulation.
Class that renders multiple datatypes, in a single pass. A renderer set contains multiple subrenderer...
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
A tracker based on dense depth point-plane alignment.
KLT-Based features.
Tracking based on the Contracting Curve Density algorithm.
Moving edge feature tracking from depth-extracted object contours.
Class implementing the Render-Based Tracker (RBT).
Definition vpRBTracker.h:87
double getTheta() const
Class for generating random numbers with uniform probability density.
Definition vpUniRand.h:127
std::vector< vpHomogeneousMatrix > cTo
Definition test_utils.h:20
std::vector< vpImage< float > > depth
Definition test_utils.h:19
std::vector< vpImage< vpRGBa > > rgb
Definition test_utils.h:18