34#include <visp3/core/vpConfig.h>
36#if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
41#include <visp3/core/vpImageConvert.h>
42#include <visp3/sensor/vpRealSense2.h>
44#define MANUAL_POINTCLOUD 1
46#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
// Equality for two librealsense rs2_extrinsics: every one of the 9 rotation
// coefficients and the 3 translation components must match to within
// float epsilon.
// NOTE(review): the early `return false;` lines, the closing braces and the
// final `return true;` of this operator are elided from this view.
49bool operator==(
const rs2_extrinsics &lhs,
const rs2_extrinsics &rhs)
// Walk the 3x3 rotation stored as a flat float[9].
51 for (
int i = 0;
i < 3;
i++) {
52 for (
int j = 0;
j < 3;
j++) {
// Any rotation coefficient differing by more than epsilon => not equal.
53 if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
// Translation is compared per axis with the same tolerance.
58 if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
90 auto data =
m_pipe.wait_for_frames();
91 auto color_frame = data.get_color_frame();
94 *ts = data.get_timestamp();
105 auto data =
m_pipe.wait_for_frames();
106 auto color_frame = data.get_color_frame();
109 *ts = data.get_timestamp();
124 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared,
125 rs2::align *
const align_to,
double *ts)
127 acquire(data_image, data_depth, data_pointCloud, data_infrared,
nullptr, align_to, ts);
// Fragment of vpRealSense2::acquire(): grabs one frameset and dispatches it
// to the optional output buffers (color image, depth image, point cloud, and
// two infrared streams) plus an optional timestamp. Every output pointer may
// be nullptr, in which case that stream is simply skipped.
193 std::vector<vpColVector> *
const data_pointCloud,
unsigned char *
const data_infrared1,
194 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
// Block until a coherent set of frames is available from the pipeline.
196 auto data =
m_pipe.wait_for_frames();
197 if (align_to !=
nullptr) {
// librealsense renamed this method: releases after 2.9.0 expose process(),
// while older SDKs shipped it misspelled as proccess() — the #if selects
// whichever spelling exists for the SDK version being compiled against.
// Do not "fix" the spelling in the #else branch.
200#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
201 data = align_to->process(data);
203 data = align_to->proccess(data);
207 if (data_image !=
nullptr) {
208 auto color_frame = data.get_color_frame();
// The depth frame is fetched once and shared by the depth-image and
// point-cloud outputs.
212 if (data_depth !=
nullptr || data_pointCloud !=
nullptr) {
213 auto depth_frame = data.get_depth_frame();
214 if (data_depth !=
nullptr) {
218 if (data_pointCloud !=
nullptr) {
// First infrared output takes the first INFRARED stream in the frameset;
// the second explicitly requests infrared stream index 2.
223 if (data_infrared1 !=
nullptr) {
224 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
228 if (data_infrared2 !=
nullptr) {
229 auto infrared_frame = data.get_infrared_frame(2);
234 *ts = data.get_timestamp();
238#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
261 auto data =
m_pipe.wait_for_frames();
263 if (left !=
nullptr) {
264 auto left_fisheye_frame = data.get_fisheye_frame(1);
265 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
266 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
267 left->
resize(height, width);
271 if (right !=
nullptr) {
272 auto right_fisheye_frame = data.get_fisheye_frame(2);
273 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
274 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
275 right->
resize(height, width);
280 *ts = data.get_timestamp();
315 auto data =
m_pipe.wait_for_frames();
317 if (left !=
nullptr) {
318 auto left_fisheye_frame = data.get_fisheye_frame(1);
319 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
320 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
321 left->
resize(height, width);
325 if (right !=
nullptr) {
326 auto right_fisheye_frame = data.get_fisheye_frame(2);
327 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
328 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
329 right->
resize(height, width);
333 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
334 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
337 *ts = data.get_timestamp();
340 if (cMw !=
nullptr) {
341 m_pos[0] =
static_cast<double>(pose_data.translation.x);
342 m_pos[1] =
static_cast<double>(pose_data.translation.y);
343 m_pos[2] =
static_cast<double>(pose_data.translation.z);
345 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
346 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
347 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
348 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
353 if (odo_vel !=
nullptr) {
354 odo_vel->
resize(6,
false);
355 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
356 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
357 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
358 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
359 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
360 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
363 if (odo_acc !=
nullptr) {
364 odo_acc->
resize(6,
false);
365 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
366 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
367 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
368 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
369 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
370 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
373 if (confidence !=
nullptr) {
374 *confidence = pose_data.tracker_confidence;
393 unsigned int *confidence,
double *ts)
395 auto data =
m_pipe.wait_for_frames();
397 if (left !=
nullptr) {
398 auto left_fisheye_frame = data.get_fisheye_frame(1);
399 unsigned int width =
static_cast<unsigned int>(left_fisheye_frame.get_width());
400 unsigned int height =
static_cast<unsigned int>(left_fisheye_frame.get_height());
401 left->
resize(height, width);
405 if (right !=
nullptr) {
406 auto right_fisheye_frame = data.get_fisheye_frame(2);
407 unsigned int width =
static_cast<unsigned int>(right_fisheye_frame.get_width());
408 unsigned int height =
static_cast<unsigned int>(right_fisheye_frame.get_height());
409 right->
resize(height, width);
413 auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
414 auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();
417 *ts = data.get_timestamp();
420 if (cMw !=
nullptr) {
421 m_pos[0] =
static_cast<double>(pose_data.translation.x);
422 m_pos[1] =
static_cast<double>(pose_data.translation.y);
423 m_pos[2] =
static_cast<double>(pose_data.translation.z);
425 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
426 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
427 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
428 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
433 if (odo_vel !=
nullptr) {
434 odo_vel->
resize(6,
false);
435 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
436 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
437 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
438 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
439 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
440 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
443 if (odo_acc !=
nullptr) {
444 odo_acc->
resize(6,
false);
445 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
446 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
447 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
448 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
449 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
450 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
453 auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
454 auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();
456 if (imu_acc !=
nullptr) {
457 imu_acc->
resize(3,
false);
458 (*imu_acc)[0] =
static_cast<double>(accel_data.x);
459 (*imu_acc)[1] =
static_cast<double>(accel_data.y);
460 (*imu_acc)[2] =
static_cast<double>(accel_data.z);
463 auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
464 auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();
466 if (imu_vel !=
nullptr) {
467 imu_vel->
resize(3,
false);
468 (*imu_vel)[0] =
static_cast<double>(gyro_data.x);
469 (*imu_vel)[1] =
static_cast<double>(gyro_data.y);
470 (*imu_vel)[2] =
static_cast<double>(gyro_data.z);
473 if (confidence !=
nullptr) {
474 *confidence = pose_data.tracker_confidence;
479#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
493 std::vector<vpColVector> *
const data_pointCloud,
494 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared,
495 rs2::align *
const align_to,
double *ts)
497 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared,
nullptr, align_to, ts);
515 std::vector<vpColVector> *
const data_pointCloud,
516 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
517 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
519 auto data =
m_pipe.wait_for_frames();
520 if (align_to !=
nullptr) {
523#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
524 data = align_to->process(data);
526 data = align_to->proccess(data);
530 if (data_image !=
nullptr) {
531 auto color_frame = data.get_color_frame();
535 if (data_depth !=
nullptr || data_pointCloud !=
nullptr || pointcloud !=
nullptr) {
536 auto depth_frame = data.get_depth_frame();
537 if (data_depth !=
nullptr) {
541 if (data_pointCloud !=
nullptr) {
545 if (pointcloud !=
nullptr) {
550 if (data_infrared1 !=
nullptr) {
551 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
555 if (data_infrared2 !=
nullptr) {
556 auto infrared_frame = data.get_infrared_frame(2);
561 *ts = data.get_timestamp();
578 std::vector<vpColVector> *
const data_pointCloud,
579 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared,
580 rs2::align *
const align_to,
double *ts)
582 acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared,
nullptr, align_to, ts);
600 std::vector<vpColVector> *
const data_pointCloud,
601 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
unsigned char *
const data_infrared1,
602 unsigned char *
const data_infrared2, rs2::align *
const align_to,
double *ts)
604 auto data =
m_pipe.wait_for_frames();
605 if (align_to !=
nullptr) {
608#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
609 data = align_to->process(data);
611 data = align_to->proccess(data);
615 auto color_frame = data.get_color_frame();
616 if (data_image !=
nullptr) {
620 if (data_depth !=
nullptr || data_pointCloud !=
nullptr || pointcloud !=
nullptr) {
621 auto depth_frame = data.get_depth_frame();
622 if (data_depth !=
nullptr) {
626 if (data_pointCloud !=
nullptr) {
630 if (pointcloud !=
nullptr) {
635 if (data_infrared1 !=
nullptr) {
636 auto infrared_frame = data.first(RS2_STREAM_INFRARED);
640 if (data_infrared2 !=
nullptr) {
641 auto infrared_frame = data.get_infrared_frame(2);
646 *ts = data.get_timestamp();
683 auto rs_stream =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
684 auto intrinsics = rs_stream.get_intrinsics();
687 double u0 = intrinsics.ppx;
688 double v0 = intrinsics.ppy;
689 double px = intrinsics.fx;
690 double py = intrinsics.fy;
694 double kdu = intrinsics.coeffs[0];
695 cam.initPersProjWithDistortion(px, py, u0, v0, -kdu, kdu);
699 std::vector<double> tmp_coefs;
700 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[0]));
701 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[1]));
702 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[2]));
703 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[3]));
704 tmp_coefs.push_back(
static_cast<double>(intrinsics.coeffs[4]));
706 cam.initProjWithKannalaBrandtDistortion(px, py, u0, v0, tmp_coefs);
711 cam.initPersProjWithoutDistortion(px, py, u0, v0);
728 auto vsp =
m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
729 return vsp.get_intrinsics();
734 auto vf = frame.as<rs2::video_frame>();
735 unsigned int width =
static_cast<unsigned int>(vf.get_width());
736 unsigned int height =
static_cast<unsigned int>(vf.get_height());
737 color.resize(height, width);
739 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
741 reinterpret_cast<unsigned char *
>(color.bitmap), width, height);
743 else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
744 memcpy(
reinterpret_cast<unsigned char *
>(color.bitmap),
745 const_cast<unsigned char *
>(
static_cast<const unsigned char *
>(frame.get_data())),
746 width * height *
sizeof(
vpRGBa));
748 else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
750 reinterpret_cast<unsigned char *
>(color.bitmap), width, height);
// Fragment that opens a temporary pipeline just to query the device's
// depth scale (depth units) from its depth sensor.
764 rs2::pipeline *pipe =
new rs2::pipeline;
765 rs2::pipeline_profile *pipelineProfile =
new rs2::pipeline_profile;
766 *pipelineProfile = pipe->start();
768 rs2::device dev = pipelineProfile->get_device();
// Scan the device's sensors for the depth sensor, which carries the scale.
771 for (rs2::sensor &sensor : dev.query_sensors()) {
773 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
// NOTE(review): only `delete pipelineProfile` is visible in this view —
// confirm the elided lines also stop and delete `pipe`, otherwise the raw
// `new rs2::pipeline` above leaks. Stack objects (RAII) would avoid the
// manual new/delete entirely.
780 delete pipelineProfile;
788 auto vf = frame.as<rs2::video_frame>();
789 unsigned int width =
static_cast<unsigned int>(vf.get_width());
790 unsigned int height =
static_cast<unsigned int>(vf.get_height());
791 grey.
resize(height, width);
793 if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
795 grey.
bitmap, width, height);
797 else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
799 grey.
bitmap, width * height);
801 else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
803 grey.
bitmap, width, height);
812 auto vf = frame.as<rs2::video_frame>();
813 int size = vf.get_width() * vf.get_height();
815 switch (frame.get_profile().format()) {
816 case RS2_FORMAT_RGB8:
817 case RS2_FORMAT_BGR8:
818 memcpy(data, frame.get_data(), size * 3);
821 case RS2_FORMAT_RGBA8:
822 case RS2_FORMAT_BGRA8:
823 memcpy(data, frame.get_data(), size * 4);
828 memcpy(data, frame.get_data(), size * 2);
832 memcpy(data, frame.get_data(), size);
842 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
843 std::stringstream ss;
848 auto vf = depth_frame.as<rs2::video_frame>();
849 const int width = vf.get_width();
850 const int height = vf.get_height();
851 pointcloud.resize(
static_cast<size_t>(width * height));
853 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
854 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
858#pragma omp parallel for schedule(dynamic)
859 for (
int i = 0; i < height; i++) {
860 auto depth_pixel_index = i * width;
862 for (
int j = 0; j < width; j++, depth_pixel_index++) {
863 if (p_depth_frame[depth_pixel_index] == 0) {
864 pointcloud[
static_cast<size_t>(depth_pixel_index)].resize(4,
false);
868 pointcloud[
static_cast<size_t>(depth_pixel_index)][3] = 1.0;
873 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
876 const float pixel[] = {
static_cast<float>(j),
static_cast<float>(i) };
877 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
882 pointcloud[
static_cast<size_t>(depth_pixel_index)].resize(4,
false);
883 pointcloud[
static_cast<size_t>(depth_pixel_index)][0] = points[0];
884 pointcloud[
static_cast<size_t>(depth_pixel_index)][1] = points[1];
885 pointcloud[
static_cast<size_t>(depth_pixel_index)][2] = points[2];
886 pointcloud[
static_cast<size_t>(depth_pixel_index)][3] = 1.0;
891#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
894 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
895 std::stringstream ss;
900 auto vf = depth_frame.as<rs2::video_frame>();
901 const int width = vf.get_width();
902 const int height = vf.get_height();
903 pointcloud->width =
static_cast<uint32_t
>(width);
904 pointcloud->height =
static_cast<uint32_t
>(height);
905 pointcloud->resize(
static_cast<size_t>(width * height));
908 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
909 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
913#pragma omp parallel for schedule(dynamic)
914 for (
int i = 0; i < height; i++) {
915 auto depth_pixel_index = i * width;
917 for (
int j = 0; j < width; j++, depth_pixel_index++) {
918 if (p_depth_frame[depth_pixel_index] == 0) {
926 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
929 const float pixel[] = {
static_cast<float>(j),
static_cast<float>(i) };
930 rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);
935 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].x = points[0];
936 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].y = points[1];
937 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].z = points[2];
942 auto vertices =
m_points.get_vertices();
944 for (
size_t i = 0; i <
m_points.size(); i++) {
945 if (vertices[i].z <= 0.0f || vertices[i].z >
m_max_Z) {
951 pointcloud->points[i].x = vertices[i].x;
952 pointcloud->points[i].y = vertices[i].y;
953 pointcloud->points[i].z = vertices[i].z;
960 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
962 if (
m_depthScale <= std::numeric_limits<float>::epsilon()) {
966 auto vf = depth_frame.as<rs2::video_frame>();
967 const int depth_width = vf.get_width();
968 const int depth_height = vf.get_height();
969 pointcloud->width =
static_cast<uint32_t
>(depth_width);
970 pointcloud->height =
static_cast<uint32_t
>(depth_height);
971 pointcloud->resize(
static_cast<uint32_t
>(depth_width * depth_height));
973 vf = color_frame.as<rs2::video_frame>();
974 const int color_width = vf.get_width();
975 const int color_height = vf.get_height();
977 const uint16_t *p_depth_frame =
reinterpret_cast<const uint16_t *
>(depth_frame.get_data());
978 const rs2_extrinsics depth2ColorExtrinsics =
979 depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
980 color_frame.get_profile().as<rs2::video_stream_profile>());
981 const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
982 const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
984 auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
985 const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
986 unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
987 const unsigned char *p_color_frame =
static_cast<const unsigned char *
>(color_frame.get_data());
988 rs2_extrinsics identity;
989 memset(identity.rotation, 0,
sizeof(identity.rotation));
990 memset(identity.translation, 0,
sizeof(identity.translation));
991 for (
int i = 0; i < 3; i++) {
992 identity.rotation[i * 3 + i] = 1;
994 const bool registered_streams =
995 (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);
999#pragma omp parallel for schedule(dynamic)
1000 for (
int i = 0; i < depth_height; i++) {
1001 auto depth_pixel_index = i * depth_width;
1003 for (
int j = 0; j < depth_width; j++, depth_pixel_index++) {
1004 if (p_depth_frame[depth_pixel_index] == 0) {
1012#if (VISP_HAVE_PCL_VERSION < 0x010100)
1013 unsigned int r = 96, g = 157, b = 198;
1014 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1016 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].rgb = *
reinterpret_cast<float *
>(&rgb);
1018 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].r = (uint8_t)96;
1019 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].g = (uint8_t)157;
1020 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].b = (uint8_t)198;
1026 auto pixels_distance =
m_depthScale * p_depth_frame[depth_pixel_index];
1028 float depth_point[3];
1029 const float pixel[] = {
static_cast<float>(j),
static_cast<float>(i) };
1030 rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);
1032 if (pixels_distance >
m_max_Z) {
1036 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].x = depth_point[0];
1037 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].y = depth_point[1];
1038 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].z = depth_point[2];
1040 if (!registered_streams) {
1041 float color_point[3];
1042 rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
1043 float color_pixel[2];
1044 rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);
1046 if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
1047 color_pixel[0] >= color_width) {
1051#if (VISP_HAVE_PCL_VERSION < 0x010100)
1052 unsigned int r = 96, g = 157, b = 198;
1053 uint32_t rgb = (
static_cast<uint32_t
>(r) << 16 |
static_cast<uint32_t
>(g) << 8 |
static_cast<uint32_t
>(b));
1055 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].rgb = *
reinterpret_cast<float *
>(&rgb);
1057 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].r = (uint8_t)96;
1058 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].g = (uint8_t)157;
1059 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].b = (uint8_t)198;
1063 unsigned int i_ =
static_cast<unsigned int>(color_pixel[1]);
1064 unsigned int j_ =
static_cast<unsigned int>(color_pixel[0]);
1066#if (VISP_HAVE_PCL_VERSION < 0x010100)
1070 (
static_cast<uint32_t
>(p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel]) |
1071 static_cast<uint32_t
>(p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1]) << 8 |
1072 static_cast<uint32_t
>(p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2])
1077 (
static_cast<uint32_t
>(p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel]) << 16 |
1078 static_cast<uint32_t
>(p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1]) << 8 |
1079 static_cast<uint32_t
>(p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2]));
1082 pointcloud->points[
static_cast<size_t>(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1085 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].b =
1086 p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel];
1087 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].g =
1088 p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1];
1089 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].r =
1090 p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2];
1093 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].r =
1094 p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel];
1095 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].g =
1096 p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1];
1097 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].b =
1098 p_color_frame[(i_ *
static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2];
1104#if (VISP_HAVE_PCL_VERSION < 0x010100)
1107 rgb = (
static_cast<uint32_t
>(p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel]) |
1108 static_cast<uint32_t
>(p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1]) << 8 |
1109 static_cast<uint32_t
>(p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2]) << 16);
1112 rgb = (
static_cast<uint32_t
>(p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel]) << 16 |
1113 static_cast<uint32_t
>(p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1]) << 8 |
1114 static_cast<uint32_t
>(p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2]));
1117 pointcloud->points[
static_cast<size_t>(i * depth_width + j)].rgb = *
reinterpret_cast<float *
>(&rgb);
1120 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].b =
1121 p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel];
1122 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].g =
1123 p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1];
1124 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].r =
1125 p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2];
1128 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].r =
1129 p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel];
1130 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].g =
1131 p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1];
1132 pointcloud->points[
static_cast<size_t>(depth_pixel_index)].b =
1133 p_color_frame[(i *
static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2];
1155 if (from_index != -1)
1157 if (from_index == 1)
1159 else if (from_index == 2)
1166 rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);
1170 for (
unsigned int i = 0; i < 3; i++) {
1171 t[i] = extrinsics.translation[i];
1172 for (
unsigned int j = 0; j < 3; j++)
1173 R[i][j] = extrinsics.rotation[j * 3 + i];
1180#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1193 auto frame =
m_pipe.wait_for_frames();
1194 auto f = frame.first_or_default(RS2_STREAM_POSE);
1195 auto pose_data = f.as<rs2::pose_frame>().get_pose_data();
1198 *ts = frame.get_timestamp();
1200 if (cMw !=
nullptr) {
1201 m_pos[0] =
static_cast<double>(pose_data.translation.x);
1202 m_pos[1] =
static_cast<double>(pose_data.translation.y);
1203 m_pos[2] =
static_cast<double>(pose_data.translation.z);
1205 m_quat[0] =
static_cast<double>(pose_data.rotation.x);
1206 m_quat[1] =
static_cast<double>(pose_data.rotation.y);
1207 m_quat[2] =
static_cast<double>(pose_data.rotation.z);
1208 m_quat[3] =
static_cast<double>(pose_data.rotation.w);
1213 if (odo_vel !=
nullptr) {
1214 odo_vel->
resize(6,
false);
1215 (*odo_vel)[0] =
static_cast<double>(pose_data.velocity.x);
1216 (*odo_vel)[1] =
static_cast<double>(pose_data.velocity.y);
1217 (*odo_vel)[2] =
static_cast<double>(pose_data.velocity.z);
1218 (*odo_vel)[3] =
static_cast<double>(pose_data.angular_velocity.x);
1219 (*odo_vel)[4] =
static_cast<double>(pose_data.angular_velocity.y);
1220 (*odo_vel)[5] =
static_cast<double>(pose_data.angular_velocity.z);
1223 if (odo_acc !=
nullptr) {
1224 odo_acc->
resize(6,
false);
1225 (*odo_acc)[0] =
static_cast<double>(pose_data.acceleration.x);
1226 (*odo_acc)[1] =
static_cast<double>(pose_data.acceleration.y);
1227 (*odo_acc)[2] =
static_cast<double>(pose_data.acceleration.z);
1228 (*odo_acc)[3] =
static_cast<double>(pose_data.angular_acceleration.x);
1229 (*odo_acc)[4] =
static_cast<double>(pose_data.angular_acceleration.y);
1230 (*odo_acc)[5] =
static_cast<double>(pose_data.angular_acceleration.z);
1233 return pose_data.tracker_confidence;
1258 auto frame =
m_pipe.wait_for_frames();
1259 auto f = frame.first_or_default(RS2_STREAM_ACCEL);
1260 auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();
1263 *ts = f.get_timestamp();
1265 if (imu_acc !=
nullptr) {
1266 imu_acc->
resize(3,
false);
1267 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1268 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1269 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
// Fragment that reads the gyroscope (angular velocity, rad/s) stream and
// copies it into the optional 3-vector output; *ts gets the gyro frame's
// own timestamp.
1295 auto frame =
m_pipe.wait_for_frames();
1296 auto f = frame.first_or_default(RS2_STREAM_GYRO);
1297 auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();
1300 *ts = f.get_timestamp();
1302 if (imu_vel !=
nullptr) {
1303 imu_vel->
resize(3,
false);
1304 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
// BUG: copy-paste error — components [1] and [2] below should read
// imu_vel_data.y and imu_vel_data.z respectively (the combined-IMU
// acquisition path in this file correctly uses .y/.z). As written, all
// three axes report the x component of the gyro sample.
1305 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.x);
1306 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.x);
// Fragment of the combined-IMU acquisition: one wait_for_frames() serves
// both the accelerometer and gyroscope outputs; *ts here is the frameset
// timestamp (not a per-motion-frame timestamp, unlike the single-stream
// accessors in this file).
1332 auto data =
m_pipe.wait_for_frames();
1335 *ts = data.get_timestamp();
1337 if (imu_acc !=
nullptr) {
1338 auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
1339 auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();
// Linear acceleration copied per axis into the 3-vector.
1341 imu_acc->
resize(3,
false);
1342 (*imu_acc)[0] =
static_cast<double>(imu_acc_data.x);
1343 (*imu_acc)[1] =
static_cast<double>(imu_acc_data.y);
1344 (*imu_acc)[2] =
static_cast<double>(imu_acc_data.z);
1347 if (imu_vel !=
nullptr) {
1348 auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
1349 auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();
// Angular velocity copied per axis — note this path correctly uses
// .x/.y/.z, unlike the single-stream gyro accessor above.
1351 imu_vel->
resize(3,
false);
1352 (*imu_vel)[0] =
static_cast<double>(imu_vel_data.x);
1353 (*imu_vel)[1] =
static_cast<double>(imu_vel_data.y);
1354 (*imu_vel)[2] =
static_cast<double>(imu_vel_data.z);
1372#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1378 for (rs2::sensor &sensor : dev.query_sensors()) {
1380 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1405#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1411 for (rs2::sensor &sensor : dev.query_sensors()) {
1413 if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
1428#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
1438 auto list = ctx.query_devices();
1439 if (list.size() > 0) {
1441 auto dev = list.front();
1442 auto sensors = dev.query_sensors();
1443 if (dev.supports(RS2_CAMERA_INFO_PRODUCT_LINE)) {
1450 return (std::string(
"unknown"));
1465 std::ostringstream oss;
1473void print(
const rs2_extrinsics &extrinsics, std::ostream &os)
1475 std::stringstream ss;
1476 ss <<
"Rotation Matrix:\n";
1478 for (
auto i = 0; i < 3; ++i) {
1479 for (
auto j = 0; j < 3; ++j) {
1480 ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
1485 ss <<
"\nTranslation Vector: ";
1486 for (
size_t i = 0;
i <
sizeof(extrinsics.translation) /
sizeof(extrinsics.translation[0]); ++
i)
1487 ss << std::setprecision(15) << extrinsics.translation[
i] <<
" ";
1489 os << ss.str() <<
"\n\n";
1492void print(
const rs2_intrinsics &intrinsics, std::ostream &os)
1494 std::stringstream ss;
1495 ss << std::left << std::setw(14) <<
"Width: "
1496 <<
"\t" << intrinsics.width <<
"\n"
1497 << std::left << std::setw(14) <<
"Height: "
1498 <<
"\t" << intrinsics.height <<
"\n"
1499 << std::left << std::setw(14) <<
"PPX: "
1500 <<
"\t" << std::setprecision(15) << intrinsics.ppx <<
"\n"
1501 << std::left << std::setw(14) <<
"PPY: "
1502 <<
"\t" << std::setprecision(15) << intrinsics.ppy <<
"\n"
1503 << std::left << std::setw(14) <<
"Fx: "
1504 <<
"\t" << std::setprecision(15) << intrinsics.fx <<
"\n"
1505 << std::left << std::setw(14) <<
"Fy: "
1506 <<
"\t" << std::setprecision(15) << intrinsics.fy <<
"\n"
1507 << std::left << std::setw(14) <<
"Distortion: "
1508 <<
"\t" << rs2_distortion_to_string(intrinsics.model) <<
"\n"
1509 << std::left << std::setw(14) <<
"Coeffs: ";
1511 for (
size_t i = 0;
i <
sizeof(intrinsics.coeffs) /
sizeof(intrinsics.coeffs[0]); ++
i)
1512 ss <<
"\t" << std::setprecision(15) << intrinsics.coeffs[
i] <<
" ";
1514 os << ss.str() <<
"\n\n";
1517void safe_get_intrinsics(
const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
1520 intrinsics = profile.get_intrinsics();
// Field-wise equality for rs2_intrinsics. Floating-point members are
// compared with exact == — presumably acceptable because both sides come
// from identical SDK queries (bit-identical values expected); confirm if
// ever used on computed intrinsics. The distortion coefficient array is
// compared bytewise with memcmp.
1526bool operator==(
const rs2_intrinsics &lhs,
const rs2_intrinsics &rhs)
1528 return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
1529 lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
1530 !std::memcmp(lhs.coeffs, rhs.coeffs,
sizeof(rhs.coeffs));
1533std::string get_str_formats(
const std::set<rs2_format> &formats)
1535 std::stringstream ss;
1536 for (
auto format = formats.begin(); format != formats.end(); ++format) {
1537 ss << *
format << ((
format != formats.end()) && (next(format) == formats.end()) ?
"" :
"/");
1542struct stream_and_resolution
1548 std::string stream_name;
1550 bool operator<(
const stream_and_resolution &obj)
const
1552 return (std::make_tuple(stream, stream_index, width, height) <
1553 std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
1557struct stream_and_index
1562 bool operator<(
const stream_and_index &obj)
const
1564 return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
1596 os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
1597 << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
1601 os <<
" Device info: \n";
1602 for (
auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
1603 auto param =
static_cast<rs2_camera_info
>(j);
1604 if (dev.supports(param))
1605 os <<
" " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) <<
": \t"
1606 << dev.get_info(param) <<
"\n";
1611 for (
auto &&sensor : dev.query_sensors()) {
1612 os <<
"Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;
1614 os << std::setw(55) <<
" Supported options:" << std::setw(10) <<
"min" << std::setw(10) <<
" max" << std::setw(6)
1615 <<
" step" << std::setw(10) <<
" default" << std::endl;
1616 for (
auto j = 0; j < RS2_OPTION_COUNT; ++j) {
1617 auto opt =
static_cast<rs2_option
>(j);
1618 if (sensor.supports(opt)) {
1619 auto range = sensor.get_option_range(opt);
1620 os <<
" " << std::left << std::setw(50) << opt <<
" : " << std::setw(5) << range.min <<
"... "
1621 << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def <<
"\n";
1628 for (
auto &&sensor : dev.query_sensors()) {
1629 os <<
"Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) <<
"\n";
1631 os << std::setw(55) <<
" Supported modes:" << std::setw(10) <<
"stream" << std::setw(10) <<
" resolution"
1632 << std::setw(6) <<
" fps" << std::setw(10) <<
" format"
1635 for (
auto &&profile : sensor.get_stream_profiles()) {
1636 if (
auto video = profile.as<rs2::video_stream_profile>()) {
1637 os <<
" " << profile.stream_name() <<
"\t " << video.width() <<
"x" << video.height() <<
"\t@ "
1638 << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
1641 os <<
" " << profile.stream_name() <<
"\t@ " << profile.fps() <<
"Hz\t" << profile.format() <<
"\n";
1648 std::map<stream_and_index, rs2::stream_profile> streams;
1649 std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
1650 for (
auto &&sensor : dev.query_sensors()) {
1652 for (
auto &&profile : sensor.get_stream_profiles()) {
1653 if (
auto video = profile.as<rs2::video_stream_profile>()) {
1654 if (streams.find(stream_and_index { profile.stream_type(), profile.stream_index() }) == streams.end()) {
1655 streams[stream_and_index { profile.stream_type(), profile.stream_index() }] = profile;
1658 rs2_intrinsics intrinsics {};
1659 stream_and_resolution stream_res { profile.stream_type(), profile.stream_index(), video.width(), video.height(),
1660 profile.stream_name() };
1661 safe_get_intrinsics(video, intrinsics);
1662 auto it = std::find_if(
1663 (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
1664 [&](
const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) {
return intrinsics == kvp.second; });
1665 if (it == (intrinsics_map[stream_res]).end()) {
1666 (intrinsics_map[stream_res]).push_back({ {profile.format()}, intrinsics });
1669 it->first.insert(profile.format());
1677 os <<
"Provided Intrinsic:\n";
1678 for (
auto &kvp : intrinsics_map) {
1679 auto stream_res = kvp.first;
1680 for (
auto &intrinsics : kvp.second) {
1681 auto formats = get_str_formats(intrinsics.first);
1682 os <<
"Intrinsic of \"" << stream_res.stream_name <<
"\"\t " << stream_res.width <<
"x" << stream_res.height
1683 <<
"\t " << formats <<
"\n";
1684 if (intrinsics.second == rs2_intrinsics {}) {
1685 os <<
"Intrinsic NOT available!\n\n";
1688 print(intrinsics.second, os);
1694 os <<
"\nProvided Extrinsic:\n";
1695 for (
auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
1696 for (
auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
1697 os <<
"Extrinsic from \"" << kvp1->second.stream_name() <<
"\"\t "
1699 <<
"\t \"" << kvp2->second.stream_name() <<
"\"\n";
1700 auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
1701 print(extrinsics, os);
1708#elif !defined(VISP_BUILD_SHARED_LIBS)
1710void dummy_vpRealSense2() { }
Generic class defining intrinsic camera parameters.
vpCameraParametersProjType
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ ProjWithKannalaBrandtDistortion
Projection with Kannala-Brandt distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
void resize(unsigned int i, bool flagNullify=true)
error that can be emitted by ViSP classes.
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void RGBToGrey(unsigned char *rgb, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false)
static void RGBaToGrey(unsigned char *rgba, unsigned char *grey, unsigned int width, unsigned int height, unsigned int nThreads=0)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
static void BGRToGrey(unsigned char *bgr, unsigned char *grey, unsigned int width, unsigned int height, bool flip=false, unsigned int nThreads=0)
static void BGRToRGBa(unsigned char *bgr, unsigned char *rgba, unsigned int width, unsigned int height, bool flip=false)
Definition of the vpImage class member functions.
void resize(unsigned int h, unsigned int w)
resize the image : Image initialization
Type * bitmap
points toward the bitmap
void getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
std::string getSensorInfo()
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
vpQuaternionVector m_quat
bool open(const rs2::config &cfg=rs2::config())
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
rs2::pointcloud m_pointcloud
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
void getPointcloud(const rs2::depth_frame &depth_frame, std::vector< vpColVector > &pointcloud)
void getColorFrame(const rs2::frame &frame, vpImage< vpRGBa > &color)
void getIMUVelocity(vpColVector *imu_vel, double *ts)
friend VISP_EXPORT std::ostream & operator<<(std::ostream &os, const vpRealSense2 &rs)
vpTranslationVector m_pos
std::string m_product_line
std::string getProductLine()
unsigned int getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc, double *ts=nullptr)
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
void getGreyFrame(const rs2::frame &frame, vpImage< unsigned char > &grey)
float m_invalidDepthValue
rs2::pipeline_profile m_pipelineProfile
Implementation of a rotation matrix and operations on such kind of matrices.
Class that consider the case of a translation vector.