Visual Servoing Platform version 3.5.0
vpRealSense2.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2019 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See http://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * librealsense2 interface.
 *
 *****************************************************************************/

#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
#include <algorithm> // std::find_if
#include <cmath>     // std::fabs
#include <cstring>
#include <iomanip>
#include <limits>    // std::numeric_limits
#include <map>
#include <set>
#include <sstream>
#include <tuple>
#include <visp3/core/vpImageConvert.h>
#include <visp3/sensor/vpRealSense2.h>

#define MANUAL_POINTCLOUD 1

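// Anonymous-namespace helper: approximate equality for rs2_extrinsics. It is used further
// below in getPointcloud() to detect depth and color streams that are already registered
// (identity extrinsics between the two streams).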
namespace {
bool operator==(const rs2_extrinsics &lhs, const rs2_extrinsics &rhs)
{
  for (int i = 0; i < 3; i++) {
    for (int j = 0; j < 3; j++) {
      if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
        return false;
      }
    }

    if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
      return false;
    }
  }

  return true;
}
} // anonymous namespace

vpRealSense2::vpRealSense2()
  : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
    m_points(), m_pos(), m_quat(), m_rot(), m_product_line(), m_init(false)
{
}

vpRealSense2::~vpRealSense2() { close(); }

void vpRealSense2::acquire(vpImage<unsigned char> &grey, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  auto color_frame = data.get_color_frame();
  getGreyFrame(color_frame, grey);
  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}

void vpRealSense2::acquire(vpImage<vpRGBa> &color, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  auto color_frame = data.get_color_frame();
  getColorFrame(color_frame, color);
  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}

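/*
  Usage sketch (illustration only, not part of the implementation). Assuming a color-capable
  device such as a D435 opened with the default configuration, a grab loop looks like:

    vpRealSense2 rs;
    rs.open();
    vpImage<vpRGBa> I_color; // resized internally to the stream resolution
    double ts;
    rs.acquire(I_color, &ts);
*/
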
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
                           rs2::align *const align_to, double *ts)
{
  acquire(data_image, data_depth, data_pointCloud, data_infrared, NULL, align_to, ts);
}

void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != NULL) {
    // Infrared stream is not aligned
    // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    // Older librealsense releases expose this method under the misspelled name "proccess"
    data = align_to->proccess(data);
#endif
  }

  if (data_image != NULL) {
    auto color_frame = data.get_color_frame();
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != NULL || data_pointCloud != NULL) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != NULL) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != NULL) {
      getPointcloud(depth_frame, *data_pointCloud);
    }
  }

  if (data_infrared1 != NULL) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != NULL) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}

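/*
  Usage sketch for the raw-buffer overload above (illustration only). The caller owns the
  buffers and must size them for the configured streams; here we assume 640x480 color (RGBA)
  and depth (Z16), with depth aligned to the color frame:

    vpRealSense2 rs;
    rs2::config cfg;
    cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
    cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
    rs.open(cfg);
    vpImage<vpRGBa> I_color(480, 640);
    vpImage<uint16_t> I_depth_raw(480, 640);
    rs2::align align_to(RS2_STREAM_COLOR);
    rs.acquire(reinterpret_cast<unsigned char *>(I_color.bitmap),
               reinterpret_cast<unsigned char *>(I_depth_raw.bitmap), NULL, NULL, &align_to, NULL);
*/
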
#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, double *ts)
{
  auto data = m_pipe.wait_for_frames();

  if (left != NULL) {
    auto left_fisheye_frame = data.get_fisheye_frame(1);
    unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
    left->resize(height, width);
    getNativeFrameData(left_fisheye_frame, (*left).bitmap);
  }

  if (right != NULL) {
    auto right_fisheye_frame = data.get_fisheye_frame(2);
    unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
    right->resize(height, width);
    getNativeFrameData(right_fisheye_frame, (*right).bitmap);
  }

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}

void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, vpHomogeneousMatrix *cMw,
                           vpColVector *odo_vel, vpColVector *odo_acc, unsigned int *confidence, double *ts)
{
  auto data = m_pipe.wait_for_frames();

  if (left != NULL) {
    auto left_fisheye_frame = data.get_fisheye_frame(1);
    unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
    left->resize(height, width);
    getNativeFrameData(left_fisheye_frame, (*left).bitmap);
  }

  if (right != NULL) {
    auto right_fisheye_frame = data.get_fisheye_frame(2);
    unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
    right->resize(height, width);
    getNativeFrameData(right_fisheye_frame, (*right).bitmap);
  }

  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }

  if (cMw != NULL) {
    m_pos[0] = static_cast<double>(pose_data.translation.x);
    m_pos[1] = static_cast<double>(pose_data.translation.y);
    m_pos[2] = static_cast<double>(pose_data.translation.z);

    m_quat[0] = static_cast<double>(pose_data.rotation.x);
    m_quat[1] = static_cast<double>(pose_data.rotation.y);
    m_quat[2] = static_cast<double>(pose_data.rotation.z);
    m_quat[3] = static_cast<double>(pose_data.rotation.w);

    *cMw = vpHomogeneousMatrix(m_pos, m_quat);
  }

  if (odo_vel != NULL) {
    odo_vel->resize(6, false);
    (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
    (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
    (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
    (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
    (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
    (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
  }

  if (odo_acc != NULL) {
    odo_acc->resize(6, false);
    (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
    (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
    (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
    (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
    (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
    (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
  }

  if (confidence != NULL) {
    *confidence = pose_data.tracker_confidence;
  }
}

void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, vpHomogeneousMatrix *cMw,
                           vpColVector *odo_vel, vpColVector *odo_acc, vpColVector *imu_vel, vpColVector *imu_acc,
                           unsigned int *confidence, double *ts)
{
  auto data = m_pipe.wait_for_frames();

  if (left != NULL) {
    auto left_fisheye_frame = data.get_fisheye_frame(1);
    unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
    left->resize(height, width);
    getNativeFrameData(left_fisheye_frame, (*left).bitmap);
  }

  if (right != NULL) {
    auto right_fisheye_frame = data.get_fisheye_frame(2);
    unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
    right->resize(height, width);
    getNativeFrameData(right_fisheye_frame, (*right).bitmap);
  }

  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }

  if (cMw != NULL) {
    m_pos[0] = static_cast<double>(pose_data.translation.x);
    m_pos[1] = static_cast<double>(pose_data.translation.y);
    m_pos[2] = static_cast<double>(pose_data.translation.z);

    m_quat[0] = static_cast<double>(pose_data.rotation.x);
    m_quat[1] = static_cast<double>(pose_data.rotation.y);
    m_quat[2] = static_cast<double>(pose_data.rotation.z);
    m_quat[3] = static_cast<double>(pose_data.rotation.w);

    *cMw = vpHomogeneousMatrix(m_pos, m_quat);
  }

  if (odo_vel != NULL) {
    odo_vel->resize(6, false);
    (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
    (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
    (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
    (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
    (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
    (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
  }

  if (odo_acc != NULL) {
    odo_acc->resize(6, false);
    (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
    (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
    (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
    (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
    (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
    (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
  }

  auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
  auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();

  if (imu_acc != NULL) {
    imu_acc->resize(3, false);
    (*imu_acc)[0] = static_cast<double>(accel_data.x);
    (*imu_acc)[1] = static_cast<double>(accel_data.y);
    (*imu_acc)[2] = static_cast<double>(accel_data.z);
  }

  auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
  auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();

  if (imu_vel != NULL) {
    imu_vel->resize(3, false);
    (*imu_vel)[0] = static_cast<double>(gyro_data.x);
    (*imu_vel)[1] = static_cast<double>(gyro_data.y);
    (*imu_vel)[2] = static_cast<double>(gyro_data.z);
  }

  if (confidence != NULL) {
    *confidence = pose_data.tracker_confidence;
  }
}
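
/*
  Usage sketch for the overloads above (illustration only), assuming a T265 device with pose
  and fisheye streams enabled:

    vpRealSense2 rs;
    rs2::config cfg;
    cfg.enable_stream(RS2_STREAM_POSE, RS2_FORMAT_6DOF);
    cfg.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
    cfg.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);
    rs.open(cfg);
    vpImage<unsigned char> I_left, I_right;
    vpHomogeneousMatrix cMw;
    vpColVector odo_vel, odo_acc;
    unsigned int confidence;
    double ts;
    rs.acquire(&I_left, &I_right, &cMw, &odo_vel, &odo_acc, &confidence, &ts);
*/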
#endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))

#ifdef VISP_HAVE_PCL
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
                           rs2::align *const align_to, double *ts)
{
  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to, ts);
}

void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != NULL) {
    // Infrared stream is not aligned
    // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    data = align_to->proccess(data);
#endif
  }

  if (data_image != NULL) {
    auto color_frame = data.get_color_frame();
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != NULL) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != NULL) {
      getPointcloud(depth_frame, *data_pointCloud);
    }

    if (pointcloud != NULL) {
      getPointcloud(depth_frame, pointcloud);
    }
  }

  if (data_infrared1 != NULL) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != NULL) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}

void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared,
                           rs2::align *const align_to, double *ts)
{
  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to, ts);
}

void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != NULL) {
    // Infrared stream is not aligned
    // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    data = align_to->proccess(data);
#endif
  }

  auto color_frame = data.get_color_frame();
  if (data_image != NULL) {
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != NULL) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != NULL) {
      getPointcloud(depth_frame, *data_pointCloud);
    }

    if (pointcloud != NULL) {
      getPointcloud(depth_frame, color_frame, pointcloud);
    }
  }

  if (data_infrared1 != NULL) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != NULL) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}
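
/*
  Usage sketch for the PCL overloads above (illustration only), assuming a depth + color
  device; the trailing parameters are passed explicitly:

    vpRealSense2 rs;
    rs.open();
    pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>);
    rs.acquire(NULL, NULL, NULL, cloud, NULL, NULL, NULL);
*/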
#endif

void vpRealSense2::close()
{
  if (m_init) {
    m_pipe.stop();
    m_init = false;
  }
}

vpCameraParameters vpRealSense2::getCameraParameters(const rs2_stream &stream,
                                                     vpCameraParameters::vpCameraParametersProjType type,
                                                     int index) const
{
  auto rs_stream = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
  auto intrinsics = rs_stream.get_intrinsics();

  vpCameraParameters cam;
  double u0 = intrinsics.ppx;
  double v0 = intrinsics.ppy;
  double px = intrinsics.fx;
  double py = intrinsics.fy;

  switch (type) {
  case vpCameraParameters::perspectiveProjWithDistortion: {
    double kdu = intrinsics.coeffs[0];
    cam.initPersProjWithDistortion(px, py, u0, v0, -kdu, kdu);
  } break;

  case vpCameraParameters::ProjWithKannalaBrandtDistortion: {
    std::vector<double> tmp_coefs;
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[0]));
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[1]));
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[2]));
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[3]));
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[4]));

    cam.initProjWithKannalaBrandtDistortion(px, py, u0, v0, tmp_coefs);
  } break;

  case vpCameraParameters::perspectiveProjWithoutDistortion:
  default:
    cam.initPersProjWithoutDistortion(px, py, u0, v0);
    break;
  }

  return cam;
}

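/*
  Usage sketch (illustration only): retrieve ViSP intrinsics of the color stream once the
  pipeline is started:

    vpCameraParameters cam = rs.getCameraParameters(RS2_STREAM_COLOR,
                                                    vpCameraParameters::perspectiveProjWithoutDistortion);
*/
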
rs2_intrinsics vpRealSense2::getIntrinsics(const rs2_stream &stream, int index) const
{
  auto vsp = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
  return vsp.get_intrinsics();
}

void vpRealSense2::getColorFrame(const rs2::frame &frame, vpImage<vpRGBa> &color)
{
  auto vf = frame.as<rs2::video_frame>();
  unsigned int width = (unsigned int)vf.get_width();
  unsigned int height = (unsigned int)vf.get_height();
  color.resize(height, width);

  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
    vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              reinterpret_cast<unsigned char *>(color.bitmap), width, height);
  } else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
    memcpy(reinterpret_cast<unsigned char *>(color.bitmap),
           const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
           width * height * sizeof(vpRGBa));
  } else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
    vpImageConvert::BGRToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              reinterpret_cast<unsigned char *>(color.bitmap), width, height);
  } else {
    throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
  }
}

float vpRealSense2::getDepthScale()
{
  if (!m_init) { // If pipe is not yet created, create it. Otherwise, we already know the depth scale.
    rs2::pipeline *pipe = new rs2::pipeline;
    rs2::pipeline_profile *pipelineProfile = new rs2::pipeline_profile;
    *pipelineProfile = pipe->start();

    rs2::device dev = pipelineProfile->get_device();

    // Go over the device's sensors
    for (rs2::sensor &sensor : dev.query_sensors()) {
      // Check if the sensor is a depth sensor
      if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
        m_depthScale = dpt.get_depth_scale();
      }
    }

    pipe->stop();
    delete pipe;
    delete pipelineProfile;
  }

  return m_depthScale;
}

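/*
  Note: the depth scale converts raw 16-bit depth samples to meters, i.e. for a Z16 value
  `raw`, the metric depth used throughout this file is:

    float z_meters = m_depthScale * raw;
*/
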
void vpRealSense2::getGreyFrame(const rs2::frame &frame, vpImage<unsigned char> &grey)
{
  auto vf = frame.as<rs2::video_frame>();
  unsigned int width = (unsigned int)vf.get_width();
  unsigned int height = (unsigned int)vf.get_height();
  grey.resize(height, width);

  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
    vpImageConvert::RGBToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              grey.bitmap, width, height);
  } else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
    vpImageConvert::RGBaToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                               grey.bitmap, width * height);
  } else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
    vpImageConvert::BGRToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              grey.bitmap, width, height);
  } else {
    throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
  }
}

void vpRealSense2::getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
{
  auto vf = frame.as<rs2::video_frame>();
  int size = vf.get_width() * vf.get_height();

  switch (frame.get_profile().format()) {
  case RS2_FORMAT_RGB8:
  case RS2_FORMAT_BGR8:
    memcpy(data, frame.get_data(), size * 3);
    break;

  case RS2_FORMAT_RGBA8:
  case RS2_FORMAT_BGRA8:
    memcpy(data, frame.get_data(), size * 4);
    break;

  case RS2_FORMAT_Y16:
  case RS2_FORMAT_Z16:
    memcpy(data, frame.get_data(), size * 2);
    break;

  case RS2_FORMAT_Y8:
    memcpy(data, frame.get_data(), size);
    break;

  default:
    break;
  }
}

void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, std::vector<vpColVector> &pointcloud)
{
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    std::stringstream ss;
    ss << "Error, depth scale <= 0: " << m_depthScale;
    throw vpException(vpException::fatalError, ss.str());
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int width = vf.get_width();
  const int height = vf.get_height();
  pointcloud.resize((size_t)(width * height));

  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud[(size_t)depth_pixel_index].resize(4, false);
        pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][3] = 1.0;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      float points[3];
      const float pixel[] = {(float)j, (float)i};
      rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);

      if (pixels_distance > m_max_Z)
        points[0] = points[1] = points[2] = m_invalidDepthValue;

      pointcloud[(size_t)depth_pixel_index].resize(4, false);
      pointcloud[(size_t)depth_pixel_index][0] = points[0];
      pointcloud[(size_t)depth_pixel_index][1] = points[1];
      pointcloud[(size_t)depth_pixel_index][2] = points[2];
      pointcloud[(size_t)depth_pixel_index][3] = 1.0;
    }
  }
}

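/*
  For reference (illustration only): with a distortion-free pinhole model,
  rs2_deproject_pixel_to_point() above computes, for a pixel (u, v) at metric depth Z,

    X = (u - ppx) / fx * Z
    Y = (v - ppy) / fy * Z

  and librealsense additionally undistorts (u, v) first when the stream intrinsics carry a
  distortion model.
*/
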
#ifdef VISP_HAVE_PCL
void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
{
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    std::stringstream ss;
    ss << "Error, depth scale <= 0: " << m_depthScale;
    throw vpException(vpException::fatalError, ss.str());
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int width = vf.get_width();
  const int height = vf.get_height();
  pointcloud->width = (uint32_t)width;
  pointcloud->height = (uint32_t)height;
  pointcloud->resize((size_t)(width * height));

#if MANUAL_POINTCLOUD // faster to compute manually when tested
  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      float points[3];
      const float pixel[] = {(float)j, (float)i};
      rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);

      if (pixels_distance > m_max_Z)
        points[0] = points[1] = points[2] = m_invalidDepthValue;

      pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
      pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
      pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
    }
  }
#else
  m_points = m_pointcloud.calculate(depth_frame);
  auto vertices = m_points.get_vertices();

  for (size_t i = 0; i < m_points.size(); i++) {
    if (vertices[i].z <= 0.0f || vertices[i].z > m_max_Z) {
      pointcloud->points[i].x = m_invalidDepthValue;
      pointcloud->points[i].y = m_invalidDepthValue;
      pointcloud->points[i].z = m_invalidDepthValue;
    } else {
      pointcloud->points[i].x = vertices[i].x;
      pointcloud->points[i].y = vertices[i].y;
      pointcloud->points[i].z = vertices[i].z;
    }
  }
#endif
}

void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, const rs2::frame &color_frame,
                                 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
{
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    throw vpException(vpException::fatalError, "Error, depth scale <= 0: %f", m_depthScale);
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int depth_width = vf.get_width();
  const int depth_height = vf.get_height();
  pointcloud->width = static_cast<uint32_t>(depth_width);
  pointcloud->height = static_cast<uint32_t>(depth_height);
  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));

  vf = color_frame.as<rs2::video_frame>();
  const int color_width = vf.get_width();
  const int color_height = vf.get_height();

  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_extrinsics depth2ColorExtrinsics =
      depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
          color_frame.get_profile().as<rs2::video_stream_profile>());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
  const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
  const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
  unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
  const unsigned char *p_color_frame = static_cast<const unsigned char *>(color_frame.get_data());
  rs2_extrinsics identity;
  memset(identity.rotation, 0, sizeof(identity.rotation));
  memset(identity.translation, 0, sizeof(identity.translation));
  for (int i = 0; i < 3; i++) {
    identity.rotation[i * 3 + i] = 1;
  }
  const bool registered_streams =
      (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < depth_height; i++) {
    auto depth_pixel_index = i * depth_width;

    for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
        pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
        pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;

        // For out of bounds color data, default to a shade of blue in order to
        // visually distinguish holes. This color value is same as the librealsense
        // out of bounds color value.
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
        unsigned int r = 96, g = 157, b = 198;
        uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));

        pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
#else
        pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
        pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
        pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
#endif
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      float depth_point[3];
      const float pixel[] = {(float)j, (float)i};
      rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);

      if (pixels_distance > m_max_Z) {
        depth_point[0] = depth_point[1] = depth_point[2] = m_invalidDepthValue;
      }

      pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
      pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
      pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];

      if (!registered_streams) {
        float color_point[3];
        rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
        float color_pixel[2];
        rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);

        if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
            color_pixel[0] >= color_width) {
          // For out of bounds color data, default to a shade of blue in order to
          // visually distinguish holes. This color value is same as the librealsense
          // out of bounds color value.
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
          unsigned int r = 96, g = 157, b = 198;
          uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));

          pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
#else
          pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
          pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
          pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
#endif
        } else {
          unsigned int i_ = (unsigned int)color_pixel[1];
          unsigned int j_ = (unsigned int)color_pixel[0];

#if PCL_VERSION_COMPARE(<, 1, 1, 0)
          uint32_t rgb = 0;
          if (swap_rb) {
            rgb =
                (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]) << 16);
          } else {
            rgb = (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
                   static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
                   static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
          }

          pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
#else
          if (swap_rb) {
            pointcloud->points[(size_t)depth_pixel_index].b =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
            pointcloud->points[(size_t)depth_pixel_index].g =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
            pointcloud->points[(size_t)depth_pixel_index].r =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
          } else {
            pointcloud->points[(size_t)depth_pixel_index].r =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
            pointcloud->points[(size_t)depth_pixel_index].g =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
            pointcloud->points[(size_t)depth_pixel_index].b =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
          }
#endif
        }
      } else {
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
        uint32_t rgb = 0;
        if (swap_rb) {
          rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
        } else {
          rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
        }

        pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
#else
        if (swap_rb) {
          pointcloud->points[(size_t)depth_pixel_index].b =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
          pointcloud->points[(size_t)depth_pixel_index].g =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
          pointcloud->points[(size_t)depth_pixel_index].r =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
        } else {
          pointcloud->points[(size_t)depth_pixel_index].r =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
          pointcloud->points[(size_t)depth_pixel_index].g =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
          pointcloud->points[(size_t)depth_pixel_index].b =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
        }
#endif
      }
    }
  }
}

#endif

vpHomogeneousMatrix vpRealSense2::getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index) const
{
  int to_index = -1;

  if (from_index != -1) // If we have to specify indices for streams. (Ex.: T265 device having 2 fisheyes)
  {
    if (from_index == 1) // From left => To right.
      to_index = 2;
    else if (from_index == 2) // From right => To left.
      to_index = 1;
  }

  auto from_stream = m_pipelineProfile.get_stream(from, from_index);
  auto to_stream = m_pipelineProfile.get_stream(to, to_index);

  rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);

  vpTranslationVector t;
  vpRotationMatrix R;
  for (unsigned int i = 0; i < 3; i++) {
    t[i] = extrinsics.translation[i];
    for (unsigned int j = 0; j < 3; j++)
      R[i][j] = extrinsics.rotation[j * 3 + i]; // rotation is column-major order
  }

  vpHomogeneousMatrix to_M_from(t, R);
  return to_M_from;
}

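/*
  Usage sketch (illustration only): getTransformation(from, to) returns to_M_from, e.g. the
  pose of the color frame expressed in the depth frame:

    vpHomogeneousMatrix depth_M_color = rs.getTransformation(RS2_STREAM_COLOR, RS2_STREAM_DEPTH);
*/
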
#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
unsigned int vpRealSense2::getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc,
                                           double *ts)
{
  auto frame = m_pipe.wait_for_frames();
  auto f = frame.first_or_default(RS2_STREAM_POSE);
  auto pose_data = f.as<rs2::pose_frame>().get_pose_data();

  if (ts != NULL)
    *ts = frame.get_timestamp();

  if (cMw != NULL) {
    m_pos[0] = static_cast<double>(pose_data.translation.x);
    m_pos[1] = static_cast<double>(pose_data.translation.y);
    m_pos[2] = static_cast<double>(pose_data.translation.z);

    m_quat[0] = static_cast<double>(pose_data.rotation.x);
    m_quat[1] = static_cast<double>(pose_data.rotation.y);
    m_quat[2] = static_cast<double>(pose_data.rotation.z);
    m_quat[3] = static_cast<double>(pose_data.rotation.w);

    *cMw = vpHomogeneousMatrix(m_pos, m_quat);
  }

  if (odo_vel != NULL) {
    odo_vel->resize(6, false);
    (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
    (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
    (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
    (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
    (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
    (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
  }

  if (odo_acc != NULL) {
    odo_acc->resize(6, false);
    (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
    (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
    (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
    (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
    (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
    (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
  }

  return pose_data.tracker_confidence;
}

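/*
  Usage sketch (illustration only), for a pose-capable device such as a T265:

    vpHomogeneousMatrix cMw;
    vpColVector odo_vel, odo_acc;
    double ts;
    unsigned int confidence = rs.getOdometryData(&cMw, &odo_vel, &odo_acc, &ts);
*/
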
void vpRealSense2::getIMUAcceleration(vpColVector *imu_acc, double *ts)
{
  auto frame = m_pipe.wait_for_frames();
  auto f = frame.first_or_default(RS2_STREAM_ACCEL);
  auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();

  if (ts != NULL)
    *ts = f.get_timestamp();

  if (imu_acc != NULL) {
    imu_acc->resize(3, false);
    (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
    (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
    (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
  }
}

void vpRealSense2::getIMUVelocity(vpColVector *imu_vel, double *ts)
{
  auto frame = m_pipe.wait_for_frames();
  auto f = frame.first_or_default(RS2_STREAM_GYRO);
  auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();

  if (ts != NULL)
    *ts = f.get_timestamp();

  if (imu_vel != NULL) {
    imu_vel->resize(3, false);
    (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
    (*imu_vel)[1] = static_cast<double>(imu_vel_data.y); // was imu_vel_data.x: copy each gyro axis
    (*imu_vel)[2] = static_cast<double>(imu_vel_data.z); // was imu_vel_data.x
  }
}

void vpRealSense2::getIMUData(vpColVector *imu_acc, vpColVector *imu_vel, double *ts)
{
  auto data = m_pipe.wait_for_frames();

  if (ts != NULL)
    *ts = data.get_timestamp();

  if (imu_acc != NULL) {
    auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
    auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();

    imu_acc->resize(3, false);
    (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
    (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
    (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
  }

  if (imu_vel != NULL) {
    auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
    auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();

    imu_vel->resize(3, false);
    (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
    (*imu_vel)[1] = static_cast<double>(imu_vel_data.y);
    (*imu_vel)[2] = static_cast<double>(imu_vel_data.z);
  }
}
#endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))

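/*
  Usage sketch (illustration only), assuming a device exposing motion streams:

    rs2::config cfg;
    cfg.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F);
    cfg.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F);
    rs.open(cfg);
    vpColVector imu_acc, imu_vel;
    rs.getIMUData(&imu_acc, &imu_vel, NULL);
*/
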
bool vpRealSense2::open(const rs2::config &cfg)
{
  if (m_init) {
    close();
  }

  m_pipelineProfile = m_pipe.start(cfg);

  rs2::device dev = m_pipelineProfile.get_device();

#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
  // Query device product line D400/SR300/L500/T200
  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
#endif

  // Go over the device's sensors
  for (rs2::sensor &sensor : dev.query_sensors()) {
    // Check if the sensor is a depth sensor
    if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
      m_depthScale = dpt.get_depth_scale();
    }
  }

  m_init = true;
  return m_init;
}

bool vpRealSense2::open(const rs2::config &cfg, std::function<void(rs2::frame)> &callback)
{
  if (m_init) {
    close();
  }

  m_pipelineProfile = m_pipe.start(cfg, callback);

  rs2::device dev = m_pipelineProfile.get_device();

#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
  // Query device product line D400/SR300/L500/T200
  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
#endif

  // Go over the device's sensors
  for (rs2::sensor &sensor : dev.query_sensors()) {
    // Check if the sensor is a depth sensor
    if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
      m_depthScale = dpt.get_depth_scale();
    }
  }

  m_init = true;
  return m_init;
}

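/*
  Usage sketch for the callback overload above (illustration only): frames are delivered on
  librealsense's internal thread instead of being polled with wait_for_frames():

    std::function<void(rs2::frame)> callback = [](rs2::frame frame) {
      if (rs2::frameset fs = frame.as<rs2::frameset>()) {
        // process the synchronized frameset
      }
    };
    rs.open(rs2::config(), callback);
*/
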
std::string vpRealSense2::getProductLine()
{
#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
  if (!m_init) { // If pipe is not already created, create it. Otherwise, we have already determined the product line
    rs2::pipeline *pipe = new rs2::pipeline;
    rs2::pipeline_profile *pipelineProfile = new rs2::pipeline_profile;
    *pipelineProfile = pipe->start();

    rs2::device dev = pipelineProfile->get_device();

#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
    // Query device product line D400/SR300/L500/T200
    m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
#endif

    pipe->stop();
    delete pipe;
    delete pipelineProfile;
  }

  return m_product_line;
#else
  return (std::string("unknown"));
#endif
}

namespace
{
// Helper functions to print information about the RealSense device
void print(const rs2_extrinsics &extrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << "Rotation Matrix:\n";

  for (auto i = 0; i < 3; ++i) {
    for (auto j = 0; j < 3; ++j) {
      ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
    }
    ss << std::endl;
  }

  ss << "\nTranslation Vector: ";
  for (size_t i = 0; i < sizeof(extrinsics.translation) / sizeof(extrinsics.translation[0]); ++i)
    ss << std::setprecision(15) << extrinsics.translation[i] << " ";

  os << ss.str() << "\n\n";
}

void print(const rs2_intrinsics &intrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << std::left << std::setw(14) << "Width: "
     << "\t" << intrinsics.width << "\n"
     << std::left << std::setw(14) << "Height: "
     << "\t" << intrinsics.height << "\n"
     << std::left << std::setw(14) << "PPX: "
     << "\t" << std::setprecision(15) << intrinsics.ppx << "\n"
     << std::left << std::setw(14) << "PPY: "
     << "\t" << std::setprecision(15) << intrinsics.ppy << "\n"
     << std::left << std::setw(14) << "Fx: "
     << "\t" << std::setprecision(15) << intrinsics.fx << "\n"
     << std::left << std::setw(14) << "Fy: "
     << "\t" << std::setprecision(15) << intrinsics.fy << "\n"
     << std::left << std::setw(14) << "Distortion: "
     << "\t" << rs2_distortion_to_string(intrinsics.model) << "\n"
     << std::left << std::setw(14) << "Coeffs: ";

  for (size_t i = 0; i < sizeof(intrinsics.coeffs) / sizeof(intrinsics.coeffs[0]); ++i)
    ss << "\t" << std::setprecision(15) << intrinsics.coeffs[i] << " ";

  os << ss.str() << "\n\n";
}

void safe_get_intrinsics(const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
{
  try {
    intrinsics = profile.get_intrinsics();
  } catch (...) {
  }
}

bool operator==(const rs2_intrinsics &lhs, const rs2_intrinsics &rhs)
{
  return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
         lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
         !std::memcmp(lhs.coeffs, rhs.coeffs, sizeof(rhs.coeffs));
}

std::string get_str_formats(const std::set<rs2_format> &formats)
{
  std::stringstream ss;
  for (auto format = formats.begin(); format != formats.end(); ++format) {
    ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ? "" : "/");
  }
  return ss.str();
}

struct stream_and_resolution {
  rs2_stream stream;
  int stream_index;
  int width;
  int height;
  std::string stream_name;

  bool operator<(const stream_and_resolution &obj) const
  {
    return (std::make_tuple(stream, stream_index, width, height) <
            std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
  }
};

struct stream_and_index {
  rs2_stream stream;
  int stream_index;

  bool operator<(const stream_and_index &obj) const
  {
    return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
  }
};
} // anonymous namespace

std::ostream &operator<<(std::ostream &os, const vpRealSense2 &rs)
{
  rs2::device dev = rs.m_pipelineProfile.get_device();
  os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
     << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
     << std::endl;

  // Show which options are supported by this device
  os << " Device info: \n";
  for (auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
    auto param = static_cast<rs2_camera_info>(j);
    if (dev.supports(param))
      os << "  " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) << ": \t"
         << dev.get_info(param) << "\n";
  }

  os << "\n";

  for (auto &&sensor : dev.query_sensors()) {
    os << "Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;

    os << std::setw(55) << " Supported options:" << std::setw(10) << "min" << std::setw(10) << " max" << std::setw(6)
       << " step" << std::setw(10) << " default" << std::endl;
    for (auto j = 0; j < RS2_OPTION_COUNT; ++j) {
      auto opt = static_cast<rs2_option>(j);
      if (sensor.supports(opt)) {
        auto range = sensor.get_option_range(opt);
        os << "  " << std::left << std::setw(50) << opt << " : " << std::setw(5) << range.min << "... "
           << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def << "\n";
      }
    }

    os << "\n";
  }

  for (auto &&sensor : dev.query_sensors()) {
    os << "Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) << "\n";

    os << std::setw(55) << " Supported modes:" << std::setw(10) << "stream" << std::setw(10) << " resolution"
       << std::setw(6) << " fps" << std::setw(10) << " format"
       << "\n";
    // Show which streams are supported by this device
    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        os << "  " << profile.stream_name() << "\t " << video.width() << "x" << video.height() << "\t@ "
           << profile.fps() << "Hz\t" << profile.format() << "\n";
      } else {
        os << "  " << profile.stream_name() << "\t@ " << profile.fps() << "Hz\t" << profile.format() << "\n";
      }
    }

    os << "\n";
  }

  std::map<stream_and_index, rs2::stream_profile> streams;
  std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
  for (auto &&sensor : dev.query_sensors()) {
    // Intrinsics
    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        if (streams.find(stream_and_index{profile.stream_type(), profile.stream_index()}) == streams.end()) {
          streams[stream_and_index{profile.stream_type(), profile.stream_index()}] = profile;
        }

        rs2_intrinsics intrinsics{};
        stream_and_resolution stream_res{profile.stream_type(), profile.stream_index(), video.width(),
                                         video.height(), profile.stream_name()};
        safe_get_intrinsics(video, intrinsics);
        auto it = std::find_if(
            (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
            [&](const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) { return intrinsics == kvp.second; });
        if (it == (intrinsics_map[stream_res]).end()) {
          (intrinsics_map[stream_res]).push_back({{profile.format()}, intrinsics});
        } else {
          it->first.insert(profile.format()); // If the intrinsics are equal, add the profile format to the format set
        }
      }
    }
  }

  os << "Provided Intrinsic:\n";
  for (auto &kvp : intrinsics_map) {
    auto stream_res = kvp.first;
    for (auto &intrinsics : kvp.second) {
      auto formats = get_str_formats(intrinsics.first);
      os << "Intrinsic of \"" << stream_res.stream_name << "\"\t " << stream_res.width << "x" << stream_res.height
         << "\t " << formats << "\n";
      if (intrinsics.second == rs2_intrinsics{}) {
        os << "Intrinsic NOT available!\n\n";
      } else {
        print(intrinsics.second, os);
      }
    }
  }

  // Print Extrinsics
  os << "\nProvided Extrinsic:\n";
  for (auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
    for (auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
      os << "Extrinsic from \"" << kvp1->second.stream_name() << "\"\t "
         << "To"
         << "\t \"" << kvp2->second.stream_name() << "\"\n";
      auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
      print(extrinsics, os);
    }
  }

  return os;
}

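/*
  Usage sketch (illustration only): dump the device name, supported options, stream profiles,
  intrinsics and extrinsics of an opened vpRealSense2 instance:

    std::cout << rs << std::endl;
*/
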
#elif !defined(VISP_BUILD_SHARED_LIBS)
// Work around to avoid warning: libvisp_sensor.a(vpRealSense2.cpp.o) has no symbols
void dummy_vpRealSense2(){};
#endif