Main MRPT website > C++ reference
MRPT logo
CObservationRGBD360.h
Go to the documentation of this file.
1 /* +---------------------------------------------------------------------------+
2  | Mobile Robot Programming Toolkit (MRPT) |
3  | http://www.mrpt.org/ |
4  | |
5  | Copyright (c) 2005-2014, Individual contributors, see AUTHORS file |
6  | See: http://www.mrpt.org/Authors - All rights reserved. |
7  | Released under BSD License. See details in http://www.mrpt.org/License |
8  +---------------------------------------------------------------------------+ */
9 #ifndef CObservationRGBD360_H
10 #define CObservationRGBD360_H
11 
13 #include <mrpt/utils/CImage.h>
14 #include <mrpt/slam/CObservation.h>
16 #include <mrpt/poses/CPose3D.h>
17 #include <mrpt/poses/CPose2D.h>
18 #include <mrpt/math/CPolygon.h>
19 #include <mrpt/math/CMatrix.h>
20 #include <mrpt/utils/adapters.h>
21 
22 namespace mrpt
23 {
24 namespace slam
25 {
26  DEFINE_SERIALIZABLE_PRE_CUSTOM_BASE_LINKAGE( CObservationRGBD360, CObservation, OBS_IMPEXP )
27 
28 // namespace detail {
29 // // Implemented in CObservationRGBD360_project3D_impl.h
30 // template <class POINTMAP>
31 // void project3DPointsFromDepthImageInto(CObservationRGBD360 &src_obs, POINTMAP &dest_pointcloud, const bool takeIntoAccountSensorPoseOnRobot, const mrpt::poses::CPose3D * robotPoseInTheWorld, const bool PROJ3D_USE_LUT);
32 // }
33 
34  /** Declares a class derived from "CObservation" that
35  * encapsulates an omnidirectional RGBD measurement from a set of RGBD sensors.
36  * This kind of observations can carry one or more of these data fields:
37  * - 3D point cloud (as float's).
38  * - 2D range image (as a matrix): Each entry in the matrix "rangeImage(ROW,COLUMN)" contains a distance or a depth (in meters), depending on \a range_is_depth.
39  * - 2D intensity (grayscale or RGB) image (as a mrpt::utils::CImage): For SwissRanger cameras, a logarithmic A-law compression is used to convert the original 16bit intensity to a more standard 8bit graylevel.
40  *
41  * The coordinates of the 3D point cloud are in millimeters with respect to the RGB camera origin of coordinates
42  *
43  * The 2D images and matrices are stored as common images, with an up->down rows order and left->right, as usual.
44  * Optionally, the intensity and confidence channels can be set to delayed-load images for off-rawlog storage so it saves
45  * memory by having loaded in memory just the needed images. See the methods load() and unload().
46  * Due to the intensive storage requirements of this kind of observations, this observation is the only one in MRPT
47  * for which it's recommended to always call "load()" and "unload()" before and after using the observation, *ONLY* when
48  * the observation was read from a rawlog dataset, in order to make sure that all the externally stored data fields are
49  * loaded and ready in memory.
50  *
51  * Classes that grab observations of this type are:
52  * - mrpt::hwdrivers::CSwissRanger3DCamera
53  * - mrpt::hwdrivers::CKinect
54  *
55  *
56  * 3D point clouds can be generated at any moment after grabbing with CObservationRGBD360::project3DPointsFromDepthImage() and CObservationRGBD360::project3DPointsFromDepthImageInto(), provided the correct
57  * calibration parameters.
58  *
59  * \note Starting at serialization version 3 (MRPT 0.9.1+), the 3D point cloud and the rangeImage can both be stored externally to save rawlog space.
60  * \note Starting at serialization version 5 (MRPT 0.9.5+), the new field \a range_is_depth
61  * \note Starting at serialization version 6 (MRPT 0.9.5+), the new field \a intensityImageChannel
62  *
63  * \sa mrpt::hwdrivers::CSwissRanger3DCamera, mrpt::hwdrivers::COpenNI2_RGBD360, CObservation
64  * \ingroup mrpt_obs_grp
65  */
67  {
68  // This must be added to any CSerializable derived class:
70 
71  protected:
 // Delayed-load ("external storage") bookkeeping: when the heavy data fields are
 // stored outside the rawlog, only the file names are kept here (see class doc above).
72  bool m_points3D_external_stored; //!< If set to true, m_points3D_external_file is valid.
73  std::string m_points3D_external_file; //!< 3D points are in CImage::IMAGES_PATH_BASE+<this_file_name>
74 
75  bool m_rangeImage_external_stored; //!< If set to true, m_rangeImage_external_file is valid.
76  std::string m_rangeImage_external_file; //!< rangeImage is in CImage::IMAGES_PATH_BASE+<this_file_name>
77 
78  public:
79  CObservationRGBD360( ); //!< Default constructor
80  virtual ~CObservationRGBD360( ); //!< Destructor
81 
82  /** @name Delayed-load manual control methods.
83  @{ */
84 // /** Makes sure all images and other fields which may be externally stored are loaded in memory.
85 // * Note that for all CImages, calling load() is not required since the images will be automatically loaded upon first access, so load() shouldn't be needed to be called in normal cases by the user.
86 // * If all the data were already loaded or this object has no externally stored data fields, calling this method has no effect.
87 // * \sa unload
88 // */
89 // virtual void load() const;
90 // /** Unload all images, for the case of them being delayed-load images stored in external files (otherwise, has no effect).
91 // * \sa load
92 // */
93 // virtual void unload();
94 // /** @} */
95 //
96 // /** Project the RGB+D images into a 3D point cloud (with color if the target map supports it) and optionally at a given 3D pose.
97 // * The 3D point coordinates are computed from the depth image (\a rangeImage) and the depth camera camera parameters (\a cameraParams).
98 // * There exist two set of formulas for projecting the i'th point, depending on the value of "range_is_depth".
99 // * In all formulas below, "rangeImage" is the matrix of ranges and the pixel coordinates are (r,c).
100 // *
101 // * 1) [range_is_depth=true] With "range equals depth" or "Kinect-like depth mode": the range values
102 // * are in fact distances along the "+X" axis, not real 3D ranges (this is the way Kinect reports ranges):
103 // *
104 // * \code
105 // * x(i) = rangeImage(r,c)
106 // * y(i) = (r_cx - c) * x(i) / r_fx
107 // * z(i) = (r_cy - r) * x(i) / r_fy
108 // * \endcode
109 // *
110 // *
111 // * 2) [range_is_depth=false] With "normal ranges": range means distance in 3D. This must be set when
112 // * processing data from the SwissRanger 3D camera, among others.
113 // *
114 // * \code
115 // * Ky = (r_cx - c)/r_fx
116 // * Kz = (r_cy - r)/r_fy
117 // *
118 // * x(i) = rangeImage(r,c) / sqrt( 1 + Ky^2 + Kz^2 )
119 // * y(i) = Ky * x(i)
120 // * z(i) = Kz * x(i)
121 // * \endcode
122 // *
123 // * The color of each point is determined by projecting the 3D local point into the RGB image using \a cameraParamsIntensity.
124 // *
125 // * By default the local coordinates of points are directly stored into the local map, but if indicated so in \a takeIntoAccountSensorPoseOnRobot
126 // * the points are transformed with \a sensorPose. Furthermore, if provided, those coordinates are transformed with \a robotPoseInTheWorld
127 // *
128 // * \param[in] PROJ3D_USE_LUT (Only when range_is_depth=true) Whether to use a Look-up-table (LUT) to speed up the conversion. It's thread safe in all situations <b>except</b> when you call this method from different threads <b>and</b> with different camera parameter matrices. In all other cases, it's a good idea to left it enabled.
129 // * \tparam POINTMAP Supported maps are all those covered by mrpt::utils::PointCloudAdapter (mrpt::slam::CPointsMap and derived, mrpt::opengl::CPointCloudColoured, PCL point clouds,...)
130 // *
131 // * \note In MRPT < 0.9.5, this method always assumes that ranges were in Kinect-like format.
132 // */
133 // template <class POINTMAP>
134 // inline void project3DPointsFromDepthImageInto(
135 // POINTMAP & dest_pointcloud,
136 // const bool takeIntoAccountSensorPoseOnRobot,
137 // const mrpt::poses::CPose3D *robotPoseInTheWorld=NULL,
138 // const bool PROJ3D_USE_LUT=true)
139 // {
140 // detail::project3DPointsFromDepthImageInto<POINTMAP>(*this,dest_pointcloud,takeIntoAccountSensorPoseOnRobot,robotPoseInTheWorld,PROJ3D_USE_LUT);
141 // }
142 //
143 // /** This method is equivalent to \c project3DPointsFromDepthImageInto() storing the projected 3D points (without color, in local coordinates) in this same class.
144 // * For new code it's recommended to use instead \c project3DPointsFromDepthImageInto() which is much more versatile.
145 // */
146 // inline void project3DPointsFromDepthImage(const bool PROJ3D_USE_LUT=true) {
147 // this->project3DPointsFromDepthImageInto(*this,false,NULL,PROJ3D_USE_LUT);
148 // }
149 //
150 //
151 // /** Convert this 3D observation into an "equivalent 2D fake laser scan", with a configurable vertical FOV.
152 // *
153 // * The result is a 2D laser scan with more "rays" (N) than columns has the 3D observation (W), exactly: N = W * oversampling_ratio.
154 // * This oversampling is required since laser scans sample the space at evenly-separated angles, while
155 // * a range camera follows a tangent-like distribution. By oversampling we make sure we don't leave "gaps" unseen by the virtual "2D laser".
156 // *
157 // * All obstacles within a frustum are considered and the minimum distance is kept in each direction.
158 // * The horizontal FOV of the frustum is automatically computed from the intrinsic parameters, but the
159 // * vertical FOV must be provided by the user, and can be set to be asymmetric which may be useful
160 // * depending on the zone of interest where to look for obstacles.
161 // *
162 // * All spatial transformations are rigorously taken into account in this class, using the depth camera
163 // * intrinsic calibration parameters.
164 // *
165 // * The timestamp of the new object is copied from the 3D object.
166 // * Obviously, a requisite for calling this method is the 3D observation having range data,
167 // * i.e. hasRangeImage must be true. It's not needed to have RGB data nor the raw 3D point clouds
168 // * for this method to work.
169 // *
170 // * \param[out] out_scan2d The resulting 2D equivalent scan.
171 // * \param[in] sensorLabel The sensor label that will have the newly created observation.
172 // * \param[in] angle_sup (Default=5deg) The upper half-FOV angle (in radians)
173 // * \param[in] angle_inf (Default=5deg) The lower half-FOV angle (in radians)
174 // * \param[in] oversampling_ratio (Default=1.2=120%) How many more laser scans rays to create (read above).
175 // *
176 // * \sa The example in http://www.mrpt.org/Example_Kinect_To_2D_laser_scan
177 // */
178 // void convertTo2DScan(
179 // mrpt::slam::CObservation2DRangeScan & out_scan2d,
180 // const std::string & sensorLabel,
181 // const double angle_sup = DEG2RAD(5),
182 // const double angle_inf = DEG2RAD(5),
183 // const double oversampling_ratio = 1.2 );
184 //
185 //
186 // bool hasPoints3D; //!< true means the field points3D contains valid data.
187 // std::vector<float> points3D_x; //!< If hasPoints3D=true, the X coordinates of the 3D point cloud detected by the camera. \sa resizePoints3DVectors
188 // std::vector<float> points3D_y; //!< If hasPoints3D=true, the Y coordinates of the 3D point cloud detected by the camera. \sa resizePoints3DVectors
189 // std::vector<float> points3D_z; //!< If hasPoints3D=true, the Z coordinates of the 3D point cloud detected by the camera. \sa resizePoints3DVectors
190 //
191 // /** Use this method instead of resizing all three \a points3D_x, \a points3D_y & \a points3D_z to allow the usage of the internal memory pool. */
192 // void resizePoints3DVectors(const size_t nPoints);
193 //
194 // // 3D points external storage functions ---------
195 // inline bool points3D_isExternallyStored() const { return m_points3D_external_stored; }
196 // inline std::string points3D_getExternalStorageFile() const { return m_points3D_external_file; }
197 // void points3D_getExternalStorageFileAbsolutePath(std::string &out_path) const;
198 // inline std::string points3D_getExternalStorageFileAbsolutePath() const {
199 // std::string tmp;
200 // points3D_getExternalStorageFileAbsolutePath(tmp);
201 // return tmp;
202 // }
203 // void points3D_convertToExternalStorage( const std::string &fileName, const std::string &use_this_base_dir ); //!< Users won't normally want to call this, it's only used from internal MRPT programs.
204 // // ---------
205 //
206 
207  static const int NUM_SENSORS = 2; //!< Number of individual RGBD sensors composing the omnidirectional rig
208 
209  mrpt::system::TTimeStamp timestamps[NUM_SENSORS]; //!< Acquisition timestamp of each of the NUM_SENSORS sensors
210 
211  bool hasRangeImage; //!< true means the field rangeImage contains valid data
212  mrpt::math::CMatrix rangeImages[NUM_SENSORS]; //!< If hasRangeImage=true, a matrix of floats with the range data as captured by the camera (in meters) \sa range_is_depth
213 
214  void rangeImage_setSize(const int HEIGHT, const int WIDTH, const unsigned sensor_id); //!< Similar to calling "rangeImage.setSize(H,W)" but this method provides memory pooling to speed-up the memory allocation.
215 
216 // // Range Matrix external storage functions ---------
217 // inline bool rangeImage_isExternallyStored() const { return m_rangeImage_external_stored; }
218 // inline std::string rangeImage_getExternalStorageFile() const { return m_rangeImage_external_file; }
219 // void rangeImage_getExternalStorageFileAbsolutePath(std::string &out_path) const;
220 // inline std::string rangeImage_getExternalStorageFileAbsolutePath() const {
221 // std::string tmp;
222 // rangeImage_getExternalStorageFileAbsolutePath(tmp);
223 // return tmp;
224 // }
225 // void rangeImage_convertToExternalStorage( const std::string &fileName, const std::string &use_this_base_dir ); //!< Users won't normally want to call this, it's only used from internal MRPT programs.
226 // /** Forces marking this observation as non-externally stored - it doesn't do anything else apart from resetting the corresponding flag (Users won't normally want to call this, it's only used from internal MRPT programs) */
227 // void rangeImage_forceResetExternalStorage() { m_rangeImage_external_stored=false; }
228 // // ---------
229 //
230 // /** Enum type for intensityImageChannel */
231 // enum TIntensityChannelID
232 // {
233 // CH_VISIBLE = 0, //!< Grayscale or RGB visible channel of the camera sensor.
234 // CH_IR = 1 //!< Infrarred (IR) channel
235 // };
236 //
237  bool hasIntensityImage; //!< true means the field intensityImage contains valid data
238  mrpt::utils::CImage intensityImages[NUM_SENSORS]; //!< If hasIntensityImage=true, a color or gray-level intensity image of the same size than "rangeImage"
239 //
240 // bool hasConfidenceImage; //!< true means the field confidenceImage contains valid data
241 // mrpt::utils::CImage confidenceImage; //!< If hasConfidenceImage=true, an image with the "confidence" value [range 0-255] as estimated by the capture drivers.
242 //
243  mrpt::utils::TCamera sensorParamss[NUM_SENSORS]; //!< Projection parameters of each of the NUM_SENSORS RGBD sensors. NOTE(review): the double-s name looks like a typo but is public API - keep for compatibility.
244 
245 
246  float maxRange; //!< The maximum range allowed by the device, in meters (e.g. 8.0m, 5.0m,...)
247  CPose3D sensorPose; //!< The 6D pose of the sensor on the robot.
248  float stdError; //!< The "sigma" error of the device in meters, used while inserting the scan in an occupancy grid.
249 
250  /** A general method to retrieve the sensor pose on the robot.
251  * Note that most sensors will return a full (6D) CPose3D, but see the derived classes for more details or special cases.
 * \param[out] out_sensorPose On return, a copy of \a sensorPose.
252  * \sa setSensorPose
253  */
254  void getSensorPose( CPose3D &out_sensorPose ) const { out_sensorPose = sensorPose; }
255 
256  /** A general method to change the sensor pose on the robot.
257  * Note that most sensors will use the full (6D) CPose3D, but see the derived classes for more details or special cases.
 * \param[in] newSensorPose The new 6D sensor pose, stored into \a sensorPose.
258  * \sa getSensorPose
259  */
260  void setSensorPose( const CPose3D &newSensorPose ) { sensorPose = newSensorPose; }
261 //
262 // void swap(CObservationRGBD360 &o); //!< Very efficient method to swap the contents of two observations.
263 //
264 // void getZoneAsObs( CObservationRGBD360 &obs, const unsigned int &r1, const unsigned int &r2, const unsigned int &c1, const unsigned int &c2 );
265 //
266 // /** A Levenberg-Marquart-based optimizer to recover the calibration parameters of a 3D camera given a range (depth) image and the corresponding 3D point cloud.
267 // * \param camera_offset The offset (in meters) in the +X direction of the point cloud. It's 1cm for SwissRanger SR4000.
268 // * \return The final average reprojection error per pixel (typ <0.05 px)
269 // */
270 // static double recoverCameraCalibrationParameters(
271 // const CObservationRGBD360 &in_obs,
272 // mrpt::utils::TCamera &out_camParams,
273 // const double camera_offset = 0.01 );
274 //
275 // /** Look-up-table struct for project3DPointsFromDepthImageInto() */
276 // struct TCached3DProjTables
277 // {
278 // mrpt::vector_float Kzs,Kys;
279 // TCamera prev_camParams;
280 // };
281 // static TCached3DProjTables m_3dproj_lut; //!< 3D point cloud projection look-up-table \sa project3DPointsFromDepthImage
282 
283  }; // End of class def.
284  DEFINE_SERIALIZABLE_POST_CUSTOM_BASE_LINKAGE( CObservationRGBD360, CObservation, OBS_IMPEXP )
285 
286 
287  } // End of namespace
288 
289 // namespace utils
290 // {
291 // using namespace ::mrpt::slam;
292 // // Specialization must occur in the same namespace
293 // MRPT_DECLARE_TTYPENAME_PTR(CObservationRGBD360)
294 //
295 // // Enum <-> string converter:
296 // template <>
297 // struct TEnumTypeFiller<slam::CObservationRGBD360::TIntensityChannelID>
298 // {
299 // typedef slam::CObservationRGBD360::TIntensityChannelID enum_t;
300 // static void fill(bimap<enum_t,std::string> &m_map)
301 // {
302 // m_map.insert(slam::CObservationRGBD360::CH_VISIBLE, "CH_VISIBLE");
303 // m_map.insert(slam::CObservationRGBD360::CH_IR, "CH_IR");
304 // }
305 // };
306 // }
307 
308 // namespace utils
309 // {
310 // using mrpt::slam::CObservationRGBD360;
311 //
312 // /** Specialization mrpt::utils::PointCloudAdapter<CObservationRGBD360> \ingroup mrpt_adapters_grp */
313 // template <>
314 // class PointCloudAdapter<CObservationRGBD360> : public detail::PointCloudAdapterHelperNoRGB<CObservationRGBD360,float>
315 // {
316 // private:
317 // CObservationRGBD360 &m_obj;
318 // public:
319 // typedef float coords_t; //!< The type of each point XYZ coordinates
320 // static const int HAS_RGB = 0; //!< Has any color RGB info?
321 // static const int HAS_RGBf = 0; //!< Has native RGB info (as floats)?
322 // static const int HAS_RGBu8 = 0; //!< Has native RGB info (as uint8_t)?
323 //
324 // /** Constructor (accept a const ref for convenience) */
325 // inline PointCloudAdapter(const CObservationRGBD360 &obj) : m_obj(*const_cast<CObservationRGBD360*>(&obj)) { }
326 // /** Get number of points */
327 // inline size_t size() const { return m_obj.points3D_x.size(); }
328 // /** Set number of points (to uninitialized values) */
329 // inline void resize(const size_t N) {
330 // if (N) m_obj.hasPoints3D = true;
331 // m_obj.resizePoints3DVectors(N);
332 // }
333 //
334 // /** Get XYZ coordinates of i'th point */
335 // template <typename T>
336 // inline void getPointXYZ(const size_t idx, T &x,T &y, T &z) const {
337 // x=m_obj.points3D_x[idx];
338 // y=m_obj.points3D_y[idx];
339 // z=m_obj.points3D_z[idx];
340 // }
341 // /** Set XYZ coordinates of i'th point */
342 // inline void setPointXYZ(const size_t idx, const coords_t x,const coords_t y, const coords_t z) {
343 // m_obj.points3D_x[idx]=x;
344 // m_obj.points3D_y[idx]=y;
345 // m_obj.points3D_z[idx]=z;
346 // }
347 // }; // end of PointCloudAdapter<CObservationRGBD360>
348 // }
349 } // End of namespace
350 
351 //#include "CObservationRGBD360_project3D_impl.h"
352 
353 #endif
uint64_t TTimeStamp
A system independent time type, it holds the the number of 100-nanosecond intervals since January 1...
Definition: datetime.h:30
A class for storing images as grayscale or RGB bitmaps.
Definition: CImage.h:97
void setSensorPose(const CPose3D &newSensorPose)
A general method to change the sensor pose on the robot.
STL namespace.
#define DEFINE_SERIALIZABLE_PRE_CUSTOM_BASE_LINKAGE(class_name, base_name, _LINKAGE_)
This declaration must be inserted in all CSerializable classes definition, before the class declarati...
Declares a class that represents any robot's observation.
Definition: CObservation.h:52
This is the global namespace for all Mobile Robot Programming Toolkit (MRPT) libraries.
#define DEFINE_SERIALIZABLE(class_name)
This declaration must be inserted in all CSerializable classes definition, within the class declarati...
A class used to store a 3D pose (a 3D translation + a rotation in 3D).
Definition: CPose3D.h:69
This class is a "CSerializable" wrapper for "CMatrixFloat".
Definition: CMatrix.h:30
#define DEFINE_SERIALIZABLE_POST_CUSTOM_BASE_LINKAGE(class_name, base_name, _LINKAGE_)
Declares a class derived from "CObservation" that encapsulates an omnidirectional RGBD measurement from...
Structure to hold the parameters of a pinhole camera model.
Definition: TCamera.h:31



Page generated by Doxygen 1.8.8 for MRPT 1.2.2 SVN:Unversioned directory at Tue Oct 14 02:14:08 UTC 2014