CObservation3DRangeScan_project3D_impl.h
/* +---------------------------------------------------------------------------+
   |                Mobile Robot Programming Toolkit (MRPT)                     |
   |                          http://www.mrpt.org/                              |
   |                                                                            |
   | Copyright (c) 2005-2016, Individual contributors, see AUTHORS file         |
   | See: http://www.mrpt.org/Authors - All rights reserved.                    |
   | Released under BSD License. See details in http://www.mrpt.org/License     |
   +---------------------------------------------------------------------------+ */
#ifndef CObservation3DRangeScan_project3D_impl_H
#define CObservation3DRangeScan_project3D_impl_H

#include <mrpt/utils/round.h> // round()

namespace mrpt {
namespace obs {
namespace detail {
    // Auxiliary functions which implement SSE-optimized projection of 3D point clouds:
    template <class POINTMAP> void do_project_3d_pointcloud(const int H,const int W,const float *kys,const float *kzs,const mrpt::math::CMatrix &rangeImage, mrpt::utils::PointCloudAdapter<POINTMAP> &pca, std::vector<uint16_t> &idxs_x, std::vector<uint16_t> &idxs_y);
    template <class POINTMAP> void do_project_3d_pointcloud_SSE2(const int H,const int W,const float *kys,const float *kzs,const mrpt::math::CMatrix &rangeImage, mrpt::utils::PointCloudAdapter<POINTMAP> &pca, std::vector<uint16_t> &idxs_x, std::vector<uint16_t> &idxs_y);

    template <class POINTMAP>
    void project3DPointsFromDepthImageInto(
        CObservation3DRangeScan    & src_obs,
        POINTMAP                   & dest_pointcloud,
        const bool takeIntoAccountSensorPoseOnRobot,
        const mrpt::poses::CPose3D * robotPoseInTheWorld,
        const bool PROJ3D_USE_LUT)
    {
        using namespace mrpt::math;

        if (!src_obs.hasRangeImage) return;

        mrpt::utils::PointCloudAdapter<POINTMAP> pca(dest_pointcloud);

        // ------------------------------------------------------------
        // Stage 1/3: Create 3D point cloud local coordinates
        // ------------------------------------------------------------
        const int W = src_obs.rangeImage.cols();
        const int H = src_obs.rangeImage.rows();
        const size_t WH = W*H;

        pca.resize(WH); // Reserve memory for the 3D points; it will later be resized to the actual number of valid points.

        if (src_obs.range_is_depth)
        {
            // range_is_depth = true
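            // (Kinect-like case: each rangeImage entry is already the distance
            //  along the sensor +X axis, so x = D directly and y,z scale linearly.)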

            // Use cached tables?
            if (PROJ3D_USE_LUT)
            {
                // Use LUT:
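                // (The LUT caches, for every pixel (r,c), the camera-model factors
                //  Ky=(cx-c)/fx and Kz=(cy-r)/fy, so each frame costs just one
                //  multiply per coordinate: y=Ky*D, z=Kz*D.)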
                if (src_obs.m_3dproj_lut.prev_camParams!=src_obs.cameraParams || WH!=size_t(src_obs.m_3dproj_lut.Kys.size()))
                {
                    src_obs.m_3dproj_lut.prev_camParams = src_obs.cameraParams;
                    src_obs.m_3dproj_lut.Kys.resize(WH);
                    src_obs.m_3dproj_lut.Kzs.resize(WH);

                    const float r_cx = src_obs.cameraParams.cx();
                    const float r_cy = src_obs.cameraParams.cy();
                    const float r_fx_inv = 1.0f/src_obs.cameraParams.fx();
                    const float r_fy_inv = 1.0f/src_obs.cameraParams.fy();

                    float *kys = &src_obs.m_3dproj_lut.Kys[0];
                    float *kzs = &src_obs.m_3dproj_lut.Kzs[0];
                    for (int r=0;r<H;r++)
                        for (int c=0;c<W;c++)
                        {
                            *kys++ = (r_cx - c) * r_fx_inv;
                            *kzs++ = (r_cy - r) * r_fy_inv;
                        }
                } // end update LUT.

                ASSERT_EQUAL_(WH,size_t(src_obs.m_3dproj_lut.Kys.size()))
                ASSERT_EQUAL_(WH,size_t(src_obs.m_3dproj_lut.Kzs.size()))
                float *kys = &src_obs.m_3dproj_lut.Kys[0];
                float *kzs = &src_obs.m_3dproj_lut.Kzs[0];

#if MRPT_HAS_SSE2
                if ((W & 0x07)==0)
                     do_project_3d_pointcloud_SSE2(H,W,kys,kzs,src_obs.rangeImage,pca, src_obs.points3D_idxs_x, src_obs.points3D_idxs_y );
                else do_project_3d_pointcloud(H,W,kys,kzs,src_obs.rangeImage,pca, src_obs.points3D_idxs_x, src_obs.points3D_idxs_y ); // if image width is not 8*N, use standard method
#else
                do_project_3d_pointcloud(H,W,kys,kzs,src_obs.rangeImage,pca,src_obs.points3D_idxs_x, src_obs.points3D_idxs_y);
#endif
            }
            else
            {
                // Without LUT:
                const float r_cx = src_obs.cameraParams.cx();
                const float r_cy = src_obs.cameraParams.cy();
                const float r_fx_inv = 1.0f/src_obs.cameraParams.fx();
                const float r_fy_inv = 1.0f/src_obs.cameraParams.fy();
                size_t idx=0;
                for (int r=0;r<H;r++)
                    for (int c=0;c<W;c++)
                    {
                        const float D = src_obs.rangeImage.coeff(r,c);
                        if (D!=.0f) {
                            const float Kz = (r_cy - r) * r_fy_inv;
                            const float Ky = (r_cx - c) * r_fx_inv;
                            pca.setPointXYZ(idx,
                                D,      // x
                                Ky * D, // y
                                Kz * D  // z
                                );
                            src_obs.points3D_idxs_x[idx]=c;
                            src_obs.points3D_idxs_y[idx]=r;
                            ++idx;
                        }
                    }
                pca.resize(idx); // Actual number of valid pts
            }
        }
        else
        {
            /* range_is_depth = false :
             *    Ky = (r_cx - c)/r_fx
             *    Kz = (r_cy - r)/r_fy
             *
             *    x(i) = rangeImage(r,c) / sqrt( 1 + Ky^2 + Kz^2 )
             *    y(i) = Ky * x(i)
             *    z(i) = Kz * x(i)
             */
            const float r_cx = src_obs.cameraParams.cx();
            const float r_cy = src_obs.cameraParams.cy();
            const float r_fx_inv = 1.0f/src_obs.cameraParams.fx();
            const float r_fy_inv = 1.0f/src_obs.cameraParams.fy();
            size_t idx=0;
            for (int r=0;r<H;r++)
                for (int c=0;c<W;c++)
                {
                    const float D = src_obs.rangeImage.coeff(r,c);
                    if (D!=.0f) {
                        const float Ky = (r_cx - c) * r_fx_inv;
                        const float Kz = (r_cy - r) * r_fy_inv;
                        // Normalize the range into the forward (x) coordinate, then
                        // scale y,z by it, per the formulas in the comment above:
                        const float x = D / std::sqrt(1+Ky*Ky+Kz*Kz);
                        pca.setPointXYZ(idx,
                            x,      // x
                            Ky * x, // y
                            Kz * x  // z
                            );
                        src_obs.points3D_idxs_x[idx]=c;
                        src_obs.points3D_idxs_y[idx]=r;
                        ++idx;
                    }
                }
            pca.resize(idx); // Actual number of valid pts
        }

        // -------------------------------------------------------------
        // Stage 2/3: Project local points into RGB image to get colors
        // -------------------------------------------------------------
        if (src_obs.hasIntensityImage)
        {
            const int imgW = src_obs.intensityImage.getWidth();
            const int imgH = src_obs.intensityImage.getHeight();
            const bool hasColorIntensityImg = src_obs.intensityImage.isColor();

            const float cx = src_obs.cameraParamsIntensity.cx();
            const float cy = src_obs.cameraParamsIntensity.cy();
            const float fx = src_obs.cameraParamsIntensity.fx();
            const float fy = src_obs.cameraParamsIntensity.fy();

            // Unless we are in a special case (both depth & RGB images coincide)...
            const bool isDirectCorresp = src_obs.doDepthAndIntensityCamerasCoincide();
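            // (Direct correspondence means depth pixel (r,c) maps onto the same
            //  intensity pixel (r,c), so no per-point reprojection is needed below.)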

            // ...precompute the inverse of the pose transformation out of the loop,
            // store as a 4x4 homogeneous matrix to exploit SSE optimizations below:
            Eigen::Matrix<float,4,4> T_inv;
            if (!isDirectCorresp)
            {
                mrpt::math::CMatrixFixedNumeric<double,3,3> R_inv;
                mrpt::math::CMatrixFixedNumeric<double,3,1> t_inv;
                mrpt::math::homogeneousMatrixInverse(
                    src_obs.relativePoseIntensityWRTDepth.getRotationMatrix(), src_obs.relativePoseIntensityWRTDepth.m_coords,
                    R_inv,t_inv);

                T_inv(3,3)=1;
                T_inv.block<3,3>(0,0)=R_inv.cast<float>();
                T_inv.block<3,1>(0,3)=t_inv.cast<float>();
            }

            Eigen::Matrix<float,4,1> pt_wrt_color, pt_wrt_depth;
            pt_wrt_depth[3]=1;
            mrpt::utils::TColor pCol;

            // For each local point:
            const size_t nPts = pca.size();
            for (size_t i=0;i<nPts;i++)
            {
                int img_idx_x, img_idx_y; // projected pixel coordinates, in the RGB image plane
                bool pointWithinImage = false;
                if (isDirectCorresp)
                {
                    pointWithinImage=true;
                    img_idx_x = src_obs.points3D_idxs_x[i];
                    img_idx_y = src_obs.points3D_idxs_y[i];
                }
                else
                {
                    // Project the point, which is now in "pca" in local coordinates wrt the depth camera, into the intensity camera:
                    pca.getPointXYZ(i,pt_wrt_depth[0],pt_wrt_depth[1],pt_wrt_depth[2]);
                    pt_wrt_color.noalias() = T_inv*pt_wrt_depth;

                    // Project to image plane:
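                    // Pinhole model: u = cx + fx*X/Z, v = cy + fy*Y/Z, with (X,Y,Z)
                    // the point in the color camera's optical frame (Z forward); the
                    // Z!=0 test below just guards the division.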
                    if (pt_wrt_color[2]) {
                        img_idx_x = mrpt::utils::round( cx + fx * pt_wrt_color[0]/pt_wrt_color[2] );
                        img_idx_y = mrpt::utils::round( cy + fy * pt_wrt_color[1]/pt_wrt_color[2] );
                        pointWithinImage =
                            img_idx_x>=0 && img_idx_x<imgW &&
                            img_idx_y>=0 && img_idx_y<imgH;
                    }
                }

                if (pointWithinImage)
                {
                    if (hasColorIntensityImg) {
                        const uint8_t *c = src_obs.intensityImage.get_unsafe(img_idx_x, img_idx_y, 0);
                        pCol.R = c[2];
                        pCol.G = c[1];
                        pCol.B = c[0];
                    }
                    else {
                        uint8_t c = *src_obs.intensityImage.get_unsafe(img_idx_x, img_idx_y, 0);
                        pCol.R = pCol.G = pCol.B = c;
                    }
                }
                else
                {
                    pCol.R = pCol.G = pCol.B = 255;
                }
                // Set color:
                pca.setPointRGBu8(i,pCol.R,pCol.G,pCol.B);
            } // end for each point
        } // end if src_obs has intensity image

        // ...

        // ------------------------------------------------------------
        // Stage 3/3: Apply 6D transformations
        // ------------------------------------------------------------
        if (takeIntoAccountSensorPoseOnRobot || robotPoseInTheWorld)
        {
            mrpt::poses::CPose3D transf_to_apply; // Either ROBOTPOSE or ROBOTPOSE(+)SENSORPOSE or SENSORPOSE
            if (takeIntoAccountSensorPoseOnRobot)
                transf_to_apply = src_obs.sensorPose;
            if (robotPoseInTheWorld)
                transf_to_apply.composeFrom(*robotPoseInTheWorld, mrpt::poses::CPose3D(transf_to_apply));
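            // (Composition: transf_to_apply = robotPoseInTheWorld (+) sensorPose,
            //  so local sensor points end up expressed in the world frame when
            //  both inputs are supplied.)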

            const mrpt::math::CMatrixFixedNumeric<float,4,4> HM = transf_to_apply.getHomogeneousMatrixVal().cast<float>();
            Eigen::Matrix<float,4,1> pt, pt_transf;
            pt[3]=1;

            const size_t nPts = pca.size();
            for (size_t i=0;i<nPts;i++)
            {
                pca.getPointXYZ(i,pt[0],pt[1],pt[2]);
                pt_transf.noalias() = HM*pt;
                pca.setPointXYZ(i,pt_transf[0],pt_transf[1],pt_transf[2]);
            }
        }
    } // end of project3DPointsFromDepthImageInto
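
    /* Usage sketch (illustrative only, not part of the original header): the
     * template above runs when a depth observation is projected into any
     * point-cloud type that has a mrpt::utils::PointCloudAdapter
     * specialization; a colored point map is assumed here as an example:
     *
     *   mrpt::obs::CObservation3DRangeScan obs;
     *   // ... fill obs.rangeImage, obs.cameraParams, obs.range_is_depth ...
     *   mrpt::maps::CColouredPointsMap pts;
     *   mrpt::obs::detail::project3DPointsFromDepthImageInto(
     *       obs, pts,
     *       true,    // takeIntoAccountSensorPoseOnRobot
     *       NULL,    // robotPoseInTheWorld (optional)
     *       true);   // PROJ3D_USE_LUT
     */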

    // Auxiliary function which implements the (non-SSE) projection of 3D point clouds:
    template <class POINTMAP>
    inline void do_project_3d_pointcloud(const int H,const int W,const float *kys,const float *kzs,const mrpt::math::CMatrix &rangeImage, mrpt::utils::PointCloudAdapter<POINTMAP> &pca, std::vector<uint16_t> &idxs_x, std::vector<uint16_t> &idxs_y)
    {
        size_t idx=0;
        for (int r=0;r<H;r++)
            for (int c=0;c<W;c++)
            {
                const float D = rangeImage.coeff(r,c);
                // Always advance the LUT pointers (one entry per pixel) so they
                // stay in sync with (r,c) even across invalid range entries:
                const float ky = *kys++, kz = *kzs++;
                if (D!=.0f) {
                    pca.setPointXYZ(idx, D /*x*/, ky * D /*y*/, kz * D /*z*/);
                    idxs_x[idx]=c;
                    idxs_y[idx]=r;
                    ++idx;
                }
            }
        pca.resize(idx);
    }

    // Auxiliary function which implements the SSE2-optimized projection of 3D point clouds:
    template <class POINTMAP>
    inline void do_project_3d_pointcloud_SSE2(const int H,const int W,const float *kys,const float *kzs,const mrpt::math::CMatrix &rangeImage, mrpt::utils::PointCloudAdapter<POINTMAP> &pca, std::vector<uint16_t> &idxs_x, std::vector<uint16_t> &idxs_y)
    {
#if MRPT_HAS_SSE2
        // Use optimized version:
        const int W_4 = W >> 2; // /=4, since we process 4 values at a time.
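        // (The dispatcher above only takes this path when (W & 0x07)==0, i.e. W is
        //  a multiple of 8, so every aligned 4-float load below stays inside a row.)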
        size_t idx=0;
        MRPT_ALIGN16 float xs[4],ys[4],zs[4];
        for (int r=0;r<H;r++)
        {
            const float *D_ptr = &rangeImage.coeffRef(r,0); // Matrices are 16-aligned

            for (int c=0;c<W_4;c++)
            {
                const __m128 D = _mm_load_ps(D_ptr);

                const __m128 KY = _mm_load_ps(kys);
                const __m128 KZ = _mm_load_ps(kzs);

                _mm_storeu_ps(xs, D);
                _mm_storeu_ps(ys, _mm_mul_ps(KY,D));
                _mm_storeu_ps(zs, _mm_mul_ps(KZ,D));

                for (int q=0;q<4;q++)
                    if (xs[q]!=.0f) {
                        pca.setPointXYZ(idx,xs[q],ys[q],zs[q]);
                        idxs_x[idx]=(c<<2)+q;
                        idxs_y[idx]=r;
                        ++idx;
                    }
                D_ptr+=4;
                kys+=4;
                kzs+=4;
            }
        }
        pca.resize(idx);
#endif
    }

} // end namespace detail
} // end namespace obs
} // end namespace mrpt
#endif