 package boofcv.visualize;
 
+import boofcv.alg.interpolate.InterpolatePixelMB;
 import boofcv.alg.misc.ImageMiscOps;
+import boofcv.factory.interpolate.FactoryInterpolation;
 import boofcv.misc.BoofMiscOps;
+import boofcv.struct.border.BorderType;
 import boofcv.struct.calib.CameraPinhole;
 import boofcv.struct.image.GrayF32;
 import boofcv.struct.image.InterleavedU8;
 import boofcv.struct.mesh.VertexMesh;
 import georegression.geometry.UtilPolygons2D_F64;
 import georegression.metric.Intersection2D_F64;
+import georegression.struct.point.Point2D_F32;
 import georegression.struct.point.Point2D_F64;
 import georegression.struct.point.Point3D_F64;
 import georegression.struct.se.Se3_F64;
+import georegression.struct.shapes.Polygon2D_F32;
 import georegression.struct.shapes.Polygon2D_F64;
 import georegression.struct.shapes.Rectangle2D_I32;
 import lombok.Getter;
 import lombok.Setter;
+import org.ddogleg.struct.DogArray;
+import org.ddogleg.struct.FastAccess;
 import org.ddogleg.struct.VerbosePrint;
 import org.jetbrains.annotations.Nullable;
@@ -71,14 +78,37 @@ public class RenderMesh implements VerbosePrint {
 	/** Transform from world (what the mesh is in) to the camera view */
 	public @Getter final Se3_F64 worldToView = new Se3_F64();
 
-	// Workspace variables
+	/** If true then a polygon will only be rendered if the surface normal is pointed towards the camera */
+	public @Getter @Setter boolean checkSurfaceNormal = true;
+
+	// Image for texture mapping
+	private InterleavedU8 textureImage = new InterleavedU8(1, 1, 3);
+	private InterpolatePixelMB<InterleavedU8> textureInterp = FactoryInterpolation.bilinearPixelMB(textureImage, BorderType.EXTENDED);
+	private float[] textureValues = new float[3];
+
+	//---------- Workspace variables
 	private final Point3D_F64 camera = new Point3D_F64();
 	private final Point2D_F64 point = new Point2D_F64();
-	private final Polygon2D_F64 polygon = new Polygon2D_F64();
+
+	// Mesh in the camera reference frame
+	private final DogArray<Point3D_F64> meshCam = new DogArray<>(Point3D_F64::new);
+	// Mesh projected onto the image
+	private final Polygon2D_F64 polygonProj = new Polygon2D_F64();
+	// Vertexes of the polygon in the texture image
+	private final Polygon2D_F32 polygonTex = new Polygon2D_F32();
+	// Axis aligned bounding box
 	final Rectangle2D_I32 aabb = new Rectangle2D_I32();
+	// Workspace for a sub-triangle in the polygon
+	private final Polygon2D_F64 workTri = new Polygon2D_F64(3);
 
 	@Nullable PrintStream verbose = null;
 
+	public void setTextureImage( InterleavedU8 textureImage ) {
+		this.textureImage = textureImage;
+		textureInterp.setImage(textureImage);
+		textureValues = new float[textureImage.numBands];
+	}
+
 	/**
 	 * Renders the mesh onto an image. Produces an RGB image and depth image. Must have configured
 	 * {@link #intrinsics} already and set {@link #worldToView}.
@@ -89,6 +119,10 @@ public void render( VertexMesh mesh ) {
 		// Sanity check to see if intrinsics has been configured
 		BoofMiscOps.checkTrue(intrinsics.width > 0 && intrinsics.height > 0, "Intrinsics not set");
 
+		// Make sure there are normals if it's configured to use them
+		if (checkSurfaceNormal && mesh.normals.size() == 0)
+			mesh.computeNormals();
+
 		// Initialize output images
 		initializeImages();
@@ -99,8 +133,12 @@ public void render( VertexMesh mesh ) {
 		final double cx = intrinsics.cx;
 		final double cy = intrinsics.cy;
 
+		// Keep track of how many meshes were rendered
 		int shapesRenderedCount = 0;
 
+		var worldCamera = new Point3D_F64();
+		worldToView.transformReverse(worldCamera, worldCamera);
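+		// worldCamera now holds the camera's origin expressed in world coordinates; it is passed to isFrontVisible() for back-face culling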
+
 		for (int shapeIdx = 1; shapeIdx < mesh.offsets.size; shapeIdx++) {
 			// First and last point in the polygon
 			final int idx0 = mesh.offsets.get(shapeIdx - 1);
@@ -111,7 +149,18 @@ public void render( VertexMesh mesh ) {
 				continue;
 
 			// Project points on the shape onto the image and store in polygon
-			polygon.vertexes.reset().reserve(idx1 - idx0);
+			polygonProj.vertexes.reset().reserve(idx1 - idx0);
+			meshCam.reset().reserve(idx1 - idx0);
+
+			if (mesh.texture.size() > 0) {
+				mesh.getTexture(shapeIdx - 1, polygonTex.vertexes);
+			}
+
+			// Prune using normal vector
+			if (mesh.normals.size() > 0 && checkSurfaceNormal) {
+				if (!isFrontVisible(mesh, shapeIdx, idx0, worldCamera)) continue;
+			}
+
 			boolean behindCamera = false;
 			for (int i = idx0; i < idx1; i++) {
 				Point3D_F64 world = mesh.vertexes.getTemp(mesh.indexes.get(i));
@@ -132,22 +181,46 @@ public void render( VertexMesh mesh ) {
 				double pixelX = normX*fx + cx;
 				double pixelY = normY*fy + cy;
 
-				polygon.vertexes.grow().setTo(pixelX, pixelY);
+				polygonProj.vertexes.grow().setTo(pixelX, pixelY);
+				meshCam.grow().setTo(camera);
 			}
 
 			// Skip if not visible
 			if (behindCamera)
 				continue;
 
-			// Compute the pixels which might be able to see polygon
-			computeBoundingBox(width, height, polygon, aabb);
-
-			projectSurfaceOntoImage(mesh, polygon, shapeIdx - 1);
+			if (mesh.texture.size() == 0) {
+				projectSurfaceColor(mesh, polygonProj, shapeIdx - 1);
+			} else {
+				projectSurfaceTexture(meshCam, polygonProj, polygonTex);
+			}
 
 			shapesRenderedCount++;
 		}
 
-		if (verbose != null) verbose.println("total shapes rendered: " + shapesRenderedCount);
+		if (verbose != null) verbose.println("total shapes rendered: " + shapesRenderedCount);
+	}
+
+	/**
+	 * Use the normal vector to see if the front of the mesh is visible. If it's not visible we can skip it.
+	 *
+	 * @return true if visible
+	 */
+	private static boolean isFrontVisible( VertexMesh mesh, int shapeIdx, int idx0, Point3D_F64 worldCamera ) {
+		// Get normal in world coordinates
+		Point3D_F64 normal = mesh.normals.getTemp(shapeIdx - 1);
+
+		// vector from the camera to a vertex
+		Point3D_F64 v1 = mesh.vertexes.getTemp(mesh.indexes.get(idx0));
+		v1.x -= worldCamera.x;
+		v1.y -= worldCamera.y;
+		v1.z -= worldCamera.z;
+
+		// compute the dot product
+		double dot = v1.x*normal.x + v1.y*normal.y + v1.z*normal.z;
+
+		// Don't render if we are viewing it from behind
+		return dot < 0.0;
 	}
 
 	void initializeImages() {
@@ -180,7 +253,7 @@ static void computeBoundingBox( int width, int height, Polygon2D_F64 polygon, Re
 	 * is searched exhaustively. If the projected 2D polygon contains a pixel and the polygon is closer than
 	 * the current depth of the pixel it is rendered there and the depth image is updated.
 	 */
-	void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeIdx ) {
+	void projectSurfaceColor( VertexMesh mesh, Polygon2D_F64 polyProj, int shapeIdx ) {
 		// TODO temp hack. Best way is to find the distance to the 3D polygon at this point. Instead we will
 		// use the depth of the first point.
 		//
@@ -190,6 +263,7 @@ void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeI
 		Point3D_F64 world = mesh.vertexes.getTemp(vertexIndex);
 		worldToView.transform(world, camera);
 
+		// TODO compute the depth at each pixel
 		float depth = (float)camera.z;
 
 		// TODO look at vertexes and get min/max depth. Use that to quickly reject pixels based on depth without
@@ -198,6 +272,8 @@ void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeI
 		// The entire surface will have one color
 		int color = surfaceColor.surfaceRgb(shapeIdx);
 
+		computeBoundingBox(intrinsics.width, intrinsics.height, polygonProj, aabb);
+
 		// Go through all pixels and see if the points are inside the polygon. If so
 		for (int pixelY = aabb.y0; pixelY < aabb.y1; pixelY++) {
 			for (int pixelX = aabb.x0; pixelX < aabb.x1; pixelX++) {
@@ -208,7 +284,7 @@ void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeI
 				}
 
 				point.setTo(pixelX, pixelY);
-				if (!Intersection2D_F64.containsConvex(polygon, point))
+				if (!Intersection2D_F64.containsConvex(polyProj, point))
 					continue;
 
 				// Update depth and image
@@ -219,6 +295,113 @@ void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeI
 		}
 	}
 
+	/**
+	 * Projection with texture mapping. Breaks the polygon up into triangles and uses Barycentric coordinates to
+	 * map pixels to texture-mapped coordinates.
+	 *
+	 * @param mesh 3D location of vertexes in the mesh
+	 * @param polyProj Projected pixels of the mesh
+	 * @param polyText Texture coordinates of the mesh
+	 */
+	void projectSurfaceTexture( FastAccess<Point3D_F64> mesh, Polygon2D_F64 polyProj, Polygon2D_F32 polyText ) {
+		// If the mesh has more than 3 sides, break it up into triangles using the first vertex as a pivot.
+		// This works because the mesh has to be convex.
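+		// e.g. a quad with vertexes (0,1,2,3) is rendered as the triangles (0,1,2) and (0,2,3)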
+		for (int vertC = 2; vertC < polyProj.size(); vertC++) {
+			int vertA = 0;
+			int vertB = vertC - 1;
+
+			float Z0 = (float)mesh.get(vertA).z;
+			float Z1 = (float)mesh.get(vertB).z;
+			float Z2 = (float)mesh.get(vertC).z;
+
+			Point2D_F64 r0 = polyProj.get(vertA);
+			Point2D_F64 r1 = polyProj.get(vertB);
+			Point2D_F64 r2 = polyProj.get(vertC);
+
+			// Pre-compute part of Barycentric Coordinates
+			double x0 = r2.x - r0.x;
+			double y0 = r2.y - r0.y;
+			double x1 = r1.x - r0.x;
+			double y1 = r1.y - r0.y;
+
+			double d00 = x0*x0 + y0*y0;
+			double d01 = x0*x1 + y0*y1;
+			double d11 = x1*x1 + y1*y1;
+
+			double denom = d00*d11 - d01*d01;
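+			// (x0,y0) and (x1,y1) are the triangle edges r2 - r0 and r1 - r0. For a pixel p the weights solve
+			// p - r0 = alpha*(r2 - r0) + beta*(r1 - r0) using the dot products d00..d21 computed here and below.
+			// denom is zero when the projected triangle is degenerate, i.e. its vertexes are collinear.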
+
+			// Compute coordinate on texture image
+			Point2D_F32 t0 = polyText.get(vertC);
+			Point2D_F32 t1 = polyText.get(vertB);
+			Point2D_F32 t2 = polyText.get(vertA);
+
+			// Do the polygon intersection with the triangle in question only
+			workTri.get(0).setTo(polyProj.get(vertA));
+			workTri.get(1).setTo(polyProj.get(vertB));
+			workTri.get(2).setTo(polyProj.get(vertC));
+
+			// TODO look at vertexes and get min/max depth. Use that to quickly reject pixels based on depth without
+			// convex intersection or computing the depth at that pixel on this surface
+
+			computeBoundingBox(intrinsics.width, intrinsics.height, workTri, aabb);
+
+			// Go through all pixels and see if the points are inside the polygon. If so
+			for (int pixelY = aabb.y0; pixelY < aabb.y1; pixelY++) {
+				double y2 = pixelY - r0.y;
+
+				for (int pixelX = aabb.x0; pixelX < aabb.x1; pixelX++) {
+
+					point.setTo(pixelX, pixelY);
+					if (!Intersection2D_F64.containsConvex(workTri, point))
+						continue;
+
+					// See if this is the closest point appearing at this pixel
+					float pixelDepth = depthImage.unsafe_get(pixelX, pixelY);
+
+					// Compute rest of Barycentric
+					double x2 = pixelX - r0.x;
+					double d20 = x2*x0 + y2*y0;
+					double d21 = x2*x1 + y2*y1;
+
+					float alpha = (float)((d11*d20 - d01*d21)/denom);
+					float beta = (float)((d00*d21 - d01*d20)/denom);
+					float gamma = 1.0f - alpha - beta;
+
+					// depth of the mesh at this point
+					float depth = alpha*Z0 + beta*Z1 + gamma*Z2;
+
+					if (!Float.isNaN(pixelDepth) && depth >= pixelDepth) {
+						continue;
+					}
+
+					float u = alpha*t0.x + beta*t1.x + gamma*t2.x;
+					float v = alpha*t0.y + beta*t1.y + gamma*t2.y;
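+					// (u,v) lie in [0,1]; v is flipped below when converting to a pixel row, which assumes the
+					// texture coordinate origin is at the bottom-left while the image origin is top-left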
+
+					float pixTexX = u*(textureImage.width - 1);
+					float pixTexY = (1.0f - v)*(textureImage.height - 1);
+
+					int color = interpolateTextureRgb(pixTexX, pixTexY);
+
+					// Update depth and image
+					// Make sure the alpha channel is set to 100% in RGBA format
+					depthImage.unsafe_set(pixelX, pixelY, depth);
+					rgbImage.set24(pixelX, pixelY, color);
+				}
+			}
+		}
+	}
+
+	/**
+	 * Gets the RGB color using interpolation at the specified pixel coordinate in the texture image
+	 */
+	private int interpolateTextureRgb( float px, float py ) {
+		textureInterp.get(px, py, textureValues);
+		int r = (int)(textureValues[0] + 0.5f);
+		int g = (int)(textureValues[1] + 0.5f);
+		int b = (int)(textureValues[2] + 0.5f);
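+		// Pack the three bands into a single 24-bit 0xRRGGBB integer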
+		return (r << 16) | (g << 8) | b;
+	}
+
 	@Override public void setVerbose( @Nullable PrintStream out, @Nullable Set<String> configuration ) {
 		verbose = BoofMiscOps.addPrefix(this, out);
 	}
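
For reference, a minimal sketch of how the texture-mapped path added above might be driven. Only worldToView, setTextureImage() and render() appear in this diff; direct field access to intrinsics, the fsetK() call, and the no-argument VertexMesh constructor are assumptions about the surrounding BoofCV API, not confirmed by this patch.

	var renderer = new RenderMesh();

	// Pinhole camera the mesh is projected with; render() checks that width/height were set
	renderer.intrinsics.fsetK(500, 500, 0, 320, 240, 640, 480);

	// Place the world origin two units in front of the camera
	renderer.worldToView.T.setTo(0, 0, 2);

	// Supplying a texture image makes render() use projectSurfaceTexture() for shapes with texture
	// coordinates; without one, every shape is filled with a single color via projectSurfaceColor()
	renderer.setTextureImage(new InterleavedU8(512, 512, 3));

	// Mesh with vertexes, indexes, offsets and, optionally, normals and texture coordinates
	var mesh = new VertexMesh();
	renderer.render(mesh);

The rendered output ends up in the rgbImage and depthImage buffers referenced by the projection methods above.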