
Commit 043d300

feature/render-mesh

- It can render meshes
- Can handle non-triangles

1 parent f5d5062 · commit 043d300

8 files changed: +279 −20 lines

applications/src/main/java/boofcv/app/MeshViewerApp.java (+16)

@@ -21,7 +21,10 @@
 import boofcv.gui.BoofSwingUtil;
 import boofcv.gui.image.ShowImages;
 import boofcv.gui.mesh.MeshViewerPanel;
+import boofcv.io.image.UtilImageIO;
 import boofcv.io.points.PointCloudIO;
+import boofcv.struct.image.ImageType;
+import boofcv.struct.image.InterleavedU8;
 import boofcv.struct.mesh.VertexMesh;
 import org.apache.commons.io.FilenameUtils;
 import org.ddogleg.struct.DogArray_I32;
@@ -68,11 +71,24 @@ private static void loadFile( File file ) {
             System.err.println("No shapes to render! Is this a point cloud?");
         }

+        // See if there should be a texture mapped file
+        InterleavedU8 rgb = null;
+        if (mesh.texture.size() > 0) {
+            System.out.println("Loading texture image");
+            String name = FilenameUtils.getBaseName(file.getName());
+            File textureFile = new File(file.getParentFile(), name + ".jpg");
+            rgb = UtilImageIO.loadImage(textureFile, true, ImageType.IL_U8);
+            if (rgb == null)
+                System.err.println("Failed to load texture image");
+        }
+        InterleavedU8 _rgb = rgb;
         SwingUtilities.invokeLater(() -> {
             var panel = new MeshViewerPanel();
             panel.setMesh(mesh, false);
             if (colors.size > 0)
                 panel.setVertexColors("RGB", colors.data);
+            if (_rgb != null)
+                panel.setTextureImage(_rgb);
             panel.setPreferredSize(new Dimension(500, 500));
             ShowImages.showWindow(panel, "Mesh Viewer", true);
         });
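The texture lookup above follows a simple naming convention: the texture image is expected to sit next to the mesh file and share its base name, with a .jpg extension. A minimal sketch of that convention, using a hypothetical path and the same helpers the code above relies on:

File meshFile = new File("/data/scans/statue.ply");                    // hypothetical input mesh
String base = FilenameUtils.getBaseName(meshFile.getName());           // "statue"
File textureFile = new File(meshFile.getParentFile(), base + ".jpg");  // "/data/scans/statue.jpg"
// loadImage() returns null when the file is missing or unreadable, so the caller must check for that
InterleavedU8 rgb = UtilImageIO.loadImage(textureFile, true, ImageType.IL_U8);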

integration/boofcv-swing/src/main/java/boofcv/gui/mesh/MeshViewerPanel.java (+5 −1)

@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2023, Peter Abeles. All Rights Reserved.
+ * Copyright (c) 2024, Peter Abeles. All Rights Reserved.
 *
 * This file is part of BoofCV (http://boofcv.org).
 *
@@ -494,4 +494,8 @@ public int getRenderBackgroundColor() {
         coolDownTime = System.currentTimeMillis() + 200L;
         return false;
     }
+
+    public void setTextureImage( InterleavedU8 rgb ) {
+        renderer.setTextureImage(rgb);
+    }
 }

main/boofcv-io/src/main/java/boofcv/io/points/impl/PlyCodec.java (+5 −2)

@@ -379,7 +379,9 @@ public static void readMesh( InputStream input, VertexMesh mesh, DogArray_I32 co
                 mesh.indexes.addAll(indexes, offset, offset + length);
             }

-            @Override public void addTexture( int count, float[] coor ) {}
+            @Override public void addTexture( int count, float[] coor ) {
+                mesh.addTexture(count, coor);
+            }
         });
     }

@@ -590,8 +592,9 @@ private static void readTextureCoor( InputStream reader, DataType valueType, Byt

                 default -> throw new RuntimeException("Unexpected type");
             }
-            output.addTexture(count, tempF);
         }
+        output.addTexture(count/2, tempF);
+
     }

     private static PlyWriter wrapMeshForWriting( VertexMesh mesh, @Nullable DogArray_I32 colorRGB ) {
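The relocated call also changes what count means: tempF holds the texture coordinates as an interleaved list, presumably (u, v) pairs, so the mesh is told how many 2D coordinates were read (count/2) rather than how many floats, and the callback now fires once per coordinate list instead of from inside the value-reading loop. A small sketch of the convention with made-up values:

// A triangular face whose three (u,v) texture coordinates are flattened into one array
float[] coor = {0.0f, 0.0f, 1.0f, 0.0f, 0.5f, 1.0f};  // u0,v0, u1,v1, u2,v2
int count = coor.length;                              // 6 floats read from the PLY element
mesh.addTexture(count/2, coor);                       // registers 3 texture coordinates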

main/boofcv-io/src/main/java/boofcv/visualize/RenderMesh.java (+194 −11)

@@ -18,21 +18,28 @@

 package boofcv.visualize;

+import boofcv.alg.interpolate.InterpolatePixelMB;
 import boofcv.alg.misc.ImageMiscOps;
+import boofcv.factory.interpolate.FactoryInterpolation;
 import boofcv.misc.BoofMiscOps;
+import boofcv.struct.border.BorderType;
 import boofcv.struct.calib.CameraPinhole;
 import boofcv.struct.image.GrayF32;
 import boofcv.struct.image.InterleavedU8;
 import boofcv.struct.mesh.VertexMesh;
 import georegression.geometry.UtilPolygons2D_F64;
 import georegression.metric.Intersection2D_F64;
+import georegression.struct.point.Point2D_F32;
 import georegression.struct.point.Point2D_F64;
 import georegression.struct.point.Point3D_F64;
 import georegression.struct.se.Se3_F64;
+import georegression.struct.shapes.Polygon2D_F32;
 import georegression.struct.shapes.Polygon2D_F64;
 import georegression.struct.shapes.Rectangle2D_I32;
 import lombok.Getter;
 import lombok.Setter;
+import org.ddogleg.struct.DogArray;
+import org.ddogleg.struct.FastAccess;
 import org.ddogleg.struct.VerbosePrint;
 import org.jetbrains.annotations.Nullable;

@@ -71,14 +78,37 @@ public class RenderMesh implements VerbosePrint {
     /** Transform from world (what the mesh is in) to the camera view */
     public @Getter final Se3_F64 worldToView = new Se3_F64();

-    // Workspace variables
+    /** If true then a polygon will only be rendered if the surface normal is pointed towards the camera */
+    public @Getter @Setter boolean checkSurfaceNormal = true;
+
+    // Image for texture mapping
+    private InterleavedU8 textureImage = new InterleavedU8(1, 1, 3);
+    private InterpolatePixelMB<InterleavedU8> textureInterp = FactoryInterpolation.bilinearPixelMB(textureImage, BorderType.EXTENDED);
+    private float[] textureValues = new float[3];
+
+    //---------- Workspace variables
     private final Point3D_F64 camera = new Point3D_F64();
     private final Point2D_F64 point = new Point2D_F64();
-    private final Polygon2D_F64 polygon = new Polygon2D_F64();
+
+    // mesh in camera reference frame
+    private final DogArray<Point3D_F64> meshCam = new DogArray<>(Point3D_F64::new);
+    // Mesh projected onto the image
+    private final Polygon2D_F64 polygonProj = new Polygon2D_F64();
+    // Vertex of polygon in the texture image
+    private final Polygon2D_F32 polygonTex = new Polygon2D_F32();
+    // Axis aligned bounding box
     final Rectangle2D_I32 aabb = new Rectangle2D_I32();
+    // Workspace for a sub-triangle in the polygon
+    private final Polygon2D_F64 workTri = new Polygon2D_F64(3);

     @Nullable PrintStream verbose = null;

+    public void setTextureImage( InterleavedU8 textureImage ) {
+        this.textureImage = textureImage;
+        textureInterp.setImage(textureImage);
+        textureValues = new float[textureImage.numBands];
+    }
+
     /**
      * Renders the mesh onto an image. Produces an RGB image and depth image. Must have configured
      * {@link #intrinsics} already and set {@link #worldToView}.
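A minimal usage sketch of the texture path added here, assuming intrinsics and worldToView are directly accessible fields as the render() Javadoc's {@link} references suggest; the pinhole parameters and the textureRgb and mesh variables are placeholders:

var renderer = new RenderMesh();
// Placeholder pinhole parameters: fx, fy, skew, cx, cy, width, height
renderer.intrinsics.fsetK(500, 500, 0, 320, 240, 640, 480);
// worldToView is left as identity here; set a real world-to-camera transform for an off-origin view
renderer.setTextureImage(textureRgb);  // InterleavedU8 texture image
renderer.render(mesh);                 // VertexMesh that carries texture coordinates

When the mesh has no texture coordinates the renderer falls back to the per-surface color path, as the render() loop below shows.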
@@ -89,6 +119,10 @@ public void render( VertexMesh mesh ) {
         // Sanity check to see if intrinsics has been configured
         BoofMiscOps.checkTrue(intrinsics.width > 0 && intrinsics.height > 0, "Intrinsics not set");

+        // Make sure there are normals if it's configured to use them
+        if (checkSurfaceNormal && mesh.normals.size() == 0)
+            mesh.computeNormals();
+
         // Initialize output images
         initializeImages();

@@ -99,8 +133,12 @@ public void render( VertexMesh mesh ) {
         final double cx = intrinsics.cx;
         final double cy = intrinsics.cy;

+        // Keep track of how many meshes were rendered
         int shapesRenderedCount = 0;

+        var worldCamera = new Point3D_F64();
+        worldToView.transformReverse(worldCamera, worldCamera);
+
         for (int shapeIdx = 1; shapeIdx < mesh.offsets.size; shapeIdx++) {
             // First and last point in the polygon
             final int idx0 = mesh.offsets.get(shapeIdx - 1);
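The two added lines recover the camera center in world coordinates by pushing the origin through the inverse transform. Writing worldToView as $x_v = R x_w + T$, applying transformReverse to the origin gives

$$c_w = R^\top(\mathbf{0} - T) = -R^\top T,$$

which is the point isFrontVisible uses below to form a view ray toward each face.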
@@ -111,7 +149,18 @@ public void render( VertexMesh mesh ) {
                 continue;

             // Project points on the shape onto the image and store in polygon
-            polygon.vertexes.reset().reserve(idx1 - idx0);
+            polygonProj.vertexes.reset().reserve(idx1 - idx0);
+            meshCam.reset().reserve(idx1 - idx0);
+
+            if (mesh.texture.size() > 0) {
+                mesh.getTexture(shapeIdx - 1, polygonTex.vertexes);
+            }
+
+            // Prune using normal vector
+            if (mesh.normals.size() > 0 && checkSurfaceNormal) {
+                if (!isFrontVisible(mesh, shapeIdx, idx0, worldCamera)) continue;
+            }
+
             boolean behindCamera = false;
             for (int i = idx0; i < idx1; i++) {
                 Point3D_F64 world = mesh.vertexes.getTemp(mesh.indexes.get(i));
@@ -132,22 +181,46 @@ public void render( VertexMesh mesh ) {
                 double pixelX = normX*fx + cx;
                 double pixelY = normY*fy + cy;

-                polygon.vertexes.grow().setTo(pixelX, pixelY);
+                polygonProj.vertexes.grow().setTo(pixelX, pixelY);
+                meshCam.grow().setTo(camera);
             }

             // Skip if not visible
             if (behindCamera)
                 continue;

-            // Compute the pixels which might be able to see polygon
-            computeBoundingBox(width, height, polygon, aabb);
-
-            projectSurfaceOntoImage(mesh, polygon, shapeIdx-1);
+            if (mesh.texture.size() == 0) {
+                projectSurfaceColor(mesh, polygonProj, shapeIdx - 1);
+            } else {
+                projectSurfaceTexture(meshCam, polygonProj, polygonTex);
+            }

             shapesRenderedCount++;
         }

-        if (verbose != null ) verbose.println("total shapes rendered: " + shapesRenderedCount);
+        if (verbose != null) verbose.println("total shapes rendered: " + shapesRenderedCount);
+    }
+
+    /**
+     * Use the normal vector to see if the front of the mesh is visible. If it's not visible we can skip it
+     *
+     * @return true if visible
+     */
+    private static boolean isFrontVisible( VertexMesh mesh, int shapeIdx, int idx0, Point3D_F64 worldCamera ) {
+        // Get normal in world coordinates
+        Point3D_F64 normal = mesh.normals.getTemp(shapeIdx - 1);
+
+        // vector from the camera to a vertex
+        Point3D_F64 v1 = mesh.vertexes.getTemp(mesh.indexes.get(idx0));
+        v1.x -= worldCamera.x;
+        v1.y -= worldCamera.y;
+        v1.z -= worldCamera.z;
+
+        // compute the dot product
+        double dot = v1.x*normal.x + v1.y*normal.y + v1.z*normal.z;
+
+        // Don't render if we are viewing it from behind
+        return dot < 0.0;
     }

     void initializeImages() {
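The test in isFrontVisible is standard back-face culling: with the camera center $c_w$ computed above, the face's first vertex $p_0$, and its normal $\mathbf{n}$, the face is rendered only when

$$(p_0 - c_w) \cdot \mathbf{n} < 0,$$

i.e. when the outward normal points back toward the camera; faces seen exactly edge-on (zero dot product) are skipped as well.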
@@ -180,7 +253,7 @@ static void computeBoundingBox( int width, int height, Polygon2D_F64 polygon, Re
      * is searched exhaustively. If the projected 2D polygon contains a pixel and the polygon is closer than
      * the current depth of the pixel it is rendered there and the depth image is updated.
      */
-    void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeIdx ) {
+    void projectSurfaceColor( VertexMesh mesh, Polygon2D_F64 polyProj, int shapeIdx ) {
         // TODO temp hack. Best way is to find the distance to the 3D polygon at this point. Instead we will
         // use the depth of the first point.
         //
@@ -190,6 +263,7 @@ void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeI
         Point3D_F64 world = mesh.vertexes.getTemp(vertexIndex);
         worldToView.transform(world, camera);

+        // TODO compute the depth at each pixel
         float depth = (float)camera.z;

         // TODO look at vertexes and get min/max depth. Use that to quickly reject pixels based on depth without
@@ -198,6 +272,8 @@ void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeI
         // The entire surface will have one color
         int color = surfaceColor.surfaceRgb(shapeIdx);

+        computeBoundingBox(intrinsics.width, intrinsics.height, polygonProj, aabb);
+
         // Go through all pixels and see if the points are inside the polygon. If so
         for (int pixelY = aabb.y0; pixelY < aabb.y1; pixelY++) {
             for (int pixelX = aabb.x0; pixelX < aabb.x1; pixelX++) {
@@ -208,7 +284,7 @@ void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeI
                 }

                 point.setTo(pixelX, pixelY);
-                if (!Intersection2D_F64.containsConvex(polygon, point))
+                if (!Intersection2D_F64.containsConvex(polyProj, point))
                     continue;

                 // Update depth and image
@@ -219,6 +295,113 @@ void projectSurfaceOntoImage( VertexMesh mesh, Polygon2D_F64 polygon, int shapeI
         }
     }

+    /**
+     * Projection with texture mapping. Breaks the polygon up into triangles and uses Barycentric coordinates to
+     * map pixels to texture map coordinates.
+     *
+     * @param mesh 3D location of vertexes in the mesh
+     * @param polyProj Projected pixels of mesh
+     * @param polyText Texture coordinates of the mesh
+     */
+    void projectSurfaceTexture( FastAccess<Point3D_F64> mesh, Polygon2D_F64 polyProj, Polygon2D_F32 polyText ) {
+        // If the mesh has more than 3 sides, break it up into triangles using the first vertex as a pivot
+        // This works because the mesh has to be convex
+        for (int vertC = 2; vertC < polyProj.size(); vertC++) {
+            int vertA = 0;
+            int vertB = vertC - 1;
+
+            float Z0 = (float)mesh.get(vertA).z;
+            float Z1 = (float)mesh.get(vertB).z;
+            float Z2 = (float)mesh.get(vertC).z;
+
+            Point2D_F64 r0 = polyProj.get(vertA);
+            Point2D_F64 r1 = polyProj.get(vertB);
+            Point2D_F64 r2 = polyProj.get(vertC);
+
+            // Pre-compute part of Barycentric Coordinates
+            double x0 = r2.x - r0.x;
+            double y0 = r2.y - r0.y;
+            double x1 = r1.x - r0.x;
+            double y1 = r1.y - r0.y;
+
+            double d00 = x0*x0 + y0*y0;
+            double d01 = x0*x1 + y0*y1;
+            double d11 = x1*x1 + y1*y1;
+
+            double denom = d00*d11 - d01*d01;
+
+            // Compute coordinate on texture image
+            Point2D_F32 t0 = polyText.get(vertC);
+            Point2D_F32 t1 = polyText.get(vertB);
+            Point2D_F32 t2 = polyText.get(vertA);
+
+            // Do the polygon intersection with the triangle in question only
+            workTri.get(0).setTo(polyProj.get(vertA));
+            workTri.get(1).setTo(polyProj.get(vertB));
+            workTri.get(2).setTo(polyProj.get(vertC));
+
+            // TODO look at vertexes and get min/max depth. Use that to quickly reject pixels based on depth without
+            // convex intersection or computing the depth at that pixel on this surface
+
+            computeBoundingBox(intrinsics.width, intrinsics.height, workTri, aabb);
+
+            // Go through all pixels and see if the points are inside the polygon. If so
+            for (int pixelY = aabb.y0; pixelY < aabb.y1; pixelY++) {
+                double y2 = pixelY - r0.y;
+
+                for (int pixelX = aabb.x0; pixelX < aabb.x1; pixelX++) {
+
+                    point.setTo(pixelX, pixelY);
+                    if (!Intersection2D_F64.containsConvex(workTri, point))
+                        continue;
+
+                    // See if this is the closest point appearing at this pixel
+                    float pixelDepth = depthImage.unsafe_get(pixelX, pixelY);
+
+                    // Compute rest of Barycentric
+                    double x2 = pixelX - r0.x;
+                    double d20 = x2*x0 + y2*y0;
+                    double d21 = x2*x1 + y2*y1;
+
+                    float alpha = (float)((d11*d20 - d01*d21)/denom);
+                    float beta = (float)((d00*d21 - d01*d20)/denom);
+                    float gamma = 1.0f - alpha - beta;
+
+                    // depth of the mesh at this point
+                    float depth = alpha*Z0 + beta*Z1 + gamma*Z2;
+
+                    if (!Float.isNaN(pixelDepth) && depth >= pixelDepth) {
+                        continue;
+                    }
+
+                    float u = alpha*t0.x + beta*t1.x + gamma*t2.x;
+                    float v = alpha*t0.y + beta*t1.y + gamma*t2.y;
+
+                    float pixTexX = u*(textureImage.width - 1);
+                    float pixTexY = (1.0f - v)*(textureImage.height - 1);
+
+                    int color = interpolateTextureRgb(pixTexX, pixTexY);
+
+                    // Update depth and image
+                    // Make sure the alpha channel is set to 100% in RGBA format
+                    depthImage.unsafe_set(pixelX, pixelY, depth);
+                    rgbImage.set24(pixelX, pixelY, color);
+                }
+            }
+        }
+    }
+
+    /**
+     * Gets the RGB color using interpolation at the specified pixel coordinate in the texture image
+     */
+    private int interpolateTextureRgb( float px, float py ) {
+        textureInterp.get(px, py, textureValues);
+        int r = (int)(textureValues[0] + 0.5f);
+        int g = (int)(textureValues[1] + 0.5f);
+        int b = (int)(textureValues[2] + 0.5f);
+        return (r << 16) | (g << 8) | b;
+    }
+
     @Override public void setVerbose( @Nullable PrintStream out, @Nullable Set<String> configuration ) {
         verbose = BoofMiscOps.addPrefix(this, out);
     }
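For reference, the weights computed in projectSurfaceTexture are the barycentric coordinates of a pixel $p$ inside the screen-space triangle $(r_0, r_1, r_2)$. With edge vectors $e_0 = r_2 - r_0$, $e_1 = r_1 - r_0$, $e_2 = p - r_0$ and dot products $d_{ij} = e_i \cdot e_j$, the code evaluates

$$\alpha = \frac{d_{11}d_{20} - d_{01}d_{21}}{d_{00}d_{11} - d_{01}^2}, \qquad \beta = \frac{d_{00}d_{21} - d_{01}d_{20}}{d_{00}d_{11} - d_{01}^2}, \qquad \gamma = 1 - \alpha - \beta,$$

where $\alpha$ weights the vertex that $e_0$ points to ($r_2$), $\beta$ weights $r_1$, and $\gamma$ weights $r_0$. The same weights are then used to blend the per-vertex depths and the (u, v) texture coordinates at each pixel, and the interpolated $v$ is flipped ($1 - v$) when converting to texture pixel coordinates so the texture's row order matches the image.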
