From a0f79318c5b16ef5d14bbff8840bc0dc045d078d Mon Sep 17 00:00:00 2001 From: Nikhila Ravi Date: Fri, 11 Jun 2021 14:33:01 -0700 Subject: [PATCH] Culling to frustrum bug fix Summary: When `z_clip_value = None` and faces are outside the view frustum the shape of one of the tensors in `clip.py` is incorrect. `faces_num_clipped_verts` should be (F,) but it was (F,3). Added a new test to ensure this case is handled. Reviewed By: bottler Differential Revision: D29051282 fbshipit-source-id: 5f4172ba4d4a75d928404dde9abf48aef18c68bd --- pytorch3d/renderer/mesh/clip.py | 2 +- .../test_render_mesh_clipped_cam_dist=0.5.jpg | Bin 7393 -> 0 bytes tests/test_render_meshes_clipped.py | 43 +++++++++++++----- 3 files changed, 33 insertions(+), 12 deletions(-) delete mode 100644 tests/data/test_render_mesh_clipped_cam_dist=0.5.jpg diff --git a/pytorch3d/renderer/mesh/clip.py b/pytorch3d/renderer/mesh/clip.py index 0e2aae72..de5d0f6b 100644 --- a/pytorch3d/renderer/mesh/clip.py +++ b/pytorch3d/renderer/mesh/clip.py @@ -372,7 +372,7 @@ def clip_faces( # (F) dim tensor containing the number of clipped vertices in each triangle faces_num_clipped_verts = faces_clipped_verts.sum(1) else: - faces_num_clipped_verts = torch.zeros([F, 3], device=device) + faces_num_clipped_verts = torch.zeros([F], device=device) # If no triangles need to be clipped or culled, avoid unnecessary computation # and return early diff --git a/tests/data/test_render_mesh_clipped_cam_dist=0.5.jpg b/tests/data/test_render_mesh_clipped_cam_dist=0.5.jpg deleted file mode 100644 index 8ea2545edd28f9f8d6ebaa2de13dcffdd0a85d75..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 7393 zcmeG>4Lp?T_Rq{1gH%3V@;yt7L6oRn+KjTpB4rq=QCH-au~#LMn;2=;HWh_Fu42Y8 zzLnacR2!+*ZW1F(Xst@V8p_9rVc!2UBi;UcyKQ%W-QB&vdtV;!nRCv2=6pWqybrV) z{RybJIlDRo3MrJC{n4zq#ral|d*C86{>*(oy z^s$xcM@IAJ>gka_F`Ku*+Sb;VXll3g(}kZ|+1M_W3c)BUDk{%V)|xp}YoVc@;lh9U zLMwr)0v2Ea4r2tcsu-Lq2E74@P)|HYc7azH28)x!%M%n7m1aPOQWb#3;BZ(u93C$x z2f1S)9muKT)eNnd$*WTX2u5MZ!aYYb73MBKcTK}f@cd(&!0qvhN}6-Dv~`S4Oi5{Okno5dkvpUQx@+%O3H$bceSnpm zl6vg;iIaaz<7Q>^a&q(X3(kMTFD<)pvHVii^&2<8tN#9nTMc&`o9^9je$dkK=<$=z zr(NAWy)OobhDSuBFUQ8Ec445M)7O+?KeI~}+J%*q!^sh(c44qP;lioP;SH_j)s|5S z0b%M!3->4>%a3NByQVnT#_PF8;C6wM=Et@j#xJC%$t-(6!{UFHWv>nUyIn0n8Ha(v z!>Ix?7)~Q`{O4lPjl7Z=+VE;!l+kx zW>!(6pwhJfVXC=9qB&vk9Q z^G{BfcblD99r$pYU3mR2U#H7ow4QuYkFBw(HQdxUk)QfA&$`Oh;rg9M2$^}^oSEHL zY8{^#c3tc@r*}O*+Hu5QPt{NJzU#Kr+a4Fx<5~ai+YqlVzmn~iIQY`UYwl2l zWzTTYom`VRF{;KYx*~h5zIG?sBaDzCme|!8TTpZ!kBNLW!*!M z96udsBig6r@*>g}lS%QGG@eHRJ0VN#RU?VjjI*NBP*6IrqN^i#ijvBik?KPhy3ZdQ zb0e3|LqXT2di*W(Tv7Uoi3v+$i!XENc`9ZLt8g-FGYZIxu9H03-BGiXyIpAavL1?vvw(^;y^;yD5 z6bwBe+k*_&ei{6bZ0~S?t4uOSF)wclP8s5fX+#-c8szh2)qZpibRX~^!B}vAS7%p*4NSqudVd>`c`A$&uV-#FXFxX8VY9Wie zJ>|YFTs$bYOPpIBnb$wCqCC+-y?msA2B;L|oy$82+^9@kz-W9xZo1GAXIBKixM@Pj z?Q@HLF)naaObT!&@GKE1(!IMYTHw9|sd+2URn3H&fZPS-XqhZg5a1^EqSz1TWU7!_ohifd62L!|(1 zhg`|H?d7D=Q}CARiS4Yo!n9w(9?+3W6xWUk$}6t4rz4)&Cn$(&Dz@ACqt&BoP-f#} z-hToGMCNmKM_rny{m<(DZM*B@RU@Ipx!yr2@NIVkPm_Km(@Es0$>0y)Mb7jvUyPyv zv&d~4vc>SBiaiv7K$G9OWI;{$NSXADA>q>a!uwksC`1NUcSDEg5$lrjHPAn?GM9jHeJYBqe=Hz zv!!YIK?3|{l1#@~7Hn9sl^%c4Xpg-v8!7am%H{(^dAc`G$N1Modur?1A=`*Jvx22{ zy;mA@K;nMb^?Gll75sgdbmAefz%dl$-AlIh{9ZY!MACncss5Ddc-gpj{&{BMRvrj< z86XxT#4E9|v;C57kdF0&J?x!3@Ebd{#!hz>BvrgrEby6zI5RUFSOm9UF43GHB*j-# zd;h}jZS8T?6Nsz(CjyE)BH6U`M=1ZOVMOa>}8YT3M)Nv(5*9iCW))g2w|A zY{;=`C>R$U!QIpF%jIb%%SL-yYI+iErTQ(C#{9i=?bqgH-Al#aE+^lXes9CQPQ?G5 zl$0eWXhHtaBhVkYDgMe4=zmS|0}X16PypWt(><6M+nr#;>5a`v&QMMK!uLB5)8i<3 zxG1IAV~L$nlbuM7O+SZ%ON_;zjCx2>R<0slcjtS< z_g3U%0bqAd5}?BwV-m8n=IRHx zEe#eO>EB;tRV8q}xJz)f&*ihv5^AijQFQkgljhz_j2fF&S3-3?>P&Hn8Y5Udvqacm zXT3netG}Noo320Y?%HsMu?Ieeexd7caKyMGKn9X-2E@bXf|K6S;qfttW`cZGG& zy_lQ*;IU@0*Zo^XF-{F4Rr4uCirBtgT)KsIR;ZNu%!nqrb3r0Byla{Cyy+H6A}AoP zKHx64Y89vMcfTO%{8JWE!8aeB)AYzwQUe+1i+n0ZLq-r3@Iyvt7vA7RoZ2{nupel) zw-qayp@887k#&;lGD)YYXwijH3pW;-U$DQ4vq@hE%49c}K}LHzncoX>e?~!ID|35_ zSglojtdK$$+H=RRtYAM|!tD2gBtPc%QU;V6;yb(W^IJL^&t1=_Y4x8HEW6&T{SBy23 zBCR2^oTuU6QtAYsXOMY?5tVw57b&ljGA^s%NUXBhl9Kl2;)1W9msqddOB@aMQ2D&$vDSug zzX%@}yS(%VWZHq`%0iW0EVpmNwqDWvQ>lwqpa0quk2_eo(Y=4Y$eML(5T2latA6_J z!#S&R3$34!n!cyHSb1)%*2-4`J zi;C_PWF3}8ceDD4hon-s&0&3-4LQ`z7@Y80`+b#eX2HC$W~iihSdT~MVks@Xd5u(fNeP@)b*!qY8a&My>si-M`?#Ol-B}+)q4S8DXW9&` zq8Rxmn|)dNx;AUGu`8xl51fd%w;`W2*v*Ye>BjX zk5=Vn(#`?pG(o(oGE-=fbLI4Xo5WceZ1v{e>z+IOkDUAb90_+e+>1(y-AOO%t-Z5F znDNAjTNKYC6&?Hsfp31nIymiXWx?a3szb85-xVH$dxjKY;ZU^GElNaXC=?IH>q4M-VM`P{}77Ms6i|m>W zvWAe%aw_2~Cj)JhWWFV5@H-LFdeY*~?Qn$+J?%o>j*|YOD+%$lEu9Ch>=M(u(%dYD zZm%@SG0MuP_D0v|aEeu-H#_IOJ=I=~v)SA|2Pwt~;CoRWXHbkSH32**JPD_2N~JXV zaCWC|liDB0-^a3{+%O^WYLUfD68@16-LMXOJeegof(++0JUw#PonsuJjm!70zZW6U zN-AX}M&;NmBKc?iEN*Y!mv?5^&id0GRBal6%*gt5WX|etnne{K;YNk;-P;Mh?wid< GAN(Ix=};U1 diff --git a/tests/test_render_meshes_clipped.py b/tests/test_render_meshes_clipped.py index 845da8e1..21d318eb 100644 --- a/tests/test_render_meshes_clipped.py +++ b/tests/test_render_meshes_clipped.py @@ -15,7 +15,11 @@ import numpy as np import torch from common_testing import TestCaseMixin, get_tests_dir, load_rgb_image from pytorch3d.io import save_obj -from pytorch3d.renderer.cameras import FoVPerspectiveCameras, look_at_view_transform +from pytorch3d.renderer.cameras import ( + FoVPerspectiveCameras, + look_at_view_transform, + PerspectiveCameras, +) from pytorch3d.renderer.lighting import PointLights from pytorch3d.renderer.mesh import ( ClipFrustum, @@ -27,8 +31,9 @@ from pytorch3d.renderer.mesh.rasterize_meshes import _RasterizeFaceVerts from pytorch3d.renderer.mesh.rasterizer import MeshRasterizer, RasterizationSettings from pytorch3d.renderer.mesh.renderer import MeshRenderer from pytorch3d.renderer.mesh.shader import SoftPhongShader +from pytorch3d.renderer.mesh.textures import TexturesVertex from pytorch3d.structures.meshes import Meshes - +from pytorch3d.utils import torus # If DEBUG=True, save out images generated in the tests for debugging. # All saved images have prefix DEBUG_ @@ -97,9 +102,9 @@ class TestRenderMeshesClipping(TestCaseMixin, unittest.TestCase): return mesh, verts return mesh - def test_cube_mesh_render(self): + def debug_cube_mesh_render(self): """ - End-End test of rendering a cube mesh with texture + End-End debug run of rendering a cube mesh with texture from decreasing camera distances. The camera starts outside the cube and enters the inside of the cube. """ @@ -132,22 +137,16 @@ class TestRenderMeshesClipping(TestCaseMixin, unittest.TestCase): # the camera enters the cube. Check the output looks correct. images_list = [] dists = np.linspace(0.1, 2.5, 20)[::-1] + for d in dists: R, T = look_at_view_transform(d, 0, 0) T[0, 1] -= 0.1 # move down in the y axis cameras = FoVPerspectiveCameras(device=device, R=R, T=T, fov=90) images = renderer(mesh, cameras=cameras) rgb = images[0, ..., :3].cpu().detach() - filename = "DEBUG_cube_dist=%.1f.jpg" % d im = (rgb.numpy() * 255).astype(np.uint8) images_list.append(im) - # Check one of the images where the camera is inside the mesh - if d == 0.5: - filename = "test_render_mesh_clipped_cam_dist=0.5.jpg" - image_ref = load_rgb_image(filename, DATA_DIR) - self.assertClose(rgb, image_ref, atol=0.05) - # Save a gif of the output - this should show # the camera moving inside the cube. if DEBUG: @@ -655,3 +654,25 @@ class TestRenderMeshesClipping(TestCaseMixin, unittest.TestCase): double_hit = torch.tensor([0, 0, -1], device=device) check_double_hit = any(torch.allclose(i, double_hit) for i in unique_vals) self.assertFalse(check_double_hit) + + def test_mesh_outside_frustrum(self): + """ + Test the case where the mesh is completely outside the view + frustrum so all faces are culled and z_clip_value = None. + """ + device = "cuda:0" + mesh = torus(20.0, 85.0, 32, 16, device=device) + tex = TexturesVertex(verts_features=torch.rand_like(mesh.verts_padded())) + mesh.textures = tex + raster_settings = RasterizationSettings(image_size=512, cull_to_frustum=True) + R, T = look_at_view_transform(1.0, 0.0, 0.0) + cameras = PerspectiveCameras(device=device, R=R, T=T) + renderer = MeshRenderer( + rasterizer=MeshRasterizer(cameras=cameras, raster_settings=raster_settings), + shader=SoftPhongShader(cameras=cameras, device=device), + ) + images = renderer(mesh) + + # Mesh is completely outside the view frustrum + # The image should be white. + self.assertClose(images[0, ..., :3], torch.ones_like(images[0, ..., :3]))