Mirror of https://github.com/facebookresearch/pytorch3d.git
Support for moving the renderer to a new device
Summary: Support for moving all the tensors of the renderer to another device by calling `renderer.to(new_device)`.

Currently the `MeshRenderer`, `MeshRasterizer` and `SoftPhongShader` (and the other shaders) are all of type `nn.Module`, which already supports moving the tensors of submodules (defined as class attributes) to a different device. However, the class attributes of the rasterizer and shader (e.g. cameras, lights, materials) are of type `TensorProperties`, not `nn.Module`, so we need to explicitly define a `to` method to move these tensors to the target device. Note that `TensorProperties` already has a `to` method, so we only need to call `cameras.to(device)` and don't need to worry about the internal tensors. The other option is of course making these other classes (cameras, lights, etc.) also of type `nn.Module`.

Reviewed By: gkioxari

Differential Revision: D23885107

fbshipit-source-id: d71565c442181f739de4d797076ed5d00fb67f8e
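For illustration, here is a minimal sketch of the pattern the summary describes. It is not the actual PyTorch3D implementation; the class name `ShaderLike` is made up for this example. It shows an `nn.Module` whose `TensorProperties` attributes (cameras, lights, materials) are moved by a hand-written `to` method that delegates to the existing `TensorProperties.to`:

import torch
import torch.nn as nn
from pytorch3d.renderer import FoVPerspectiveCameras, Materials, PointLights


class ShaderLike(nn.Module):
    # Hypothetical stand-in for a shader; holds TensorProperties attributes.
    def __init__(self, cameras, lights, materials):
        super().__init__()
        self.cameras = cameras
        self.lights = lights
        self.materials = materials

    def to(self, device):
        # cameras, lights and materials are TensorProperties, not nn.Module,
        # so nn.Module.to() would not see their tensors; call their own
        # TensorProperties.to() explicitly instead.
        self.cameras = self.cameras.to(device)
        self.lights = self.lights.to(device)
        self.materials = self.materials.to(device)
        return self


device = torch.device("cpu")
shader = ShaderLike(
    cameras=FoVPerspectiveCameras(device=device),
    lights=PointLights(device=device),
    materials=Materials(device=device),
)
if torch.cuda.is_available():
    shader.to(torch.device("cuda:0"))
    assert shader.lights.ambient_color.device.type == "cuda"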
committed by Facebook GitHub Bot
parent: b1eee579fd
commit: 956d3a010c
@@ -1042,3 +1042,67 @@ class TestRenderMeshes(TestCaseMixin, unittest.TestCase):
        )

        self.assertClose(rgb, image_ref, atol=0.05)

    def test_to(self):
        # Test moving all the tensors in the renderer to a new device
        # to support multigpu rendering.
        device1 = torch.device("cpu")

        R, T = look_at_view_transform(1500, 0.0, 0.0)

        # Init shader settings
        materials = Materials(device=device1)
        lights = PointLights(device=device1)
        lights.location = torch.tensor([0.0, 0.0, +1000.0], device=device1)[None]

        raster_settings = RasterizationSettings(
            image_size=256, blur_radius=0.0, faces_per_pixel=1
        )
        cameras = FoVPerspectiveCameras(
            device=device1, R=R, T=T, aspect_ratio=1.0, fov=60.0, zfar=100
        )
        rasterizer = MeshRasterizer(cameras=cameras, raster_settings=raster_settings)

        blend_params = BlendParams(
            1e-4,
            1e-4,
            background_color=torch.zeros(3, dtype=torch.float32, device=device1),
        )

        shader = SoftPhongShader(
            lights=lights,
            cameras=cameras,
            materials=materials,
            blend_params=blend_params,
        )
        renderer = MeshRenderer(rasterizer=rasterizer, shader=shader)

        def _check_props_on_device(renderer, device):
            self.assertEqual(renderer.rasterizer.cameras.device, device)
            self.assertEqual(renderer.shader.cameras.device, device)
            self.assertEqual(renderer.shader.lights.device, device)
            self.assertEqual(renderer.shader.lights.ambient_color.device, device)
            self.assertEqual(renderer.shader.materials.device, device)
            self.assertEqual(renderer.shader.materials.ambient_color.device, device)

        mesh = ico_sphere(2, device1)
        verts_padded = mesh.verts_padded()
        textures = TexturesVertex(
            verts_features=torch.ones_like(verts_padded, device=device1)
        )
        mesh.textures = textures
        _check_props_on_device(renderer, device1)

        # Test rendering on cpu
        output_images = renderer(mesh)
        self.assertEqual(output_images.device, device1)

        # Move renderer and mesh to another device and re render
        # This also tests that background_color is correctly moved to
        # the new device
        device2 = torch.device("cuda:0")
        renderer.to(device2)
        mesh = mesh.to(device2)
        _check_props_on_device(renderer, device2)
        output_images = renderer(mesh)
        self.assertEqual(output_images.device, device2)
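The test only exercises a single CPU-to-GPU move, but the "multigpu rendering" comment points at the intended use: build the renderer once and then place it on whichever device should do the rendering. A rough usage sketch under assumed conditions (at least one CUDA device available; camera distance, image size and lighting are placeholder values, not taken from the test or the library):

import torch
from pytorch3d.renderer import (
    FoVPerspectiveCameras,
    MeshRasterizer,
    MeshRenderer,
    PointLights,
    RasterizationSettings,
    SoftPhongShader,
    TexturesVertex,
    look_at_view_transform,
)
from pytorch3d.utils import ico_sphere

# Build the renderer and the mesh on CPU first.
R, T = look_at_view_transform(2.7, 0.0, 0.0)
cameras = FoVPerspectiveCameras(R=R, T=T)
renderer = MeshRenderer(
    rasterizer=MeshRasterizer(
        cameras=cameras, raster_settings=RasterizationSettings(image_size=128)
    ),
    shader=SoftPhongShader(cameras=cameras, lights=PointLights()),
)

mesh = ico_sphere(2)
mesh.textures = TexturesVertex(verts_features=torch.ones_like(mesh.verts_padded()))

# Move the whole renderer (cameras, lights, materials included) from GPU to GPU
# and render on each one in turn.
for i in range(torch.cuda.device_count()):
    device = torch.device(f"cuda:{i}")
    renderer.to(device)
    images = renderer(mesh.to(device))
    print(device, images.shape)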