Mirror of https://github.com/facebookresearch/pytorch3d.git
Allow single offset in offset_verts
Summary: It is common when trying things out to want to move a whole mesh or point cloud by the same amount. Here we allow the offset functions to broadcast. Also add a sanity check to join_meshes_as_scene, which is easy to call wrongly.

Reviewed By: nikhilaravi

Differential Revision: D25980593

fbshipit-source-id: cdf1568e1317e3b81ad94ed4e608ba7eef81290b
Parent: d60c52df4a
Commit: ddebdfbcd7
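For orientation, a minimal usage sketch of the new behavior (hypothetical, not part of the commit; it assumes a Meshes object named mesh built elsewhere):

import torch

# Old pattern: the offset had to match self.verts_packed() exactly, e.g.
#   mesh.offset_verts_(-center.expand(N, 3))
# New pattern: a single (3,) offset broadcasts to every vertex.
center = mesh.verts_packed().mean(0)  # (3,) centroid of all packed vertices
mesh.offset_verts_(-center)           # moves the whole mesh to the origin, in place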
@@ -255,7 +255,7 @@
     "N = verts.shape[0]\n",
     "center = verts.mean(0)\n",
     "scale = max((verts - center).abs().max(0)[0])\n",
-    "mesh.offset_verts_(-center.expand(N, 3))\n",
+    "mesh.offset_verts_(-center)\n",
     "mesh.scale_verts_((1.0 / float(scale)));"
    ]
   },
@@ -1255,12 +1255,15 @@ class Meshes(object):
         Add an offset to the vertices of this Meshes. In place operation.

         Args:
-            vert_offsets_packed: A Tensor of the same shape as self.verts_packed
-                giving offsets to be added to all vertices.
+            vert_offsets_packed: A Tensor of shape (3,) or the same shape as
+                self.verts_packed, giving offsets to be added
+                to all vertices.
         Returns:
             self.
         """
         verts_packed = self.verts_packed()
+        if vert_offsets_packed.shape == (3,):
+            vert_offsets_packed = vert_offsets_packed.expand_as(verts_packed)
         if vert_offsets_packed.shape != verts_packed.shape:
             raise ValueError("Verts offsets must have dimension (all_v, 3).")
         # update verts packed
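For context, a minimal sketch of the mechanism the added lines rely on, in plain PyTorch (the tensor names below are stand-ins, not library code):

import torch

verts_packed = torch.zeros(10, 3)                    # stand-in for self.verts_packed()
vert_offsets_packed = torch.tensor([1.0, 2.0, 3.0])  # single (3,) offset

# torch.Size is a tuple subclass, so the shape test compares cleanly with (3,).
assert vert_offsets_packed.shape == (3,)

# expand_as returns a broadcast view (no copy) with the same shape as verts_packed,
# which then passes the existing shape check unchanged.
expanded = vert_offsets_packed.expand_as(verts_packed)
assert expanded.shape == verts_packed.shape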
@@ -1581,6 +1584,12 @@ def join_meshes_as_scene(
     Returns:
         new Meshes object containing a single mesh
     """
+    if not isinstance(include_textures, (bool, int)):
+        # We want to avoid letting join_meshes_as_scene(mesh1, mesh2) silently
+        # do the wrong thing.
+        raise ValueError(
+            f"include_textures argument cannot be {type(include_textures)}"
+        )
     if isinstance(meshes, List):
         meshes = join_meshes_as_batch(meshes, include_textures=include_textures)

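The new check guards against the mistake named in the inline comment: passing a second mesh where include_textures belongs. A hypothetical illustration (assuming mesh1 and mesh2 are Meshes objects and that join_meshes_as_scene is importable from pytorch3d.structures):

from pytorch3d.structures import join_meshes_as_scene

# Intended usage: pass a list of meshes to merge into a single scene.
scene = join_meshes_as_scene([mesh1, mesh2], include_textures=True)

# Easy mistake: mesh2 silently lands in the include_textures slot.
# With this commit the call raises ValueError instead of doing the wrong thing.
scene = join_meshes_as_scene(mesh1, mesh2)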
@@ -793,12 +793,16 @@ class Pointclouds(object):
         Translate the point clouds by an offset. In place operation.

         Args:
-            offsets_packed: A Tensor of the same shape as self.points_packed
-                giving offsets to be added to all points.
+            offsets_packed: A Tensor of shape (3,) or the same shape
+                as self.points_packed giving offsets to be added to
+                all points.
+
         Returns:
             self.
         """
         points_packed = self.points_packed()
+        if offsets_packed.shape == (3,):
+            offsets_packed = offsets_packed.expand_as(points_packed)
         if offsets_packed.shape != points_packed.shape:
             raise ValueError("Offsets must have dimension (all_p, 3).")
         self._points_packed = points_packed + offsets_packed
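The same convenience applies to point clouds; a hypothetical usage sketch (assuming a Pointclouds object named clouds built elsewhere):

import torch

translation = torch.tensor([0.0, 0.0, 1.0])  # single (3,) offset

# In place: every point in every cloud shifts by +1 along z.
clouds.offset_(translation)

# Out-of-place variant (used in the tests below) returns a new Pointclouds.
shifted = clouds.offset(translation)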
@@ -1,5 +1,6 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.

+import itertools
 import random
 import unittest

@@ -445,7 +446,7 @@ class TestMeshes(TestCaseMixin, unittest.TestCase):
         mesh = TestMeshes.init_mesh(N, 10, 100)
         all_v = mesh.verts_packed().size(0)
         verts_per_mesh = mesh.num_verts_per_mesh()
-        for force in [0, 1]:
+        for force, deform_shape in itertools.product([0, 1], [(all_v, 3), 3]):
             if force:
                 # force mesh to have computed attributes
                 mesh._compute_packed(refresh=True)
@@ -455,7 +456,7 @@ class TestMeshes(TestCaseMixin, unittest.TestCase):
                 mesh._compute_face_areas_normals(refresh=True)
                 mesh._compute_vertex_normals(refresh=True)

-            deform = torch.rand((all_v, 3), dtype=torch.float32, device=mesh.device)
+            deform = torch.rand(deform_shape, dtype=torch.float32, device=mesh.device)
             # new meshes class to hold the deformed mesh
             new_mesh_naive = naive_offset_verts(mesh, deform)

@@ -465,10 +466,14 @@ class TestMeshes(TestCaseMixin, unittest.TestCase):
             verts_cumsum = torch.cumsum(verts_per_mesh, 0).tolist()
             verts_cumsum.insert(0, 0)
             for i in range(N):
+                item_offset = (
+                    deform
+                    if deform.ndim == 1
+                    else deform[verts_cumsum[i] : verts_cumsum[i + 1]]
+                )
                 self.assertClose(
                     new_mesh.verts_list()[i],
-                    mesh.verts_list()[i]
-                    + deform[verts_cumsum[i] : verts_cumsum[i + 1]],
+                    mesh.verts_list()[i] + item_offset,
                 )
                 self.assertClose(
                     new_mesh.verts_list()[i], new_mesh_naive.verts_list()[i]
@@ -1,6 +1,6 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.

-
+import itertools
 import random
 import unittest

@@ -516,13 +516,13 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         clouds = self.init_cloud(N, 100, 10)
         all_p = clouds.points_packed().size(0)
         points_per_cloud = clouds.num_points_per_cloud()
-        for force in (False, True):
+        for force, deform_shape in itertools.product((0, 1), [(all_p, 3), 3]):
             if force:
                 clouds._compute_packed(refresh=True)
                 clouds._compute_padded()
                 clouds.padded_to_packed_idx()

-            deform = torch.rand((all_p, 3), dtype=torch.float32, device=clouds.device)
+            deform = torch.rand(deform_shape, dtype=torch.float32, device=clouds.device)
             new_clouds_naive = naive_offset(clouds, deform)

             new_clouds = clouds.offset(deform)
@@ -530,10 +530,14 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
             points_cumsum = torch.cumsum(points_per_cloud, 0).tolist()
             points_cumsum.insert(0, 0)
             for i in range(N):
+                item_offset = (
+                    deform
+                    if deform.ndim == 1
+                    else deform[points_cumsum[i] : points_cumsum[i + 1]]
+                )
                 self.assertClose(
                     new_clouds.points_list()[i],
-                    clouds.points_list()[i]
-                    + deform[points_cumsum[i] : points_cumsum[i + 1]],
+                    clouds.points_list()[i] + item_offset,
                 )
                 self.assertClose(
                     clouds.normals_list()[i], new_clouds_naive.normals_list()[i]