Mirror of https://github.com/facebookresearch/pytorch3d.git, synced 2025-12-20 14:20:38 +08:00
Address black + isort fbsource linter warnings
Summary: Address black + isort fbsource linter warnings from D20558374 (previous diff)

Reviewed By: nikhilaravi

Differential Revision: D20558373

fbshipit-source-id: d3607de4a01fb24c0d5269634563a7914bddf1c8
Committed by: Facebook GitHub Bot
Parent: eb512ffde3
Commit: d57daa6f85
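The change itself is purely mechanical: isort regroups the imports of the test module (standard library first, then third-party packages, then local test helpers), and black joins previously wrapped calls back onto a single line wherever they fit within its line-length limit. As an illustration only, not part of the commit, the sketch below runs black's Python formatting API on one of the wrapped snippets that this diff collapses; it assumes a recent black release where black.format_str and black.Mode are available, and the default 88-character limit used here may not match the exact fbsource configuration.

import black

# One of the wrapped calls that this diff collapses onto a single line.
wrapped = (
    "features = torch.rand(\n"
    "    (p, C), dtype=torch.float32, device=device\n"
    ")\n"
)

# format_str reparses and reprints the source; with the default Mode
# (88-character lines) the call fits on one line, matching the new side
# of the diff below.
print(black.format_str(wrapped, mode=black.Mode()), end="")
# Output: features = torch.rand((p, C), dtype=torch.float32, device=device)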
@@ -1,13 +1,12 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
 
-import numpy as np
 import unittest
 
+import numpy as np
 import torch
 
-from pytorch3d.structures.pointclouds import Pointclouds
-
 from common_testing import TestCaseMixin
+from pytorch3d.structures.pointclouds import Pointclouds
 
 
 class TestPointclouds(TestCaseMixin, unittest.TestCase):
@@ -52,13 +51,11 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         normals_list, features_list = None, None
         if with_normals:
             normals_list = [
-                torch.rand((i, 3), device=device, dtype=torch.float32)
-                for i in p
+                torch.rand((i, 3), device=device, dtype=torch.float32) for i in p
             ]
         if with_features:
             features_list = [
-                torch.rand((i, channels), device=device, dtype=torch.float32)
-                for i in p
+                torch.rand((i, channels), device=device, dtype=torch.float32) for i in p
             ]
 
         if lists_to_tensors:
@@ -68,9 +65,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         if with_features:
             features_list = torch.stack(features_list)
 
-        return Pointclouds(
-            points_list, normals=normals_list, features=features_list
-        )
+        return Pointclouds(points_list, normals=normals_list, features=features_list)
 
     def test_simple(self):
         device = torch.device("cuda:0")
@@ -81,12 +76,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                 device=device,
             ),
             torch.tensor(
-                [
-                    [0.1, 0.3, 0.3],
-                    [0.6, 0.7, 0.8],
-                    [0.2, 0.3, 0.4],
-                    [0.1, 0.5, 0.3],
-                ],
+                [[0.1, 0.3, 0.3], [0.6, 0.7, 0.8], [0.2, 0.3, 0.4], [0.1, 0.5, 0.3]],
                 dtype=torch.float32,
                 device=device,
             ),
@@ -111,9 +101,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         self.assertClose(
             clouds.cloud_to_packed_first_idx().cpu(), torch.tensor([0, 3, 7])
         )
-        self.assertClose(
-            clouds.num_points_per_cloud().cpu(), torch.tensor([3, 4, 5])
-        )
+        self.assertClose(clouds.num_points_per_cloud().cpu(), torch.tensor([3, 4, 5]))
         self.assertClose(
             clouds.padded_to_packed_idx().cpu(),
             torch.tensor([0, 1, 2, 5, 6, 7, 8, 10, 11, 12, 13, 14]),
@@ -129,11 +117,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
             "points_padded",
             "padded_to_packed_idx",
         ]
-        public_normals_getters = [
-            "normals_list",
-            "normals_packed",
-            "normals_padded",
-        ]
+        public_normals_getters = ["normals_list", "normals_packed", "normals_padded"]
         public_features_getters = [
             "features_list",
             "features_packed",
@@ -147,17 +131,13 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         points_data = [torch.zeros((max_len, 3)).uniform_() for i in lengths]
         normals_data = [torch.zeros((max_len, 3)).uniform_() for i in lengths]
         features_data = [torch.zeros((max_len, C)).uniform_() for i in lengths]
-        for length, p, n, f in zip(
-            lengths, points_data, normals_data, features_data
-        ):
+        for length, p, n, f in zip(lengths, points_data, normals_data, features_data):
             p[length:] = 0.0
             n[length:] = 0.0
             f[length:] = 0.0
         points_list = [d[:length] for length, d in zip(lengths, points_data)]
         normals_list = [d[:length] for length, d in zip(lengths, normals_data)]
-        features_list = [
-            d[:length] for length, d in zip(lengths, features_data)
-        ]
+        features_list = [d[:length] for length, d in zip(lengths, features_data)]
         points_packed = torch.cat(points_data)
         normals_packed = torch.cat(normals_data)
         features_packed = torch.cat(features_data)
@@ -173,13 +153,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
             ("emptylist_emptylist_emptylist", [], [], []),
         ]
         false_cases_inputs = [
-            (
-                "list_packed",
-                points_list,
-                normals_packed,
-                features_packed,
-                ValueError,
-            ),
+            ("list_packed", points_list, normals_packed, features_packed, ValueError),
             ("packed_0", points_packed, None, None, ValueError),
         ]
 
@@ -230,15 +204,11 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                         self.assertIsNone(features_padded)
                     for n in range(N):
                         p = points_list[n].shape[0]
-                        self.assertClose(
-                            points_padded[n, :p, :], points_list[n]
-                        )
+                        self.assertClose(points_padded[n, :p, :], points_list[n])
                         if with_normals:
                             norms = normals_list[n].shape[0]
                             self.assertEqual(p, norms)
-                            self.assertClose(
-                                normals_padded[n, :p, :], normals_list[n]
-                            )
+                            self.assertClose(normals_padded[n, :p, :], normals_list[n])
                         if with_features:
                             f = features_list[n].shape[0]
                             self.assertEqual(p, f)
@@ -248,9 +218,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                         if points_padded.shape[1] > p:
                             self.assertTrue(points_padded[n, p:, :].eq(0).all())
                             if with_features:
-                                self.assertTrue(
-                                    features_padded[n, p:, :].eq(0).all()
-                                )
+                                self.assertTrue(features_padded[n, p:, :].eq(0).all())
                         self.assertEqual(points_per_cloud[n], p)
 
                     # Check compute packed.
@@ -272,17 +240,13 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                         )
                         if with_normals:
                             self.assertClose(
-                                normals_packed[cur : cur + p, :],
-                                normals_list[n],
+                                normals_packed[cur : cur + p, :], normals_list[n]
                             )
                         if with_features:
                             self.assertClose(
-                                features_packed[cur : cur + p, :],
-                                features_list[n],
+                                features_packed[cur : cur + p, :], features_list[n]
                             )
-                        self.assertTrue(
-                            packed_to_cloud[cur : cur + p].eq(n).all()
-                        )
+                        self.assertTrue(packed_to_cloud[cur : cur + p].eq(n).all())
                        self.assertTrue(cloud_to_packed[n] == cur)
                         cur += p
 
@@ -312,9 +276,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                 )[0]
                 points = torch.rand((p, 3), dtype=torch.float32, device=device)
                 normals = torch.rand((p, 3), dtype=torch.float32, device=device)
-                features = torch.rand(
-                    (p, C), dtype=torch.float32, device=device
-                )
+                features = torch.rand((p, C), dtype=torch.float32, device=device)
             else:
                 points = torch.tensor([], dtype=torch.float32, device=device)
                 normals = torch.tensor([], dtype=torch.float32, device=device)
@@ -331,9 +293,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                 if with_features:
                     this_features = features_list
                 clouds = Pointclouds(
-                    points=points_list,
-                    normals=this_normals,
-                    features=this_features,
+                    points=points_list, normals=this_normals, features=this_features
                 )
                 points_padded = clouds.points_padded()
                 normals_padded = clouds.normals_padded()
@@ -346,13 +306,9 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                 for n in range(N):
                     p = len(points_list[n])
                     if p > 0:
-                        self.assertClose(
-                            points_padded[n, :p, :], points_list[n]
-                        )
+                        self.assertClose(points_padded[n, :p, :], points_list[n])
                         if with_normals:
-                            self.assertClose(
-                                normals_padded[n, :p, :], normals_list[n]
-                            )
+                            self.assertClose(normals_padded[n, :p, :], normals_list[n])
                         if with_features:
                             self.assertClose(
                                 features_padded[n, :p, :], features_list[n]
@@ -360,13 +316,9 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                     if points_padded.shape[1] > p:
                         self.assertTrue(points_padded[n, p:, :].eq(0).all())
                         if with_normals:
-                            self.assertTrue(
-                                normals_padded[n, p:, :].eq(0).all()
-                            )
+                            self.assertTrue(normals_padded[n, p:, :].eq(0).all())
                         if with_features:
-                            self.assertTrue(
-                                features_padded[n, p:, :].eq(0).all()
-                            )
+                            self.assertTrue(features_padded[n, p:, :].eq(0).all())
                     self.assertTrue(points_per_cloud[n] == p)
 
     def test_clone_list(self):
@@ -379,12 +331,8 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
             new_clouds = clouds.clone()
 
             # Check cloned and original objects do not share tensors.
-            self.assertSeparate(
-                new_clouds.points_list()[0], clouds.points_list()[0]
-            )
-            self.assertSeparate(
-                new_clouds.normals_list()[0], clouds.normals_list()[0]
-            )
+            self.assertSeparate(new_clouds.points_list()[0], clouds.points_list()[0])
+            self.assertSeparate(new_clouds.normals_list()[0], clouds.normals_list()[0])
             self.assertSeparate(
                 new_clouds.features_list()[0], clouds.features_list()[0]
             )
@@ -412,12 +360,8 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
             new_clouds = clouds.clone()
 
             # Check cloned and original objects do not share tensors.
-            self.assertSeparate(
-                new_clouds.points_list()[0], clouds.points_list()[0]
-            )
-            self.assertSeparate(
-                new_clouds.normals_list()[0], clouds.normals_list()[0]
-            )
+            self.assertSeparate(new_clouds.points_list()[0], clouds.points_list()[0])
+            self.assertSeparate(new_clouds.normals_list()[0], clouds.normals_list()[0])
             self.assertSeparate(
                 new_clouds.features_list()[0], clouds.features_list()[0]
             )
@@ -442,9 +386,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         for i in range(N):
             self.assertClose(cloud1.points_list()[i], cloud2.points_list()[i])
             self.assertClose(cloud1.normals_list()[i], cloud2.normals_list()[i])
-            self.assertClose(
-                cloud1.features_list()[i], cloud2.features_list()[i]
-            )
+            self.assertClose(cloud1.features_list()[i], cloud2.features_list()[i])
         has_normals = cloud1.normals_list() is not None
         self.assertTrue(has_normals == (cloud2.normals_list() is not None))
         has_features = cloud1.features_list() is not None
@@ -459,22 +401,13 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         if has_features:
             self.assertClose(cloud1.features_padded(), cloud2.features_padded())
             self.assertClose(cloud1.features_packed(), cloud2.features_packed())
-        self.assertClose(
-            cloud1.packed_to_cloud_idx(), cloud2.packed_to_cloud_idx()
-        )
+        self.assertClose(cloud1.packed_to_cloud_idx(), cloud2.packed_to_cloud_idx())
         self.assertClose(
-            cloud1.cloud_to_packed_first_idx(),
-            cloud2.cloud_to_packed_first_idx(),
+            cloud1.cloud_to_packed_first_idx(), cloud2.cloud_to_packed_first_idx()
         )
-        self.assertClose(
-            cloud1.num_points_per_cloud(), cloud2.num_points_per_cloud()
-        )
-        self.assertClose(
-            cloud1.packed_to_cloud_idx(), cloud2.packed_to_cloud_idx()
-        )
-        self.assertClose(
-            cloud1.padded_to_packed_idx(), cloud2.padded_to_packed_idx()
-        )
+        self.assertClose(cloud1.num_points_per_cloud(), cloud2.num_points_per_cloud())
+        self.assertClose(cloud1.packed_to_cloud_idx(), cloud2.packed_to_cloud_idx())
+        self.assertClose(cloud1.padded_to_packed_idx(), cloud2.padded_to_packed_idx())
 
         self.assertTrue(all(cloud1.valid == cloud2.valid))
         self.assertTrue(cloud1.equisized == cloud2.equisized)
@@ -482,9 +415,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         def naive_offset(clouds, offsets_packed):
             new_points_packed = clouds.points_packed() + offsets_packed
             new_points_list = list(
-                new_points_packed.split(
-                    clouds.num_points_per_cloud().tolist(), 0
-                )
+                new_points_packed.split(clouds.num_points_per_cloud().tolist(), 0)
             )
             return Pointclouds(
                 points=new_points_list,
@@ -502,9 +433,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         clouds._compute_padded()
         clouds.padded_to_packed_idx()
 
-        deform = torch.rand(
-            (all_p, 3), dtype=torch.float32, device=clouds.device
-        )
+        deform = torch.rand((all_p, 3), dtype=torch.float32, device=clouds.device)
         new_clouds_naive = naive_offset(clouds, deform)
 
         new_clouds = clouds.offset(deform)
@@ -521,8 +450,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                 clouds.normals_list()[i], new_clouds_naive.normals_list()[i]
             )
             self.assertClose(
-                clouds.features_list()[i],
-                new_clouds_naive.features_list()[i],
+                clouds.features_list()[i], new_clouds_naive.features_list()[i]
            )
         self.assertCloudsEqual(new_clouds, new_clouds_naive)
 
@@ -550,15 +478,13 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         new_clouds = clouds.scale(scales)
         for i in range(N):
             self.assertClose(
-                scales[i] * clouds.points_list()[i],
-                new_clouds.points_list()[i],
+                scales[i] * clouds.points_list()[i], new_clouds.points_list()[i]
             )
             self.assertClose(
                 clouds.normals_list()[i], new_clouds_naive.normals_list()[i]
             )
             self.assertClose(
-                clouds.features_list()[i],
-                new_clouds_naive.features_list()[i],
+                clouds.features_list()[i], new_clouds_naive.features_list()[i]
             )
         self.assertCloudsEqual(new_clouds, new_clouds_naive)
 
@@ -576,20 +502,15 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         for i in range(len(clouds)):
             for n in range(N):
                 self.assertClose(
-                    clouds.points_list()[i],
-                    new_clouds.points_list()[i * N + n],
+                    clouds.points_list()[i], new_clouds.points_list()[i * N + n]
                 )
                 self.assertClose(
-                    clouds.normals_list()[i],
-                    new_clouds.normals_list()[i * N + n],
+                    clouds.normals_list()[i], new_clouds.normals_list()[i * N + n]
                 )
                 self.assertClose(
-                    clouds.features_list()[i],
-                    new_clouds.features_list()[i * N + n],
+                    clouds.features_list()[i], new_clouds.features_list()[i * N + n]
                 )
-                self.assertTrue(
-                    clouds.valid[i] == new_clouds.valid[i * N + n]
-                )
+                self.assertTrue(clouds.valid[i] == new_clouds.valid[i * N + n])
         self.assertAllSeparate(
             clouds.points_list()
             + new_clouds.points_list()
@@ -627,8 +548,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
             "padded_to_packed_idx",
         ]:
             self.assertClose(
-                getattr(new_cloud, attrib)().cpu(),
-                getattr(cloud, attrib)().cpu(),
+                getattr(new_cloud, attrib)().cpu(), getattr(cloud, attrib)().cpu()
             )
         for i in range(len(cloud)):
             self.assertClose(
@@ -638,8 +558,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                 cloud.normals_list()[i].cpu(), new_cloud.normals_list()[i].cpu()
             )
             self.assertClose(
-                cloud.features_list()[i].cpu(),
-                new_cloud.features_list()[i].cpu(),
+                cloud.features_list()[i].cpu(), new_cloud.features_list()[i].cpu()
             )
         self.assertTrue(all(cloud.valid.cpu() == new_cloud.valid.cpu()))
         self.assertTrue(cloud.equisized == new_cloud.equisized)
@@ -666,8 +585,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
             "padded_to_packed_idx",
         ]:
             self.assertClose(
-                getattr(new_cloud, attrib)().cpu(),
-                getattr(cloud, attrib)().cpu(),
+                getattr(new_cloud, attrib)().cpu(), getattr(cloud, attrib)().cpu()
             )
         for i in range(len(cloud)):
             self.assertClose(
@@ -677,8 +595,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                 cloud.normals_list()[i].cpu(), new_cloud.normals_list()[i].cpu()
            )
             self.assertClose(
-                cloud.features_list()[i].cpu(),
-                new_cloud.features_list()[i].cpu(),
+                cloud.features_list()[i].cpu(), new_cloud.features_list()[i].cpu()
             )
         self.assertTrue(all(cloud.valid.cpu() == new_cloud.valid.cpu()))
         self.assertTrue(cloud.equisized == new_cloud.equisized)
@@ -698,11 +615,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         self.assertEqual(len(split_clouds[1]), 3)
         self.assertTrue(
             split_clouds[1].points_list()
-            == [
-                clouds.get_cloud(2)[0],
-                clouds.get_cloud(3)[0],
-                clouds.get_cloud(4)[0],
-            ]
+            == [clouds.get_cloud(2)[0], clouds.get_cloud(3)[0], clouds.get_cloud(4)[0]]
         )
 
         split_sizes = [2, 0.3]
@@ -751,9 +664,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         points_padded = clouds.points_padded()
         points_padded_flat = points_padded.view(-1, 3)
 
-        self.assertClose(
-            points_padded_flat[padded_to_packed_idx], points_packed
-        )
+        self.assertClose(points_padded_flat[padded_to_packed_idx], points_packed)
 
         idx = padded_to_packed_idx.view(-1, 1).expand(-1, 3)
         self.assertClose(points_padded_flat.gather(0, idx), points_packed)
@@ -765,16 +676,13 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         def check_equal(selected, indices):
             for selectedIdx, index in indices:
                 self.assertClose(
-                    selected.points_list()[selectedIdx],
-                    clouds.points_list()[index],
+                    selected.points_list()[selectedIdx], clouds.points_list()[index]
                 )
                 self.assertClose(
-                    selected.normals_list()[selectedIdx],
-                    clouds.normals_list()[index],
+                    selected.normals_list()[selectedIdx], clouds.normals_list()[index]
                 )
                 self.assertClose(
-                    selected.features_list()[selectedIdx],
-                    clouds.features_list()[index],
+                    selected.features_list()[selectedIdx], clouds.features_list()[index]
                 )
 
         # int index
@@ -820,11 +728,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
         for with_normfeat in (True, False):
             for with_new_normfeat in (True, False):
                 clouds = self.init_cloud(
-                    N,
-                    P,
-                    C,
-                    with_normals=with_normfeat,
-                    with_features=with_normfeat,
+                    N, P, C, with_normals=with_normfeat, with_features=with_normfeat
                 )
 
                 num_points_per_cloud = clouds.num_points_per_cloud()
@@ -843,8 +747,7 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                     clouds.points_padded().shape, device=clouds.device
                 )
                 new_normals_list = [
-                    new_normals[i, : num_points_per_cloud[i]]
-                    for i in range(N)
+                    new_normals[i, : num_points_per_cloud[i]] for i in range(N)
                 ]
                 feat_shape = [
                     clouds.points_padded().shape[0],
@@ -853,14 +756,11 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                 ]
                 new_features = torch.rand(feat_shape, device=clouds.device)
                 new_features_list = [
-                    new_features[i, : num_points_per_cloud[i]]
-                    for i in range(N)
+                    new_features[i, : num_points_per_cloud[i]] for i in range(N)
                 ]
 
                 # update
-                new_clouds = clouds.update_padded(
-                    new_points, new_normals, new_features
-                )
+                new_clouds = clouds.update_padded(new_points, new_normals, new_features)
                 self.assertIsNone(new_clouds._points_list)
                 self.assertIsNone(new_clouds._points_packed)
 
@@ -868,13 +768,9 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                 self.assertTrue(all(new_clouds.valid == clouds.valid))
 
                 self.assertClose(new_clouds.points_padded(), new_points)
-                self.assertClose(
-                    new_clouds.points_packed(), torch.cat(new_points_list)
-                )
+                self.assertClose(new_clouds.points_packed(), torch.cat(new_points_list))
                 for i in range(N):
-                    self.assertClose(
-                        new_clouds.points_list()[i], new_points_list[i]
-                    )
+                    self.assertClose(new_clouds.points_list()[i], new_points_list[i])
 
                 if with_new_normfeat:
                     for i in range(N):
@@ -890,27 +786,22 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                     )
                     self.assertClose(new_clouds.features_padded(), new_features)
                     self.assertClose(
-                        new_clouds.features_packed(),
-                        torch.cat(new_features_list),
+                        new_clouds.features_packed(), torch.cat(new_features_list)
                     )
                 else:
                     if with_normfeat:
                         for i in range(N):
                             self.assertClose(
-                                new_clouds.normals_list()[i],
-                                clouds.normals_list()[i],
+                                new_clouds.normals_list()[i], clouds.normals_list()[i]
                             )
                             self.assertClose(
-                                new_clouds.features_list()[i],
-                                clouds.features_list()[i],
+                                new_clouds.features_list()[i], clouds.features_list()[i]
                             )
                             self.assertNotSeparate(
-                                new_clouds.normals_list()[i],
-                                clouds.normals_list()[i],
+                                new_clouds.normals_list()[i], clouds.normals_list()[i]
                             )
                             self.assertNotSeparate(
-                                new_clouds.features_list()[i],
-                                clouds.features_list()[i],
+                                new_clouds.features_list()[i], clouds.features_list()[i]
                             )
 
                         self.assertClose(
@@ -920,19 +811,16 @@ class TestPointclouds(TestCaseMixin, unittest.TestCase):
                             new_clouds.normals_packed(), clouds.normals_packed()
                         )
                         self.assertClose(
-                            new_clouds.features_padded(),
-                            clouds.features_padded(),
+                            new_clouds.features_padded(), clouds.features_padded()
                        )
                         self.assertClose(
-                            new_clouds.features_packed(),
-                            clouds.features_packed(),
+                            new_clouds.features_packed(), clouds.features_packed()
                         )
                         self.assertNotSeparate(
                             new_clouds.normals_padded(), clouds.normals_padded()
                         )
                         self.assertNotSeparate(
-                            new_clouds.features_padded(),
-                            clouds.features_padded(),
+                            new_clouds.features_padded(), clouds.features_padded()
                         )
                 else:
                     self.assertIsNone(new_clouds.normals_list())