Address black + isort fbsource linter warnings

Summary: Address black + isort fbsource linter warnings from D20558374 (previous diff)

Reviewed By: nikhilaravi

Differential Revision: D20558373

fbshipit-source-id: d3607de4a01fb24c0d5269634563a7914bddf1c8
Author: Patrick Labatut
Date: 2020-03-29 14:46:33 -07:00
Committed by: Facebook GitHub Bot
Parent: eb512ffde3
Commit: d57daa6f85

110 changed files with 705 additions and 1850 deletions
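A note on the mechanics of the changes below: black re-joins statements onto a single line whenever the result fits within its configured line length, and isort regroups imports into standard-library, third-party, and local sections separated by blank lines. Using the imports of meshes.py (shown in its first hunk below) as the example, the import hunks converge on this grouping:

    # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.

    from typing import List                 # standard library

    import torch                            # third-party

    from . import utils as struct_utils     # local (relative) import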

View File

@@ -3,11 +3,7 @@
 from .meshes import Meshes, join_meshes
 from .pointclouds import Pointclouds
 from .textures import Textures
-from .utils import (
-    list_to_packed,
-    list_to_padded,
-    packed_to_list,
-    padded_to_list,
-)
+from .utils import list_to_packed, list_to_padded, packed_to_list, padded_to_list
 
 __all__ = [k for k in globals().keys() if not k.startswith("_")]
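The four utils helpers re-exported here convert between the list, padded, and packed batch representations used throughout the structures module. A minimal round-trip sketch (tensor shapes are illustrative only), assuming the re-exports above make the helpers importable from pytorch3d.structures:

    import torch
    from pytorch3d.structures import list_to_padded, padded_to_list

    # Two entries with different numbers of rows (Px3 each).
    x = [torch.rand(4, 3), torch.rand(7, 3)]

    # Pad every entry to a common (7, 3) shape; shorter entries are filled with 0.0.
    x_padded = list_to_padded(x, pad_size=(7, 3), pad_value=0.0)  # shape (2, 7, 3)

    # Undo the padding by passing the original per-entry sizes.
    x_list = padded_to_list(x_padded, split_size=[4, 7])
    assert all(torch.equal(a, b) for a, b in zip(x, x_list))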

View File

@@ -1,6 +1,7 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
 
 from typing import List
+
 import torch
 
 from . import utils as struct_utils
@@ -314,14 +315,11 @@ class Meshes(object):
         if isinstance(verts, list) and isinstance(faces, list):
             self._verts_list = verts
             self._faces_list = [
-                f[f.gt(-1).all(1)].to(torch.int64) if len(f) > 0 else f
-                for f in faces
+                f[f.gt(-1).all(1)].to(torch.int64) if len(f) > 0 else f for f in faces
             ]
             self._N = len(self._verts_list)
             self.device = torch.device("cpu")
-            self.valid = torch.zeros(
-                (self._N,), dtype=torch.bool, device=self.device
-            )
+            self.valid = torch.zeros((self._N,), dtype=torch.bool, device=self.device)
             if self._N > 0:
                 self.device = self._verts_list[0].device
                 self._num_verts_per_mesh = torch.tensor(
@@ -348,18 +346,14 @@ class Meshes(object):
         elif torch.is_tensor(verts) and torch.is_tensor(faces):
             if verts.size(2) != 3 and faces.size(2) != 3:
-                raise ValueError(
-                    "Verts and Faces tensors have incorrect dimensions."
-                )
+                raise ValueError("Verts and Faces tensors have incorrect dimensions.")
             self._verts_padded = verts
             self._faces_padded = faces.to(torch.int64)
             self._N = self._verts_padded.shape[0]
             self._V = self._verts_padded.shape[1]
             self.device = self._verts_padded.device
-            self.valid = torch.zeros(
-                (self._N,), dtype=torch.bool, device=self.device
-            )
+            self.valid = torch.zeros((self._N,), dtype=torch.bool, device=self.device)
             if self._N > 0:
                 # Check that padded faces - which have value -1 - are at the
                 # end of the tensors
@@ -400,12 +394,8 @@ class Meshes(object):
         # Set the num verts/faces on the textures if present.
         if self.textures is not None:
-            self.textures._num_faces_per_mesh = (
-                self._num_faces_per_mesh.tolist()
-            )
-            self.textures._num_verts_per_mesh = (
-                self._num_verts_per_mesh.tolist()
-            )
+            self.textures._num_faces_per_mesh = self._num_faces_per_mesh.tolist()
+            self.textures._num_verts_per_mesh = self._num_verts_per_mesh.tolist()
 
     def __len__(self):
         return self._N
@@ -665,8 +655,7 @@ class Meshes(object):
         self._verts_padded_to_packed_idx = torch.cat(
             [
-                torch.arange(v, dtype=torch.int64, device=self.device)
-                + i * self._V
+                torch.arange(v, dtype=torch.int64, device=self.device) + i * self._V
                 for (i, v) in enumerate(self._num_verts_per_mesh)
             ],
             dim=0,
@@ -706,15 +695,10 @@ class Meshes(object):
             tensor of normals of shape (N, max(V_n), 3).
         """
         if self.isempty():
-            return torch.zeros(
-                (self._N, 0, 3), dtype=torch.float32, device=self.device
-            )
+            return torch.zeros((self._N, 0, 3), dtype=torch.float32, device=self.device)
         verts_normals_list = self.verts_normals_list()
         return struct_utils.list_to_padded(
-            verts_normals_list,
-            (self._V, 3),
-            pad_value=0.0,
-            equisized=self.equisized,
+            verts_normals_list, (self._V, 3), pad_value=0.0, equisized=self.equisized
         )
 
     def faces_normals_packed(self):
@@ -750,15 +734,10 @@ class Meshes(object):
             tensor of normals of shape (N, max(F_n), 3).
         """
         if self.isempty():
-            return torch.zeros(
-                (self._N, 0, 3), dtype=torch.float32, device=self.device
-            )
+            return torch.zeros((self._N, 0, 3), dtype=torch.float32, device=self.device)
         faces_normals_list = self.faces_normals_list()
         return struct_utils.list_to_padded(
-            faces_normals_list,
-            (self._F, 3),
-            pad_value=0.0,
-            equisized=self.equisized,
+            faces_normals_list, (self._F, 3), pad_value=0.0, equisized=self.equisized
         )
 
     def faces_areas_packed(self):
@@ -797,9 +776,7 @@ class Meshes(object):
             return
         faces_packed = self.faces_packed()
         verts_packed = self.verts_packed()
-        face_areas, face_normals = mesh_face_areas_normals(
-            verts_packed, faces_packed
-        )
+        face_areas, face_normals = mesh_face_areas_normals(verts_packed, faces_packed)
         self._faces_areas_packed = face_areas
         self._faces_normals_packed = face_normals
@@ -813,9 +790,7 @@ class Meshes(object):
             refresh: Set to True to force recomputation of vertex normals.
                 Default: False.
         """
-        if not (
-            refresh or any(v is None for v in [self._verts_normals_packed])
-        ):
+        if not (refresh or any(v is None for v in [self._verts_normals_packed])):
             return
 
         if self.isempty():
@@ -867,8 +842,7 @@ class Meshes(object):
         Computes the padded version of meshes from verts_list and faces_list.
         """
         if not (
-            refresh
-            or any(v is None for v in [self._verts_padded, self._faces_padded])
+            refresh or any(v is None for v in [self._verts_padded, self._faces_padded])
         ):
             return
@@ -887,16 +861,10 @@ class Meshes(object):
             )
         else:
             self._faces_padded = struct_utils.list_to_padded(
-                faces_list,
-                (self._F, 3),
-                pad_value=-1.0,
-                equisized=self.equisized,
+                faces_list, (self._F, 3), pad_value=-1.0, equisized=self.equisized
             )
             self._verts_padded = struct_utils.list_to_padded(
-                verts_list,
-                (self._V, 3),
-                pad_value=0.0,
-                equisized=self.equisized,
+                verts_list, (self._V, 3), pad_value=0.0, equisized=self.equisized
             )
 
     # TODO(nikhilar) Improve performance of _compute_packed.
@@ -1055,9 +1023,7 @@ class Meshes(object):
         face_to_edge = inverse_idxs[face_to_edge]
         self._faces_packed_to_edges_packed = face_to_edge
 
-        num_edges_per_mesh = torch.zeros(
-            self._N, dtype=torch.int32, device=self.device
-        )
+        num_edges_per_mesh = torch.zeros(self._N, dtype=torch.int32, device=self.device)
         ones = torch.ones(1, dtype=torch.int32, device=self.device).expand(
             self._edges_packed_to_mesh_idx.shape
         )
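
The meshes.py hunks above all sit inside the Meshes constructor and its normals/padding helpers. For orientation, a minimal sketch of the list construction path and the padded accessors these code paths feed (shapes are illustrative; the accessor names are the public ones assumed from the released package):

    import torch
    from pytorch3d.structures import Meshes

    # Two meshes of different sizes; each faces tensor indexes into its own verts.
    verts = [torch.rand(4, 3), torch.rand(5, 3)]
    faces = [torch.tensor([[0, 1, 2], [0, 2, 3]]), torch.tensor([[0, 1, 2]])]
    meshes = Meshes(verts=verts, faces=faces)

    print(meshes.verts_padded().shape)          # (2, 5, 3); shorter mesh padded with 0.0
    print(meshes.faces_padded().shape)          # (2, 2, 3); missing faces padded with -1
    print(meshes.verts_normals_padded().shape)  # (2, 5, 3); per-vertex normals, padded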

View File

@@ -176,17 +176,13 @@ class Pointclouds(object):
             self._points_list = points
             self._N = len(self._points_list)
             self.device = torch.device("cpu")
-            self.valid = torch.zeros(
-                (self._N,), dtype=torch.bool, device=self.device
-            )
+            self.valid = torch.zeros((self._N,), dtype=torch.bool, device=self.device)
             self._num_points_per_cloud = []
 
             if self._N > 0:
                 for p in self._points_list:
                     if len(p) > 0 and (p.dim() != 2 or p.shape[1] != 3):
-                        raise ValueError(
-                            "Clouds in list must be of shape Px3 or empty"
-                        )
+                        raise ValueError("Clouds in list must be of shape Px3 or empty")
                 self.device = self._points_list[0].device
 
                 num_points_per_cloud = torch.tensor(
@@ -210,9 +206,7 @@ class Pointclouds(object):
             self._N = self._points_padded.shape[0]
             self._P = self._points_padded.shape[1]
             self.device = self._points_padded.device
-            self.valid = torch.ones(
-                (self._N,), dtype=torch.bool, device=self.device
-            )
+            self.valid = torch.ones((self._N,), dtype=torch.bool, device=self.device)
             self._num_points_per_cloud = torch.tensor(
                 [self._P] * self._N, device=self.device
             )
@@ -260,9 +254,7 @@ class Pointclouds(object):
         if isinstance(aux_input, list):
             if len(aux_input) != self._N:
-                raise ValueError(
-                    "Points and auxiliary input must be the same length."
-                )
+                raise ValueError("Points and auxiliary input must be the same length.")
             for p, d in zip(self._num_points_per_cloud, aux_input):
                 if p != d.shape[0]:
                     raise ValueError(
@@ -282,9 +274,7 @@ class Pointclouds(object):
                 return aux_input, None, aux_input_C
         elif torch.is_tensor(aux_input):
             if aux_input.dim() != 3:
-                raise ValueError(
-                    "Auxiliary input tensor has incorrect dimensions."
-                )
+                raise ValueError("Auxiliary input tensor has incorrect dimensions.")
             if self._N != aux_input.shape[0]:
                 raise ValueError("Points and inputs must be the same length.")
             if self._P != aux_input.shape[1]:
@@ -531,8 +521,7 @@ class Pointclouds(object):
         else:
             self._padded_to_packed_idx = torch.cat(
                 [
-                    torch.arange(v, dtype=torch.int64, device=self.device)
-                    + i * self._P
+                    torch.arange(v, dtype=torch.int64, device=self.device) + i * self._P
                     for (i, v) in enumerate(self._num_points_per_cloud)
                 ],
                 dim=0,
@@ -551,9 +540,7 @@ class Pointclouds(object):
         self._normals_padded, self._features_padded = None, None
         if self.isempty():
-            self._points_padded = torch.zeros(
-                (self._N, 0, 3), device=self.device
-            )
+            self._points_padded = torch.zeros((self._N, 0, 3), device=self.device)
         else:
             self._points_padded = struct_utils.list_to_padded(
                 self.points_list(),
@@ -621,9 +608,7 @@ class Pointclouds(object):
         points_list_to_packed = struct_utils.list_to_packed(points_list)
         self._points_packed = points_list_to_packed[0]
-        if not torch.allclose(
-            self._num_points_per_cloud, points_list_to_packed[1]
-        ):
+        if not torch.allclose(self._num_points_per_cloud, points_list_to_packed[1]):
             raise ValueError("Inconsistent list to packed conversion")
         self._cloud_to_packed_first_idx = points_list_to_packed[2]
         self._packed_to_cloud_idx = points_list_to_packed[3]
@@ -696,13 +681,9 @@ class Pointclouds(object):
         if other._N > 0:
             other._points_list = [v.to(device) for v in other.points_list()]
             if other._normals_list is not None:
-                other._normals_list = [
-                    n.to(device) for n in other.normals_list()
-                ]
+                other._normals_list = [n.to(device) for n in other.normals_list()]
             if other._features_list is not None:
-                other._features_list = [
-                    f.to(device) for f in other.features_list()
-                ]
+                other._features_list = [f.to(device) for f in other.features_list()]
         for k in self._INTERNAL_TENSORS:
             v = getattr(self, k)
             if torch.is_tensor(v):
@@ -892,16 +873,11 @@ class Pointclouds(object):
             for features in self.features_list():
                 new_features_list.extend(features.clone() for _ in range(N))
         return Pointclouds(
-            points=new_points_list,
-            normals=new_normals_list,
-            features=new_features_list,
+            points=new_points_list, normals=new_normals_list, features=new_features_list
         )
 
     def update_padded(
-        self,
-        new_points_padded,
-        new_normals_padded=None,
-        new_features_padded=None,
+        self, new_points_padded, new_normals_padded=None, new_features_padded=None
     ):
         """
         Returns a Pointcloud structure with updated padded tensors and copies of
@@ -920,13 +896,9 @@ class Pointclouds(object):
         def check_shapes(x, size):
             if x.shape[0] != size[0]:
-                raise ValueError(
-                    "new values must have the same batch dimension."
-                )
+                raise ValueError("new values must have the same batch dimension.")
             if x.shape[1] != size[1]:
-                raise ValueError(
-                    "new values must have the same number of points."
-                )
+                raise ValueError("new values must have the same number of points.")
             if size[2] is not None:
                 if x.shape[2] != size[2]:
                     raise ValueError(
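
The pointclouds.py hunks are the same kind of mechanical reflow, concentrated in the constructor, the auxiliary-input validation, and the padded/packed conversions. A minimal usage sketch of those representations (sizes illustrative; accessor names assumed from the released package):

    import torch
    from pytorch3d.structures import Pointclouds

    # Two clouds with different point counts and 4-channel per-point features.
    points = [torch.rand(8, 3), torch.rand(5, 3)]
    features = [torch.rand(8, 4), torch.rand(5, 4)]
    clouds = Pointclouds(points=points, features=features)

    print(clouds.points_padded().shape)   # (2, 8, 3); second cloud zero-padded
    print(clouds.points_packed().shape)   # (13, 3); all points concatenated
    print(clouds.num_points_per_cloud())  # tensor([8, 5])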

View File

@@ -1,6 +1,7 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
 
 from typing import List, Optional, Union
+
 import torch
 import torchvision.transforms as T
@@ -233,11 +234,7 @@ class Textures(object):
         if all(
             v is not None
-            for v in [
-                self._faces_uvs_padded,
-                self._verts_uvs_padded,
-                self._maps_padded,
-            ]
+            for v in [self._faces_uvs_padded, self._verts_uvs_padded, self._maps_padded]
         ):
             new_verts_uvs = _extend_tensor(self._verts_uvs_padded, N)
             new_faces_uvs = _extend_tensor(self._faces_uvs_padded, N)

View File

@@ -1,6 +1,7 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
 
 from typing import List, Union
+
 import torch
@@ -38,9 +39,7 @@ def list_to_padded(
         pad_dim1 = max(y.shape[1] for y in x if len(y) > 0)
     else:
         if len(pad_size) != 2:
-            raise ValueError(
-                "Pad size must contain target size for 1st and 2nd dim"
-            )
+            raise ValueError("Pad size must contain target size for 1st and 2nd dim")
         pad_dim0, pad_dim1 = pad_size
 
     N = len(x)
@@ -55,9 +54,7 @@ def list_to_padded(
     return x_padded
 
 
-def padded_to_list(
-    x: torch.Tensor, split_size: Union[list, tuple, None] = None
-):
+def padded_to_list(x: torch.Tensor, split_size: Union[list, tuple, None] = None):
     r"""
     Transforms a padded tensor of shape (N, M, K) into a list of N tensors
     of shape (Mi, Ki) where (Mi, Ki) is specified in split_size(i), or of shape
@@ -81,9 +78,7 @@ def padded_to_list(
     N = len(split_size)
     if x.shape[0] != N:
-        raise ValueError(
-            "Split size must be of same length as inputs first dimension"
-        )
+        raise ValueError("Split size must be of same length as inputs first dimension")
 
     for i in range(N):
         if isinstance(split_size[i], int):
@@ -119,9 +114,7 @@ def list_to_packed(x: List[torch.Tensor]):
"""
N = len(x)
num_items = torch.zeros(N, dtype=torch.int64, device=x[0].device)
item_packed_first_idx = torch.zeros(
N, dtype=torch.int64, device=x[0].device
)
item_packed_first_idx = torch.zeros(N, dtype=torch.int64, device=x[0].device)
item_packed_to_list_idx = []
cur = 0
for i, y in enumerate(x):
@@ -187,9 +180,7 @@ def padded_to_packed(
     N, M, D = x.shape
     if split_size is not None and pad_value is not None:
-        raise ValueError(
-            "Only one of split_size or pad_value should be provided."
-        )
+        raise ValueError("Only one of split_size or pad_value should be provided.")
 
     x_packed = x.reshape(-1, D)  # flatten padded
@@ -205,9 +196,7 @@ def padded_to_packed(
     # Convert to packed using split sizes
     N = len(split_size)
     if x.shape[0] != N:
-        raise ValueError(
-            "Split size must be of same length as inputs first dimension"
-        )
+        raise ValueError("Split size must be of same length as inputs first dimension")
 
     if not all(isinstance(i, int) for i in split_size):
         raise ValueError(