suppress errors in vision/fair/pytorch3d

Reviewed By: kjchalup

Differential Revision: D39198333

fbshipit-source-id: 3f4ebcf625215f21d165073837578ff69b05f72d
Pyre Bot Jr authored on 2022-09-01 11:46:55 -07:00; committed by Facebook GitHub Bot
parent d19e6243d0, commit c80e5fd07a
9 changed files with 41 additions and 4 deletions

View File

@@ -207,6 +207,7 @@ class Experiment(Configurable):  # pyre-ignore: 13
            val_loader,
        ) = accelerator.prepare(model, optimizer, train_loader, val_loader)

+       # pyre-fixme[16]: Optional type has no attribute `is_multisequence`.
        if not self.training_loop.evaluator.is_multisequence:
            all_train_cameras = self.data_source.all_train_cameras
        else:

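For readers unfamiliar with the error codes: [16] above is Pyre's "undefined attribute" error, raised here because `evaluator` is typed as `Optional` and the attribute is accessed without narrowing. A minimal runnable sketch of the pattern, with hypothetical names not taken from this diff; the second function shows the narrowing that would avoid the suppression:

from typing import Optional


class Evaluator:
    is_multisequence: bool = False


class TrainingLoop:
    # Typed as Optional because the evaluator may never be configured.
    evaluator: Optional[Evaluator] = None


def suppressed_access(loop: TrainingLoop) -> bool:
    # pyre-fixme[16]: `Optional` has no attribute `is_multisequence`.
    return loop.evaluator.is_multisequence  # fails at runtime if evaluator is None


def narrowed_access(loop: TrainingLoop) -> bool:
    evaluator = loop.evaluator
    if evaluator is None:
        raise ValueError("evaluator is not configured")
    return evaluator.is_multisequence


loop = TrainingLoop()
loop.evaluator = Evaluator()
assert suppressed_access(loop) is narrowed_access(loop) is False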
View File

@@ -29,6 +29,7 @@ from .utils import seed_all_random_engines
 logger = logging.getLogger(__name__)


+# pyre-fixme[13]: Attribute `evaluator` is never initialized.
 class TrainingLoopBase(ReplaceableBase):
     """
     Members:
@@ -62,7 +63,7 @@ class TrainingLoopBase(ReplaceableBase):


 @registry.register
-class ImplicitronTrainingLoop(TrainingLoopBase):  # pyre-ignore [13]
+class ImplicitronTrainingLoop(TrainingLoopBase):
     """
     Members:
         eval_only: If True, only run evaluation using the test dataloader.
@@ -137,6 +138,7 @@ class ImplicitronTrainingLoop(TrainingLoopBase):  # pyre-ignore [13]
         # only run evaluation on the test dataloader
         if self.eval_only:
             if test_loader is not None:
+                # pyre-fixme[16]: `Optional` has no attribute `run`.
                 self.evaluator.run(
                     all_train_cameras=all_train_cameras,
                     dataloader=test_loader,

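[13] ("attribute is never initialized") shows up on these Implicitron classes because their fields are declared as bare annotations and filled in later by the config/registry machinery rather than in `__init__`, so Pyre cannot prove they are ever set. A simplified, hypothetical sketch of that pattern (not the library's Configurable implementation):

class Evaluator:
    def run(self) -> str:
        return "evaluated"


# pyre-fixme[13]: Attribute `evaluator` is never initialized.
class TrainingLoopSketch:
    # Bare annotation: the real code relies on a configuration system to
    # assign this before use, which a static checker cannot verify.
    evaluator: Evaluator

    def run_eval(self) -> str:
        return self.evaluator.run()


loop = TrainingLoopSketch()
loop.evaluator = Evaluator()  # stands in for the config machinery
print(loop.run_eval())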
View File

@@ -158,9 +158,14 @@ class MLPWithInputSkips(Configurable, torch.nn.Module):
             # if the skip tensor is None, we use `x` instead.
             z = x
         skipi = 0
+        # pyre-fixme[6]: For 1st param expected `Iterable[Variable[_T]]` but got
+        #  `Union[Tensor, Module]`.
         for li, layer in enumerate(self.mlp):
+            # pyre-fixme[58]: `in` is not supported for right operand type
+            #  `Union[torch._tensor.Tensor, torch.nn.modules.module.Module]`.
             if li in self._input_skips:
                 if self._skip_affine_trans:
+                    # pyre-fixme[29]: `Union[BoundMethod[typing.Callable(torch._C._Te...
                     y = self._apply_affine_layer(self.skip_affines[skipi], y, z)
                 else:
                     y = torch.cat((y, z), dim=-1)
@@ -170,6 +175,7 @@ class MLPWithInputSkips(Configurable, torch.nn.Module):


 @registry.register
+# pyre-fixme[13]: Attribute `network` is never initialized.
 class MLPDecoder(DecoderFunctionBase):
     """
     Decoding function which uses `MLPWithIputSkips` to convert the embedding to output.

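The [6], [58], and [29] suppressions in `MLPWithInputSkips` all trace back to one cause: attributes registered on an `nn.Module` are read back through `Module.__getattr__`, which the torch stubs type as returning `Union[Tensor, Module]`, so iterating `self.mlp` or testing membership against an attribute looks ill-typed even though it is fine at runtime. A small runnable sketch of that pattern (a hypothetical module, not the library code):

from typing import Tuple

import torch
from torch import nn


class SkipMLPSketch(nn.Module):
    def __init__(self, input_skips: Tuple[int, ...] = (2,)) -> None:
        super().__init__()
        # Registered submodules are later read back through Module.__getattr__,
        # whose stub return type (Union[Tensor, Module]) is what triggers the
        # [6]/[58]/[29] errors suppressed above.
        self.mlp = nn.ModuleList([nn.Linear(8, 8) for _ in range(4)])
        self._input_skips = set(input_skips)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        y = x
        for li, layer in enumerate(self.mlp):  # [6] in the real code
            if li in self._input_skips:        # [58] in the real code
                y = torch.cat((y, x), dim=-1)[..., :8]
            y = layer(y)
        return y


print(SkipMLPSketch()(torch.randn(2, 8)).shape)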
View File

@@ -89,7 +89,6 @@ class VoxelGridBase(ReplaceableBase, torch.nn.Module):
             torch.Tensor: shape (n_grids, n_points, n_features)
         """
         points_local = locator.world_to_local_coords(points)
-        # pyre-ignore[29]
         return self.evaluate_local(points_local, grid_values)

     def evaluate_local(
@@ -139,6 +138,8 @@ class FullResolutionVoxelGrid(VoxelGridBase):
     # the type of grid_values argument needed to run evaluate_local()
     values_type: ClassVar[Type[VoxelGridValuesBase]] = FullResolutionVoxelGridValues

+    # pyre-fixme[14]: `evaluate_local` overrides method defined in `VoxelGridBase`
+    #  inconsistently.
     def evaluate_local(
         self, points: torch.Tensor, grid_values: FullResolutionVoxelGridValues
     ) -> torch.Tensor:
@@ -213,6 +214,8 @@ class CPFactorizedVoxelGrid(VoxelGridBase):
     n_components: int = 24
     matrix_reduction: bool = True

+    # pyre-fixme[14]: `evaluate_local` overrides method defined in `VoxelGridBase`
+    #  inconsistently.
     def evaluate_local(
         self, points: torch.Tensor, grid_values: CPFactorizedVoxelGridValues
     ) -> torch.Tensor:
@@ -318,6 +321,8 @@ class VMFactorizedVoxelGrid(VoxelGridBase):
     distribution_of_components: Optional[Tuple[int, int, int]] = None
     matrix_reduction: bool = True

+    # pyre-fixme[14]: `evaluate_local` overrides method defined in `VoxelGridBase`
+    #  inconsistently.
     def evaluate_local(
         self, points: torch.Tensor, grid_values: VMFactorizedVoxelGridValues
     ) -> torch.Tensor:
@@ -392,9 +397,11 @@ class VMFactorizedVoxelGrid(VoxelGridBase):
         if self.distribution_of_components is None and self.n_components % 3 != 0:
             raise ValueError("n_components must be divisible by 3")
         if self.distribution_of_components is None:
-            # pyre-ignore[58]
             calculated_distribution_of_components = [
-                self.n_components // 3 for _ in range(3)
+                # pyre-fixme[58]: `//` is not supported for operand types
+                #  `Optional[int]` and `int`.
+                self.n_components // 3
+                for _ in range(3)
             ]
         else:
             calculated_distribution_of_components = self.distribution_of_components
@@ -437,6 +444,7 @@ class VMFactorizedVoxelGrid(VoxelGridBase):
         return shape_dict


+# pyre-fixme[13]: Attribute `voxel_grid` is never initialized.
 class VoxelGridModule(Configurable, torch.nn.Module):
     """
     A wrapper torch.nn.Module for the VoxelGrid classes, which
@@ -459,6 +467,7 @@ class VoxelGridModule(Configurable, torch.nn.Module):
     voxel_grid_class_type: str = "FullResolutionVoxelGrid"
     voxel_grid: VoxelGridBase

+    # pyre-fixme[8]: Attribute has type `Tuple[float, float, float]`; used as `float`.
     extents: Tuple[float, float, float] = 1.0
     translation: Tuple[float, float, float] = (0.0, 0.0, 0.0)
@@ -505,8 +514,11 @@ class VoxelGridModule(Configurable, torch.nn.Module):
             # voxel size and translation.
             voxel_size=self.extents,
             volume_translation=self.translation,
+            # pyre-fixme[29]: `Union[BoundMethod[typing.Callable(torch._C._TensorBase...
             device=next(self.params.values()).device,
         )
+        # pyre-fixme[29]: `Union[torch._tensor.Tensor,
+        #  torch.nn.modules.module.Module]` is not a function.
         grid_values = self.voxel_grid.values_type(**self.params)
         # voxel grids operate with extra n_grids dimension, which we fix to one
         return self.voxel_grid.evaluate_world(points[None], grid_values, locator)[0]

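Most of the suppressions in this file are [14] ("inconsistent override"): each concrete grid narrows the `grid_values` parameter of `evaluate_local` from the base-class values type to its own subclass, which violates parameter contravariance even though callers always pass the matching type. (The [8] on `extents` is related in spirit: a scalar default for a field annotated as a 3-tuple.) A stripped-down, runnable illustration with hypothetical names:

import torch


class GridValuesBase:
    pass


class DenseGridValues(GridValuesBase):
    def __init__(self, voxel_grid: torch.Tensor) -> None:
        self.voxel_grid = voxel_grid


class GridBase:
    def evaluate_local(
        self, points: torch.Tensor, grid_values: GridValuesBase
    ) -> torch.Tensor:
        raise NotImplementedError


class DenseGrid(GridBase):
    # pyre-fixme[14]: `evaluate_local` overrides method defined in `GridBase`
    #  inconsistently (the parameter type is narrowed to `DenseGridValues`).
    def evaluate_local(
        self, points: torch.Tensor, grid_values: DenseGridValues
    ) -> torch.Tensor:
        # Trivial "evaluation" so the example is runnable.
        return points.sum(dim=-1) + grid_values.voxel_grid.sum()


grid = DenseGrid()
out = grid.evaluate_local(torch.zeros(4, 3), DenseGridValues(torch.ones(2, 2)))
print(out.shape)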
View File

@@ -185,11 +185,14 @@ def _remove_outlier_cameras(
     keep_indices = utils.get_inlier_indicators(
         cameras.get_camera_center(), dim=0, outlier_rate=outlier_rate
     )
+    # pyre-fixme[6]: For 1st param expected `Union[List[int], int, BoolTensor,
+    #  LongTensor]` but got `Tensor`.
     clean_cameras = cameras[keep_indices]
     logger.info(
         "Filtered outlier cameras when estimating the trajectory: "
         f"{len(cameras)}{len(clean_cameras)}"
     )
+    # pyre-fixme[7]: Expected `PerspectiveCameras` but got `CamerasBase`.
     return clean_cameras

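The [6] and [7] here arise because `CamerasBase.__getitem__` is annotated to take `Union[List[int], int, BoolTensor, LongTensor]` and to return `CamerasBase`, while this caller passes a generic `Tensor` and promises a `PerspectiveCameras` return. A hedged sketch of the same shape of problem using `typing.cast` instead of suppression (hypothetical classes, not the PyTorch3D API):

from typing import List, Union, cast

import torch


class CamerasSketch:
    """Minimal stand-in for a camera batch; only indexing matters here."""

    def __init__(self, centers: torch.Tensor) -> None:
        self.centers = centers

    def __getitem__(self, index: Union[List[int], torch.LongTensor]) -> "CamerasSketch":
        # The return annotation is the base class, which is why the real
        # function needs a [7] suppression when it promises the subclass.
        return type(self)(self.centers[index])


class PerspectiveCamerasSketch(CamerasSketch):
    pass


def remove_outliers(cameras: PerspectiveCamerasSketch) -> PerspectiveCamerasSketch:
    keep = torch.tensor([0, 2])  # a plain Tensor index: error [6] in the real code
    clean = cameras[cast(torch.LongTensor, keep)]
    return cast(PerspectiveCamerasSketch, clean)  # documents the expected subtype


print(remove_outliers(PerspectiveCamerasSketch(torch.randn(3, 3))).centers.shape)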
View File

@@ -401,7 +401,9 @@ class CamerasBase(TensorProperties):
         kwargs = {}

         tensor_types = {
+            # pyre-fixme[16]: Module `cuda` has no attribute `BoolTensor`.
             "bool": (torch.BoolTensor, torch.cuda.BoolTensor),
+            # pyre-fixme[16]: Module `cuda` has no attribute `LongTensor`.
             "long": (torch.LongTensor, torch.cuda.LongTensor),
         }
         if not isinstance(
@@ -419,8 +421,14 @@ class CamerasBase(TensorProperties):
             index = [index]

         if isinstance(index, tensor_types["bool"]):
+            # pyre-fixme[16]: Item `List` of `Union[List[int], BoolTensor,
+            #  LongTensor]` has no attribute `ndim`.
+            # pyre-fixme[16]: Item `List` of `Union[List[int], BoolTensor,
+            #  LongTensor]` has no attribute `shape`.
             if index.ndim != 1 or index.shape[0] != len(self):
                 raise ValueError(
+                    # pyre-fixme[16]: Item `List` of `Union[List[int], BoolTensor,
+                    #  LongTensor]` has no attribute `shape`.
                     f"Boolean index of shape {index.shape} does not match cameras"
                 )
         elif max(index) >= len(self):

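The two [16] errors on `tensor_types` come from the torch stubs not declaring `torch.cuda.BoolTensor`/`torch.cuda.LongTensor` as module attributes, even though both exist at runtime. A small sketch of the runtime behaviour the suppressions preserve, plus a `getattr`-based variant that would typically sidestep this particular stub gap; this is an illustration, not the library code:

import torch

# Mirrors the shape of the suppressed dict: legacy CPU/CUDA tensor type
# classes keyed by the dtype family they represent.
tensor_types = {
    "bool": (torch.BoolTensor, getattr(torch.cuda, "BoolTensor")),
    "long": (torch.LongTensor, getattr(torch.cuda, "LongTensor")),
}

index = torch.tensor([True, False, True])
# isinstance against the tuple is how the real __getitem__ decides between
# boolean-mask and integer indexing.
print(isinstance(index, tensor_types["bool"]))  # True for a CPU bool tensor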
View File

@@ -204,6 +204,7 @@ class MeshRasterizer(nn.Module):
         to_ndc_transform = cameras.get_ndc_camera_transform(**kwargs)
         projection_transform = try_get_projection_transform(cameras, kwargs)
         if projection_transform is not None:
+            # pyre-fixme[16]: Anonymous callable has no attribute `compose`.
             projection_transform = projection_transform.compose(to_ndc_transform)
             verts_ndc = projection_transform.transform_points(verts_view, eps=eps)
         else:

View File

@@ -107,6 +107,7 @@ class PointsRasterizer(nn.Module):
         to_ndc_transform = cameras.get_ndc_camera_transform(**kwargs)
         projection_transform = try_get_projection_transform(cameras, kwargs)
         if projection_transform is not None:
+            # pyre-fixme[16]: Anonymous callable has no attribute `compose`.
             projection_transform = projection_transform.compose(to_ndc_transform)
             pts_ndc = projection_transform.transform_points(pts_view, eps=eps)
         else:

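Both rasterizer hunks suppress the same [16]: Pyre infers the value returned by `try_get_projection_transform` as an anonymous callable, so `.compose` is not visible on it even after the `None` check, although the call is fine at runtime. A hedged sketch of the shape of the problem with a hypothetical `Transform` type (not the PyTorch3D transforms API); an explicit return annotation on the helper is the usual non-suppression fix:

import torch


class Transform:
    """Hypothetical stand-in for a projection transform."""

    def __init__(self, scale: float = 1.0) -> None:
        self.scale = scale

    def compose(self, other: "Transform") -> "Transform":
        return Transform(self.scale * other.scale)

    def transform_points(self, pts: torch.Tensor) -> torch.Tensor:
        return pts * self.scale


def try_get_transform(has_projection: bool):  # deliberately unannotated return
    return Transform(2.0) if has_projection else None


projection = try_get_transform(True)
if projection is not None:
    # Fine at runtime; the checker's inferred type for `projection` may be too
    # weak to expose `compose`, mirroring the [16] suppressed above.
    projection = projection.compose(Transform(0.5))
    print(projection.transform_points(torch.ones(3)))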
View File

@@ -981,6 +981,9 @@ class VolumeLocator:
         device = device if device is not None else self.device
         other._grid_sizes = self._grid_sizes[index].to(device)
         other._local_to_world_transform = self.get_local_to_world_coords_transform()[
+            # pyre-fixme[6]: For 1st param expected `Union[List[int], int, slice,
+            #  BoolTensor, LongTensor]` but got `Union[None, List[int], Tuple[int],
+            #  int, slice, Tensor]`.
             index
         ].to(device)
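The final hunk suppresses a [6] where a broad `Union[None, List[int], Tuple[int], int, slice, Tensor]` index is forwarded to indexing signatures that accept a narrower union. An alternative to suppression is to normalize the index before forwarding it; a rough sketch under hypothetical names (not the VolumeLocator API):

from typing import List, Sequence, Union

import torch

IndexLike = Union[None, int, slice, Sequence[int], torch.Tensor]


def normalize_index(index: IndexLike) -> Union[List[int], slice]:
    # Collapse the broad union into the two forms downstream indexing accepts.
    if index is None:
        return slice(None)
    if isinstance(index, slice):
        return index
    if isinstance(index, int):
        return [index]
    if isinstance(index, torch.Tensor):
        return [int(i) for i in index.tolist()]
    return list(index)


sizes = torch.arange(10)
print(sizes[normalize_index(torch.tensor([1, 3]))])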