apply Black 25.11.0 style in fbcode (70/92)

Summary:
Formats the covered files with pyfmt.

paintitblack

Reviewed By: itamaro

Differential Revision: D90476295

fbshipit-source-id: 5101d4aae980a9f8955a4cb10bae23997c48837f
Bowie Chen
2026-01-12 02:54:36 -08:00
committed by meta-codesync[bot]
parent 6be5e2da06
commit 0c3b204375
74 changed files with 34 additions and 148 deletions
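
Most of the hunks below show two patterns from the Black 25.11 stable style: assert and raise messages move into their own "hugging" parentheses, and adjacent implicitly concatenated string literals are merged into a single literal (the lines deleted from the import blocks are not visible in this flattened view). A minimal, self-contained sketch of the two patterns; the names used here (threshold, stem) are invented for illustration and are not taken from any file in the diff:

# Illustrative sketch only; variable names are made up for the example.
threshold = 0.5
stem = "texture"

# Old style: the asserted condition is wrapped in parentheses with the message
# trailing the closing paren, and long strings use implicit concatenation.
assert (
    threshold > 0.0
), "threshold must be positive"
mtl_old = "newmtl mesh\n" f"map_Kd {stem}.png\n"

# Black 25.11 style: the condition stays on the assert line, the message hugs
# its own parentheses, and adjacent string literals are merged into one.
assert threshold > 0.0, (
    "threshold must be positive"
)
mtl_new = f"newmtl mesh\nmap_Kd {stem}.png\n"

assert mtl_old == mtl_new  # the rewrite is purely cosmetic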

View File

@@ -19,7 +19,6 @@
#
import os
import sys
import unittest.mock as mock
from recommonmark.parser import CommonMarkParser

View File

@@ -48,22 +48,18 @@ The outputs of the experiment are saved and logged in multiple ways:
import logging
import os
import warnings
from dataclasses import field
import hydra
import torch
from accelerate import Accelerator
from omegaconf import DictConfig, OmegaConf
from packaging import version
from pytorch3d.implicitron.dataset.data_source import (
    DataSourceBase,
    ImplicitronDataSource,
)
from pytorch3d.implicitron.models.base_model import ImplicitronModelBase
from pytorch3d.implicitron.models.renderer.multipass_ea import (
    MultiPassEmissionAbsorptionRenderer,
)

View File

@@ -11,7 +11,6 @@ import os
from typing import Optional
import torch.optim
from accelerate import Accelerator
from pytorch3d.implicitron.models.base_model import ImplicitronModelBase
from pytorch3d.implicitron.tools import model_io

View File

@@ -14,9 +14,7 @@ from dataclasses import field
from typing import Any, Dict, List, Optional, Tuple
import torch.optim
from accelerate import Accelerator
from pytorch3d.implicitron.models.base_model import ImplicitronModelBase
from pytorch3d.implicitron.tools import model_io
from pytorch3d.implicitron.tools.config import (

View File

@@ -12,7 +12,6 @@ import unittest
from pathlib import Path
import torch
from hydra import compose, initialize_config_dir
from omegaconf import OmegaConf
from projects.implicitron_trainer.impl.optimizer_factory import (

View File

@@ -21,7 +21,6 @@ from typing import (
)
import torch
from pytorch3d.implicitron.dataset.frame_data import FrameData
from pytorch3d.implicitron.dataset.utils import GenericWorkaround

View File

@@ -25,7 +25,6 @@ from typing import (
import numpy as np
import torch
from pytorch3d.implicitron.dataset import orm_types, types
from pytorch3d.implicitron.dataset.utils import (
    adjust_camera_to_bbox_crop_,

View File

@@ -38,7 +38,6 @@ from pytorch3d.implicitron.dataset.utils import is_known_frame_scalar
from pytorch3d.implicitron.tools.config import registry, ReplaceableBase
from pytorch3d.renderer.camera_utils import join_cameras_as_batch
from pytorch3d.renderer.cameras import CamerasBase
from tqdm import tqdm
@@ -327,9 +326,9 @@ class JsonIndexDataset(DatasetBase, ReplaceableBase):
assert os.path.normpath(
    # pyre-ignore[16]
    self.frame_annots[idx]["frame_annotation"].image.path
-) == os.path.normpath(
-    path
-), f"Inconsistent frame indices {seq_name, frame_no, path}."
+) == os.path.normpath(path), (
+    f"Inconsistent frame indices {seq_name, frame_no, path}."
+)
return idx
dataset_idx = [

View File

@@ -21,7 +21,6 @@ from pytorch3d.renderer.cameras import CamerasBase
from .dataset_map_provider import DatasetMap, DatasetMapProviderBase, PathManagerFactory
from .json_index_dataset import JsonIndexDataset
from .utils import (
    DATASET_TYPE_KNOWN,
    DATASET_TYPE_TEST,

View File

@@ -18,7 +18,6 @@ from typing import Dict, List, Optional, Tuple, Type, Union
import numpy as np
from iopath.common.file_io import PathManager
from omegaconf import DictConfig
from pytorch3d.implicitron.dataset.dataset_map_provider import (
    DatasetMap,
@@ -31,7 +30,6 @@ from pytorch3d.implicitron.tools.config import (
    registry,
    run_auto_creation,
)
from pytorch3d.renderer.cameras import CamerasBase
from tqdm import tqdm

View File

@@ -12,7 +12,6 @@ import torch
from pytorch3d.implicitron.tools.config import registry
from .load_llff import load_llff_data
from .single_sequence_dataset import (
    _interpret_blender_cameras,
    SingleSceneDatasetMapProviderBase,

View File

@@ -8,7 +8,6 @@ import os
import warnings
import numpy as np
from PIL import Image

View File

@@ -13,7 +13,6 @@ import struct
from typing import Optional, Tuple
import numpy as np
from pytorch3d.implicitron.dataset.types import (
    DepthAnnotation,
    ImageAnnotation,
@@ -22,7 +21,6 @@ from pytorch3d.implicitron.dataset.types import (
    VideoAnnotation,
    ViewpointAnnotation,
)
from sqlalchemy import LargeBinary
from sqlalchemy.orm import (
    composite,

View File

@@ -10,7 +10,6 @@ import hashlib
import json
import logging
import os
import urllib
from dataclasses import dataclass, Field, field
from typing import (
@@ -32,13 +31,11 @@ import pandas as pd
import sqlalchemy as sa
import torch
from pytorch3d.implicitron.dataset.dataset_base import DatasetBase
from pytorch3d.implicitron.dataset.frame_data import (
    FrameData,
    FrameDataBuilder,  # noqa
    FrameDataBuilderBase,
)
from pytorch3d.implicitron.tools.config import (
    registry,
    ReplaceableBase,

View File

@@ -12,9 +12,7 @@ import os
from typing import List, Optional, Tuple, Type
import numpy as np
from omegaconf import DictConfig, OmegaConf
from pytorch3d.implicitron.dataset.dataset_map_provider import (
    DatasetMap,
    DatasetMapProviderBase,

View File

@@ -18,7 +18,6 @@ from pytorch3d.implicitron.dataset.dataset_base import DatasetBase
from pytorch3d.implicitron.dataset.dataset_map_provider import DatasetMap
from pytorch3d.implicitron.dataset.frame_data import FrameData
from pytorch3d.implicitron.tools.config import registry, run_auto_creation
from torch.utils.data import DataLoader
logger = logging.getLogger(__name__)

View File

@@ -15,7 +15,6 @@ from typing import List, Optional, Tuple, TypeVar, Union
import numpy as np
import torch
from PIL import Image
from pytorch3d.io import IO
from pytorch3d.renderer.cameras import PerspectiveCameras
from pytorch3d.structures.pointclouds import Pointclouds

View File

@@ -14,7 +14,6 @@ import warnings
from typing import Any, Dict, List, Optional, Tuple
import torch
import tqdm
from pytorch3d.implicitron.evaluation import evaluate_new_view_synthesis as evaluate
from pytorch3d.implicitron.models.base_model import EvaluationMode, ImplicitronModelBase

View File

@@ -10,7 +10,6 @@ from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional
import torch
from pytorch3d.implicitron.models.renderer.base import EvaluationMode
from pytorch3d.implicitron.tools.config import ReplaceableBase
from pytorch3d.renderer.cameras import CamerasBase

View File

@@ -16,7 +16,6 @@ from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union
import torch
from omegaconf import DictConfig
from pytorch3d.implicitron.models.base_model import (
    ImplicitronModelBase,
    ImplicitronRender,
@@ -28,7 +27,6 @@ from pytorch3d.implicitron.models.metrics import (
    RegularizationMetricsBase,
    ViewMetricsBase,
)
from pytorch3d.implicitron.models.renderer.base import (
    BaseRenderer,
    EvaluationMode,
@@ -38,7 +36,6 @@ from pytorch3d.implicitron.models.renderer.base import (
    RenderSamplingMode,
)
from pytorch3d.implicitron.models.renderer.ray_sampler import RaySamplerBase
from pytorch3d.implicitron.models.utils import (
    apply_chunked,
    chunk_generator,
@@ -53,7 +50,6 @@ from pytorch3d.implicitron.tools.config import (
    registry,
    run_auto_creation,
)
from pytorch3d.implicitron.tools.rasterize_mc import rasterize_sparse_ray_bundle
from pytorch3d.renderer import utils as rend_utils
from pytorch3d.renderer.cameras import CamerasBase

View File

@@ -10,7 +10,6 @@ from abc import ABC, abstractmethod
from typing import Optional
from pytorch3d.implicitron.models.renderer.base import ImplicitronRayBundle
from pytorch3d.implicitron.tools.config import ReplaceableBase
from pytorch3d.renderer.cameras import CamerasBase

View File

@@ -16,14 +16,11 @@ This file contains
import logging
from dataclasses import field
from enum import Enum
from typing import Dict, Optional, Tuple
import torch
from omegaconf import DictConfig
from pytorch3d.implicitron.tools.config import (
    Configurable,
    registry,

View File

@@ -11,7 +11,6 @@ import torch
from pytorch3d.implicitron.models.renderer.base import ImplicitronRayBundle
from pytorch3d.implicitron.tools.config import registry
from pytorch3d.renderer.implicit import HarmonicEmbedding
from torch import nn
from .base import ImplicitFunctionBase

View File

@@ -21,7 +21,6 @@ from pytorch3d.renderer.implicit import HarmonicEmbedding
from pytorch3d.renderer.implicit.utils import ray_bundle_to_ray_points
from .base import ImplicitFunctionBase
from .decoding_functions import (  # noqa
    _xavier_init,
    MLPWithInputSkips,

View File

@@ -9,7 +9,6 @@
from typing import Callable, Optional
import torch
import torch.nn.functional as F
from pytorch3d.common.compat import prod
from pytorch3d.implicitron.models.renderer.base import ImplicitronRayBundle

View File

@@ -13,9 +13,7 @@ from dataclasses import fields
from typing import Callable, Dict, Optional, Tuple
import torch
from omegaconf import DictConfig
from pytorch3d.implicitron.models.implicit_function.base import ImplicitFunctionBase
from pytorch3d.implicitron.models.implicit_function.decoding_functions import (
    DecoderFunctionBase,

View File

@@ -17,7 +17,6 @@ from typing import Any, Callable, Dict, List, Optional, Tuple, TYPE_CHECKING, Un
import torch
from omegaconf import DictConfig
from pytorch3d.implicitron.models.base_model import (
    ImplicitronModelBase,
    ImplicitronRender,
@@ -28,7 +27,6 @@ from pytorch3d.implicitron.models.metrics import (
    RegularizationMetricsBase,
    ViewMetricsBase,
)
from pytorch3d.implicitron.models.renderer.base import (
    BaseRenderer,
    EvaluationMode,
@@ -50,7 +48,6 @@ from pytorch3d.implicitron.tools.config import (
    registry,
    run_auto_creation,
)
from pytorch3d.implicitron.tools.rasterize_mc import rasterize_sparse_ray_bundle
from pytorch3d.renderer import utils as rend_utils
from pytorch3d.renderer.cameras import CamerasBase

View File

@@ -11,7 +11,6 @@ import copy
import torch
from pytorch3d.implicitron.models.renderer.base import ImplicitronRayBundle
from pytorch3d.implicitron.tools.config import Configurable, expand_args_fields
from pytorch3d.renderer.implicit.sample_pdf import sample_pdf

View File

@@ -12,7 +12,6 @@ import torch
from pytorch3d.implicitron.models.renderer.base import ImplicitronRayBundle
from pytorch3d.implicitron.tools.config import enable_get_default_args
from pytorch3d.renderer.implicit import HarmonicEmbedding
from torch import nn

View File

@@ -17,11 +17,8 @@ from typing import Any, Dict, Optional, Tuple
import torch
import tqdm
from pytorch3d.common.compat import prod
from pytorch3d.implicitron.models.renderer.base import ImplicitronRayBundle
from pytorch3d.implicitron.tools import image_utils
from pytorch3d.implicitron.tools.utils import cat_dataclass
@@ -83,9 +80,9 @@ def preprocess_input(
if mask_depths and fg_mask is not None and depth_map is not None:
    # mask the depths
-    assert (
-        mask_threshold > 0.0
-    ), "Depths should be masked only with thresholded masks"
+    assert mask_threshold > 0.0, (
+        "Depths should be masked only with thresholded masks"
+    )
    warnings.warn("Masking depths!")
    depth_map = depth_map * fg_mask

View File

@@ -10,7 +10,6 @@ import math
from typing import Optional, Tuple
import pytorch3d
import torch
from pytorch3d.ops import packed_to_padded
from pytorch3d.renderer import PerspectiveCameras

View File

@@ -499,7 +499,7 @@ class StatsJSONEncoder(json.JSONEncoder):
    return enc
else:
    raise TypeError(
-        f"Object of type {o.__class__.__name__} " f"is not JSON serializable"
+        f"Object of type {o.__class__.__name__} is not JSON serializable"
    )

View File

@@ -17,7 +17,6 @@ import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import torch
from PIL import Image
_NO_TORCHVISION = False

View File

@@ -796,7 +796,7 @@ def save_obj(
# Create .mtl file with the material name and texture map filename
# TODO: enable material properties to also be saved.
with _open_file(mtl_path, path_manager, "w") as f_mtl:
-    lines = f"newmtl mesh\n" f"map_Kd {output_path.stem}.png\n"
+    lines = f"newmtl mesh\nmap_Kd {output_path.stem}.png\n"
    f_mtl.write(lines)

View File

@@ -8,11 +8,8 @@
from .chamfer import chamfer_distance
from .mesh_edge_loss import mesh_edge_loss
from .mesh_laplacian_smoothing import mesh_laplacian_smoothing
from .mesh_normal_consistency import mesh_normal_consistency
from .point_mesh_distance import point_mesh_edge_distance, point_mesh_face_distance

View File

@@ -8,17 +8,14 @@
from .ball_query import ball_query
from .cameras_alignment import corresponding_cameras_alignment
from .cubify import cubify
from .graph_conv import GraphConv
from .interp_face_attrs import interpolate_face_attributes
from .iou_box3d import box3d_overlap
from .knn import knn_gather, knn_points
from .laplacian_matrices import cot_laplacian, laplacian, norm_laplacian
from .mesh_face_areas_normals import mesh_face_areas_normals
from .mesh_filtering import taubin_smoothing
from .packed_to_padded import packed_to_padded, padded_to_packed
from .perspective_n_points import efficient_pnp
from .points_alignment import corresponding_points_alignment, iterative_closest_point
@@ -30,9 +27,7 @@ from .points_to_volumes import (
    add_pointclouds_to_volumes,
    add_points_features_to_volume_densities_features,
)
from .sample_farthest_points import sample_farthest_points
from .sample_points_from_meshes import sample_points_from_meshes
from .subdivide_meshes import SubdivideMeshes
from .utils import (
@@ -42,7 +37,6 @@ from .utils import (
    is_pointclouds,
    wmean,
)
from .vert_align import vert_align

View File

@@ -11,9 +11,7 @@ from typing import Optional
import torch
import torch.nn.functional as F
from pytorch3d.common.compat import meshgrid_ij
from pytorch3d.structures import Meshes

View File

@@ -16,9 +16,7 @@ import sys
from typing import Tuple, Union
import torch
from pytorch3d.ops.mesh_face_areas_normals import mesh_face_areas_normals
from pytorch3d.ops.packed_to_padded import packed_to_padded
from pytorch3d.renderer.mesh.rasterizer import Fragments as MeshFragments

View File

@@ -69,7 +69,6 @@ from .mesh import (
    TexturesUV,
    TexturesVertex,
)
from .points import (
    AlphaCompositor,
    NormWeightedCompositor,

View File

@@ -153,12 +153,12 @@ def _pulsar_from_opencv_projection(
# Check image sizes.
image_w = image_size_wh[0, 0]
image_h = image_size_wh[0, 1]
-assert torch.all(
-    image_size_wh[:, 0] == image_w
-), "All images in a batch must have the same width!"
-assert torch.all(
-    image_size_wh[:, 1] == image_h
-), "All images in a batch must have the same height!"
+assert torch.all(image_size_wh[:, 0] == image_w), (
+    "All images in a batch must have the same width!"
+)
+assert torch.all(image_size_wh[:, 1] == image_h), (
+    "All images in a batch must have the same height!"
+)
# Focal length.
fx = camera_matrix[:, 0, 0].unsqueeze(1)
fy = camera_matrix[:, 1, 1].unsqueeze(1)

View File

@@ -12,7 +12,6 @@ from .clip import (
    ClippedFaces,
    convert_clipped_rasterization_to_original_faces,
)
from .rasterize_meshes import rasterize_meshes
from .rasterizer import MeshRasterizer, RasterizationSettings
from .renderer import MeshRenderer, MeshRendererWithFragments

View File

@@ -14,7 +14,6 @@ import torch
from pytorch3d import _C
from ..utils import parse_image_size
from .clip import (
    clip_faces,
    ClipFrustum,

View File

@@ -625,9 +625,7 @@ class TexturesAtlas(TexturesBase):
of length `k`.
"""
if len(faces_ids_list) != len(self.atlas_list()):
-    raise IndexError(
-        "faces_ids_list must be of " "the same length as atlas_list."
-    )
+    raise IndexError("faces_ids_list must be of the same length as atlas_list.")
sub_features = []
for atlas, faces_ids in zip(self.atlas_list(), faces_ids_list):
@@ -1657,7 +1655,7 @@ class TexturesUV(TexturesBase):
raise NotImplementedError("This function does not support multiple maps.")
if len(faces_ids_list) != len(self.faces_uvs_padded()):
    raise IndexError(
-        "faces_uvs_padded must be of " "the same length as face_ids_list."
+        "faces_uvs_padded must be of the same length as face_ids_list."
    )
sub_faces_uvs, sub_verts_uvs, sub_maps = [], [], []
@@ -1871,7 +1869,7 @@ class TexturesVertex(TexturesBase):
""" """
if len(vertex_ids_list) != len(self.verts_features_list()): if len(vertex_ids_list) != len(self.verts_features_list()):
raise IndexError( raise IndexError(
"verts_features_list must be of " "the same length as vertex_ids_list." "verts_features_list must be of the same length as vertex_ids_list."
) )
sub_features = [] sub_features = []

View File

@@ -24,7 +24,6 @@ from typing import Any, Dict
os.environ["PYOPENGL_PLATFORM"] = "egl"
import OpenGL.EGL as egl  # noqa
import pycuda.driver as cuda  # noqa
from OpenGL._opaque import opaque_pointer_cls  # noqa
from OpenGL.raw.EGL._errors import EGLError  # noqa

View File

@@ -17,15 +17,12 @@ import numpy as np
import OpenGL.GL as gl
import pycuda.gl
import torch
import torch.nn as nn
from pytorch3d.structures.meshes import Meshes
from ..cameras import FoVOrthographicCameras, FoVPerspectiveCameras
from ..mesh.rasterizer import Fragments, RasterizationSettings
from ..utils import parse_image_size
from .opengl_utils import _torch_to_opengl, global_device_context_store
# Shader strings, used below to compile an OpenGL program.

View File

@@ -9,9 +9,7 @@
import torch
from .compositor import AlphaCompositor, NormWeightedCompositor
from .pulsar.unified import PulsarPointsRenderer
from .rasterize_points import rasterize_points
from .rasterizer import PointsRasterizationSettings, PointsRasterizer
from .renderer import PointsRenderer

View File

@@ -11,7 +11,6 @@ from typing import List, Optional, Tuple, Union
import numpy as np
import torch
from pytorch3d import _C
from pytorch3d.renderer.mesh.rasterize_meshes import pix_to_non_square_ndc
from ..utils import parse_image_size

View File

@@ -531,9 +531,9 @@ class Meshes:
list of tensors of vertices of shape (V_n, 3).
"""
if self._verts_list is None:
-    assert (
-        self._verts_padded is not None
-    ), "verts_padded is required to compute verts_list."
+    assert self._verts_padded is not None, (
+        "verts_padded is required to compute verts_list."
+    )
    self._verts_list = struct_utils.padded_to_list(
        self._verts_padded, self.num_verts_per_mesh().tolist()
    )
@@ -547,9 +547,9 @@ class Meshes:
list of tensors of faces of shape (F_n, 3).
"""
if self._faces_list is None:
-    assert (
-        self._faces_padded is not None
-    ), "faces_padded is required to compute faces_list."
+    assert self._faces_padded is not None, (
+        "faces_padded is required to compute faces_list."
+    )
    self._faces_list = struct_utils.padded_to_list(
        self._faces_padded, self.num_faces_per_mesh().tolist()
    )
@@ -925,9 +925,9 @@ class Meshes:
verts_list = self.verts_list()
faces_list = self.faces_list()
-assert (
-    faces_list is not None and verts_list is not None
-), "faces_list and verts_list arguments are required"
+assert faces_list is not None and verts_list is not None, (
+    "faces_list and verts_list arguments are required"
+)
if self.isempty():
    self._faces_padded = torch.zeros(

View File

@@ -433,9 +433,9 @@ class Pointclouds:
list of tensors of points of shape (P_n, 3).
"""
if self._points_list is None:
-    assert (
-        self._points_padded is not None
-    ), "points_padded is required to compute points_list."
+    assert self._points_padded is not None, (
+        "points_padded is required to compute points_list."
+    )
    points_list = []
    for i in range(self._N):
        points_list.append(

View File

@@ -12,11 +12,8 @@ from .camera_conversions import (
    pulsar_from_cameras_projection,
    pulsar_from_opencv_projection,
)
from .checkerboard import checkerboard
from .ico_sphere import ico_sphere
from .torus import torus

View File

@@ -8,7 +8,6 @@ from itertools import product
import torch
from fvcore.common.benchmark import benchmark
from pytorch3d.ops.ball_query import ball_query

View File

@@ -8,7 +8,6 @@
import unittest
import torch
from pytorch3d.implicitron.models.utils import preprocess_input, weighted_sum_losses

View File

@@ -11,12 +11,10 @@ from dataclasses import dataclass
from itertools import product
import numpy as np
import torch
from pytorch3d.implicitron.dataset.data_loader_map_provider import (
    DoublePoolBatchSampler,
)
from pytorch3d.implicitron.dataset.dataset_base import DatasetBase
from pytorch3d.implicitron.dataset.frame_data import FrameData
from pytorch3d.implicitron.dataset.scene_batch_sampler import SceneBatchSampler

View File

@@ -7,9 +7,7 @@
import unittest
import numpy as np
import torch
from pytorch3d.implicitron.dataset.utils import (
    bbox_xywh_to_xyxy,
    bbox_xyxy_to_xywh,
@@ -21,7 +19,6 @@ from pytorch3d.implicitron.dataset.utils import (
    rescale_bbox,
    resize_image,
)
from tests.common_testing import TestCaseMixin

View File

@@ -9,7 +9,6 @@ import os
import unittest
import torch
from pytorch3d.implicitron.dataset.data_loader_map_provider import (  # noqa
    SequenceDataLoaderMapProvider,
    SimpleDataLoaderMapProvider,

View File

@@ -8,7 +8,6 @@ import os
import unittest
from pytorch3d.implicitron import eval_demo
from tests.common_testing import interactive_testing_requested
from .common_resources import CO3D_MANIFOLD_PATH

View File

@@ -15,7 +15,6 @@ import unittest
import lpips
import numpy as np
import torch
from pytorch3d.implicitron.dataset.frame_data import FrameData
from pytorch3d.implicitron.dataset.json_index_dataset import JsonIndexDataset
from pytorch3d.implicitron.evaluation.evaluate_new_view_synthesis import eval_batch

View File

@@ -14,7 +14,6 @@ from typing import ClassVar, Optional, Type
import pandas as pd
import pkg_resources
import sqlalchemy as sa
from pytorch3d.implicitron.dataset import types
from pytorch3d.implicitron.dataset.frame_data import FrameData, GenericFrameDataBuilder
from pytorch3d.implicitron.dataset.orm_types import (

View File

@@ -12,7 +12,6 @@ from typing import List
import numpy as np
import torch
from pytorch3d.implicitron.dataset import types
from pytorch3d.implicitron.dataset.dataset_base import FrameData
from pytorch3d.implicitron.dataset.frame_data import FrameDataBuilder
@@ -29,7 +28,6 @@ from pytorch3d.implicitron.dataset.utils import (
)
from pytorch3d.implicitron.tools.config import get_default_args
from pytorch3d.renderer.cameras import PerspectiveCameras
from tests.common_testing import TestCaseMixin
from tests.implicitron.common_resources import get_skateboard_data

View File

@@ -13,7 +13,6 @@ import unittest
from typing import List
import numpy as np
import torch
import torchvision
from PIL import Image

View File

@@ -13,7 +13,6 @@ from typing import Tuple
import torch
from pytorch3d.implicitron.dataset.json_index_dataset import JsonIndexDataset
from pytorch3d.implicitron.dataset.visualize import get_implicitron_sequence_pointcloud
from pytorch3d.implicitron.models.visualization.render_flyaround import render_flyaround
from pytorch3d.implicitron.tools.config import expand_args_fields
from pytorch3d.implicitron.tools.point_cloud_utils import render_point_cloud_pytorch3d

View File

@@ -8,9 +8,7 @@
import unittest
import numpy as np
import torch
from pytorch3d.implicitron.models.renderer.base import (
    approximate_conical_frustum_as_gaussians,
    compute_3d_diagonal_covariance_gaussian,
@@ -18,7 +16,6 @@ from pytorch3d.implicitron.models.renderer.base import (
    ImplicitronRayBundle,
)
from pytorch3d.implicitron.models.renderer.ray_sampler import AbstractMaskRaySampler
from tests.common_testing import TestCaseMixin

View File

@@ -7,7 +7,6 @@
import unittest
from itertools import product
from typing import Tuple
from unittest.mock import patch
import torch
@@ -18,7 +17,6 @@ from pytorch3d.implicitron.models.renderer.ray_sampler import (
    compute_radii,
    NearFarRaySampler,
)
from pytorch3d.renderer.cameras import (
    CamerasBase,
    FoVOrthographicCameras,
@@ -28,7 +26,6 @@ from pytorch3d.renderer.cameras import (
)
from pytorch3d.renderer.implicit.utils import HeterogeneousRayBundle
from tests.common_camera_utils import init_random_cameras
from tests.common_testing import TestCaseMixin
CAMERA_TYPES = (

View File

@@ -7,7 +7,6 @@
import unittest
import numpy as np
from pytorch3d.implicitron.dataset.orm_types import ArrayTypeFactory, TupleTypeFactory

View File

@@ -8,7 +8,6 @@ import unittest
import torch
from pytorch3d.implicitron.tools.point_cloud_utils import get_rgbd_point_cloud
from pytorch3d.renderer.cameras import PerspectiveCameras
from tests.common_testing import TestCaseMixin

View File

@@ -8,7 +8,6 @@ import unittest
from itertools import product
import torch
from pytorch3d.implicitron.models.renderer.ray_point_refiner import (
    apply_blurpool_on_weights,
    RayPointRefiner,

View File

@@ -10,9 +10,7 @@ import unittest
from collections import Counter
import pkg_resources
import torch
from pytorch3d.implicitron.dataset.sql_dataset import SqlIndexDataset
NO_BLOBS_KWARGS = {

View File

@@ -16,7 +16,6 @@ from pytorch3d.implicitron.models.implicit_function.scene_representation_network
from pytorch3d.implicitron.models.renderer.ray_sampler import ImplicitronRayBundle
from pytorch3d.implicitron.tools.config import get_default_args
from pytorch3d.renderer import PerspectiveCameras
from tests.common_testing import TestCaseMixin
_BATCH_SIZE: int = 3

View File

@@ -8,13 +8,11 @@
import unittest
import torch
from omegaconf import DictConfig, OmegaConf
from pytorch3d.implicitron.models.implicit_function.voxel_grid_implicit_function import (
    VoxelGridImplicitFunction,
)
from pytorch3d.implicitron.models.renderer.base import ImplicitronRayBundle
from pytorch3d.implicitron.tools.config import expand_args_fields, get_default_args
from pytorch3d.renderer import ray_bundle_to_ray_points
from tests.common_testing import TestCaseMixin

View File

@@ -10,7 +10,6 @@ from typing import Optional, Tuple
import torch
from omegaconf import DictConfig, OmegaConf
from pytorch3d.implicitron.models.implicit_function.utils import (
    interpolate_line,
    interpolate_plane,
@@ -22,7 +21,6 @@ from pytorch3d.implicitron.models.implicit_function.voxel_grid import (
    VMFactorizedVoxelGrid,
    VoxelGridModule,
)
from pytorch3d.implicitron.tools.config import expand_args_fields, get_default_args
from tests.common_testing import TestCaseMixin

View File

@@ -60,7 +60,6 @@ from pytorch3d.transforms.rotation_conversions import random_rotations
from pytorch3d.transforms.so3 import so3_exp_map
from .common_camera_utils import init_random_cameras
from .common_testing import TestCaseMixin

View File

@@ -673,9 +673,7 @@ class TestMeshPlyIO(TestCaseMixin, unittest.TestCase):
def test_load_simple_binary(self):
    for big_endian in [True, False]:
-        verts = (
-            "0 0 0 " "0 0 1 " "0 1 1 " "0 1 0 " "1 0 0 " "1 0 1 " "1 1 1 " "1 1 0"
-        ).split()
+        verts = ("0 0 0 0 0 1 0 1 1 0 1 0 1 0 0 1 0 1 1 1 1 1 1 0").split()
        faces = (
            "4 0 1 2 3 "
            "4 7 6 5 4 "
@@ -688,7 +686,7 @@ class TestMeshPlyIO(TestCaseMixin, unittest.TestCase):
"3 4 5 1" "3 4 5 1"
).split() ).split()
short_one = b"\00\01" if big_endian else b"\01\00" short_one = b"\00\01" if big_endian else b"\01\00"
mixed_data = b"\00\00" b"\03\03" + (short_one + b"\00\01\01\01" b"\00\02") mixed_data = b"\00\00\03\03" + (short_one + b"\00\01\01\01\00\02")
minus_one_data = b"\xff" * 14 minus_one_data = b"\xff" * 14
endian_char = ">" if big_endian else "<" endian_char = ">" if big_endian else "<"
format = ( format = (

View File

@@ -604,9 +604,9 @@ class TestRaysampling(TestCaseMixin, unittest.TestCase):
# test weather they are of the correct shape
for attr in ("origins", "directions", "lengths", "xys"):
    tensor = getattr(ray_bundle, attr)
-    assert tensor.shape[:2] == torch.Size(
-        (n_rays_total, 1)
-    ), tensor.shape
+    assert tensor.shape[:2] == torch.Size((n_rays_total, 1)), (
+        tensor.shape
+    )
# if two camera ids are same than origins should also be the same
# directions and xys are always different and lengths equal

View File

@@ -12,7 +12,6 @@ Sanity checks for output images from the renderer.
import os
import unittest
from collections import namedtuple
from itertools import product
import numpy as np