From da9584357e327cdb2b8649ed4c27dbb042b7dc34 Mon Sep 17 00:00:00 2001
From: Jeremy Reizenstein
Date: Fri, 5 Aug 2022 08:58:17 -0700
Subject: [PATCH] circleci fixes

Summary:
Misc fixes.
- most important: the mac image is gone so switch to a newer one.
- torch.concat is new; was used accidentally
- remove lpips from testing in meta.yaml as it is breaking the conda test. Better to leave the relevant tests failing in OSS.
- TypedDict usage is breaking implicitron on Python 3.7.

Reviewed By: patricklabatut

Differential Revision: D38458164

fbshipit-source-id: b16c26453a743b9a771e2a6787b9a4d2a52e41c2
---
 .circleci/config.in.yml                       |  2 +-
 .circleci/config.yml                          |  2 +-
 packaging/pytorch3d/meta.yaml                 |  1 -
 .../dataset/data_loader_map_provider.py       |  2 +-
 .../implicitron/dataset/json_index_dataset.py | 14 ++++++++++----
 5 files changed, 13 insertions(+), 8 deletions(-)

diff --git a/.circleci/config.in.yml b/.circleci/config.in.yml
index 7aae4a1a..b7dc6f05 100644
--- a/.circleci/config.in.yml
+++ b/.circleci/config.in.yml
@@ -159,7 +159,7 @@ jobs:
   binary_macos_wheel:
     <<: *binary_common
     macos:
-      xcode: "12.0"
+      xcode: "13.4.1"
     steps:
       - checkout
       - run:
diff --git a/.circleci/config.yml b/.circleci/config.yml
index 8c846628..45c98665 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -159,7 +159,7 @@ jobs:
   binary_macos_wheel:
     <<: *binary_common
     macos:
-      xcode: "12.0"
+      xcode: "13.4.1"
     steps:
       - checkout
       - run:
diff --git a/packaging/pytorch3d/meta.yaml b/packaging/pytorch3d/meta.yaml
index 6cd0c69a..8604127d 100644
--- a/packaging/pytorch3d/meta.yaml
+++ b/packaging/pytorch3d/meta.yaml
@@ -47,7 +47,6 @@ test:
     - imageio
     - hydra-core
     - accelerate
-    - lpips
   commands:
     #pytest .
     python -m unittest discover -v -s tests -t .
diff --git a/pytorch3d/implicitron/dataset/data_loader_map_provider.py b/pytorch3d/implicitron/dataset/data_loader_map_provider.py
index 2a0de2ec..754d71ac 100644
--- a/pytorch3d/implicitron/dataset/data_loader_map_provider.py
+++ b/pytorch3d/implicitron/dataset/data_loader_map_provider.py
@@ -199,7 +199,7 @@ class DoublePoolBatchSampler(Sampler[List[int]]):
                 torch.randperm(len(self.first_indices), generator=self.generator)
                 for _ in range(n_copies)
             ]
-            i_first = torch.concat(raw_indices)[:num_batches]
+            i_first = torch.cat(raw_indices)[:num_batches]
         else:
             i_first = torch.randperm(len(self.first_indices), generator=self.generator)
         first_indices = [self.first_indices[i] for i in i_first]
diff --git a/pytorch3d/implicitron/dataset/json_index_dataset.py b/pytorch3d/implicitron/dataset/json_index_dataset.py
index ae08ae36..6e4a0424 100644
--- a/pytorch3d/implicitron/dataset/json_index_dataset.py
+++ b/pytorch3d/implicitron/dataset/json_index_dataset.py
@@ -24,7 +24,7 @@ from typing import (
     Sequence,
     Tuple,
     Type,
-    TypedDict,
+    TYPE_CHECKING,
     Union,
 )
 
@@ -45,9 +45,15 @@ from .utils import is_known_frame_scalar
 logger = logging.getLogger(__name__)
 
 
-class FrameAnnotsEntry(TypedDict):
-    subset: Optional[str]
-    frame_annotation: types.FrameAnnotation
+if TYPE_CHECKING:
+    from typing import TypedDict
+
+    class FrameAnnotsEntry(TypedDict):
+        subset: Optional[str]
+        frame_annotation: types.FrameAnnotation
+
+else:
+    FrameAnnotsEntry = dict
 
 
 @registry.register
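
Note (not part of the patch): the "torch.concat is new" bullet refers to torch.concat existing only in recent PyTorch releases as an alias of torch.cat; on older installs the attribute is missing entirely, which is why the data loader change falls back to torch.cat. A minimal sketch of the portable call, assuming nothing beyond a stock PyTorch install:

    import torch

    # torch.cat has been available since early PyTorch releases;
    # torch.concat is only a newer alias and raises AttributeError on old installs.
    parts = [torch.randperm(5), torch.randperm(5)]
    i_first = torch.cat(parts)[:7]  # concatenate the permutations, keep the first 7 indices
    print(i_first.shape)            # torch.Size([7])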