Fixed typing to have compatibility with OmegaConf 2.2.2 in Pytorch3D

Summary:
I tried to run `experiment.py` and `pytorch3d_implicitron_runner` and faced the failure with this traceback: https://www.internalfb.com/phabricator/paste/view/P515734086

It seems to be due to the new release of OmegaConf (version=2.2.2) which requires different typing. This fix helped to overcome it.

Reviewed By: bottler

Differential Revision: D37881644

fbshipit-source-id: be0cd4ced0526f8382cea5bdca9b340e93a2fba2
This commit is contained in:
Iurii Makarov 2022-07-15 05:55:03 -07:00 committed by Facebook GitHub Bot
parent 379c8b2780
commit 0f966217e5
2 changed files with 3 additions and 3 deletions

View File

@@ -5,7 +5,7 @@
 # LICENSE file in the root directory of this source tree.

 from dataclasses import field
-from typing import Tuple
+from typing import Any, Dict, Tuple

 from omegaconf import DictConfig
 from pytorch3d.implicitron.dataset.data_source import ImplicitronDataSource
@@ -41,7 +41,7 @@ class ExperimentConfig(Configurable):
     clip_grad: float = 0.0
     camera_difficulty_bin_breaks: Tuple[float, ...] = 0.97, 0.98
-    hydra: dict = field(
+    hydra: Dict[str, Any] = field(
         default_factory=lambda: {
             "run": {"dir": "."},  # Make hydra not change the working dir.
             "output_subdir": None,  # disable storing the .hydra logs

View File

@@ -25,7 +25,7 @@ def init_optimizer(
     gamma: float = 0.1,
     momentum: float = 0.9,
     betas: Tuple[float, ...] = (0.9, 0.999),
-    milestones: tuple = (),
+    milestones: Tuple[int, ...] = (),
     max_epochs: int = 1000,
 ):
     """
""" """