mirror of
https://github.com/facebookresearch/pytorch3d.git
synced 2025-08-02 03:42:50 +08:00
Fixed typing for compatibility with OmegaConf 2.2.2 in PyTorch3D
Summary: I tried to run `experiment.py` and `pytorch3d_implicitron_runner` and hit a failure with this traceback: https://www.internalfb.com/phabricator/paste/view/P515734086 It appears to be caused by the new release of OmegaConf (version 2.2.2), which requires different typing. This fix resolves it.

Reviewed By: bottler

Differential Revision: D37881644

fbshipit-source-id: be0cd4ced0526f8382cea5bdca9b340e93a2fba2
parent 379c8b2780
commit 0f966217e5
@@ -5,7 +5,7 @@
 # LICENSE file in the root directory of this source tree.
 
 from dataclasses import field
-from typing import Tuple
+from typing import Any, Dict, Tuple
 
 from omegaconf import DictConfig
 from pytorch3d.implicitron.dataset.data_source import ImplicitronDataSource
@@ -41,7 +41,7 @@ class ExperimentConfig(Configurable):
     clip_grad: float = 0.0
     camera_difficulty_bin_breaks: Tuple[float, ...] = 0.97, 0.98
 
-    hydra: dict = field(
+    hydra: Dict[str, Any] = field(
         default_factory=lambda: {
             "run": {"dir": "."},  # Make hydra not change the working dir.
             "output_subdir": None,  # disable storing the .hydra logs
@@ -25,7 +25,7 @@ def init_optimizer(
     gamma: float = 0.1,
     momentum: float = 0.9,
     betas: Tuple[float, ...] = (0.9, 0.999),
-    milestones: tuple = (),
+    milestones: Tuple[int, ...] = (),
     max_epochs: int = 1000,
 ):
     """
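For context, a minimal sketch of the typing requirement the commit works around (not part of the commit; the `Cfg` dataclass below is hypothetical): OmegaConf structured configs validate type annotations, and under OmegaConf 2.2.x parameterized generics such as `Dict[str, Any]` and `Tuple[int, ...]` pass validation where the bare `dict` and `tuple` annotations above failed.

from dataclasses import dataclass, field
from typing import Any, Dict, Tuple

from omegaconf import OmegaConf


# Hypothetical config mirroring the annotations the commit switches to.
@dataclass
class Cfg:
    # Parameterized generics are accepted by OmegaConf structured configs;
    # per the commit, bare `tuple` / `dict` annotations fail under 2.2.2.
    milestones: Tuple[int, ...] = ()
    hydra: Dict[str, Any] = field(
        default_factory=lambda: {"run": {"dir": "."}}
    )


cfg = OmegaConf.structured(Cfg)  # validates the annotations at creation time
print(OmegaConf.to_yaml(cfg))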