foreach optimizers

Summary: Allow using the new `foreach` option on optimizers.
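
For context, `foreach` is the keyword argument that PyTorch's built-in optimizers (Adam, SGD, Adagrad, ...) accept to switch on their multi-tensor ("foreach") update implementation, which fuses the per-parameter operations of the step into fewer kernel launches. A minimal sketch of the option at the `torch.optim` level (the toy model and learning rate are just placeholders):

import torch

# Any nn.Module works; this toy layer just provides parameters to optimize.
model = torch.nn.Linear(2, 2)

# foreach=True requests the multi-tensor implementation of the update step;
# foreach=None (the default) lets PyTorch choose the implementation itself.
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3, foreach=True)

loss = model(torch.randn(4, 2)).sum()
loss.backward()
optimizer.step()
optimizer.zero_grad()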

Reviewed By: shapovalov

Differential Revision: D39694843

fbshipit-source-id: 97109c245b669bc6edff0f246893f95b7ae71f90
Jeremy Reizenstein
2022-09-22 05:11:56 -07:00
committed by Facebook GitHub Bot
parent db3c12abfb
commit 209c160a20
3 changed files with 33 additions and 13 deletions


@@ -406,6 +406,7 @@ optimizer_factory_ImplicitronOptimizerFactory_args:
  weight_decay: 0.0
  linear_exponential_lr_milestone: 200
  linear_exponential_start_gamma: 0.1
  foreach: true
training_loop_ImplicitronTrainingLoop_args:
  evaluator_class_type: ImplicitronEvaluator
  evaluator_ImplicitronEvaluator_args:
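
A config field like this typically takes effect by being forwarded as a keyword argument when the factory constructs the optimizer. The sketch below is illustrative only, not the actual `ImplicitronOptimizerFactory` code; the `breed` and `foreach` names follow the config above, and the other defaults are placeholders.

from typing import Optional

import torch


def make_optimizer(
    model: torch.nn.Module,
    breed: str = "Adam",
    lr: float = 1e-3,
    weight_decay: float = 0.0,
    foreach: Optional[bool] = True,
) -> torch.optim.Optimizer:
    # Forward the configured `foreach` value straight to torch.optim;
    # Adam, SGD and Adagrad all accept it as a constructor argument.
    kwargs = dict(lr=lr, weight_decay=weight_decay, foreach=foreach)
    if breed == "Adam":
        return torch.optim.Adam(model.parameters(), **kwargs)
    if breed == "SGD":
        return torch.optim.SGD(model.parameters(), momentum=0.9, **kwargs)
    if breed == "Adagrad":
        return torch.optim.Adagrad(model.parameters(), **kwargs)
    raise ValueError(f"Unknown optimizer breed {breed!r}")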


@@ -9,13 +9,17 @@ import tempfile
import unittest
from pathlib import Path
import torch
from hydra import compose, initialize_config_dir
from omegaconf import OmegaConf
from projects.implicitron_trainer.impl.optimizer_factory import (
    ImplicitronOptimizerFactory,
)
from .. import experiment
from .utils import interactive_testing_requested, intercept_logs

internal = os.environ.get("FB_TEST", False)
@@ -151,6 +155,16 @@ class TestExperiment(unittest.TestCase):
        with initialize_config_dir(config_dir=str(IMPLICITRON_CONFIGS_DIR)):
            compose(file.name)

    def test_optimizer_factory(self):
        model = torch.nn.Linear(2, 2)
        adam, sched = ImplicitronOptimizerFactory(breed="Adam")(0, model)
        self.assertIsInstance(adam, torch.optim.Adam)
        sgd, sched = ImplicitronOptimizerFactory(breed="SGD")(0, model)
        self.assertIsInstance(sgd, torch.optim.SGD)
        adagrad, sched = ImplicitronOptimizerFactory(breed="Adagrad")(0, model)
        self.assertIsInstance(adagrad, torch.optim.Adagrad)


class TestNerfRepro(unittest.TestCase):
    @unittest.skip("This test runs full blender training.")
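
If one wanted to extend the new test above to cover the flag itself, a sketch along the following lines should work. It assumes, as the config diff suggests, that `ImplicitronOptimizerFactory` exposes a `foreach` field and forwards it to the `torch.optim` constructor, in which case PyTorch records the value in the optimizer's `defaults` dict.

import unittest

import torch

from projects.implicitron_trainer.impl.optimizer_factory import (
    ImplicitronOptimizerFactory,
)


class TestOptimizerFactoryForeach(unittest.TestCase):
    def test_foreach_reaches_torch_optim(self):
        # Hypothetical check: if the factory passes `foreach` through as a
        # constructor kwarg, torch.optim.Adam stores it in `.defaults`.
        model = torch.nn.Linear(2, 2)
        adam, _ = ImplicitronOptimizerFactory(breed="Adam", foreach=True)(0, model)
        self.assertIsInstance(adam, torch.optim.Adam)
        self.assertTrue(adam.defaults.get("foreach"))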