Mirror of https://github.com/facebookresearch/pytorch3d.git
Move load_stats to TrainingLoop
Summary: Stats are logically connected to the training loop, not to the model, so load_stats is moved to the training loop. resume_epoch is also removed from OptimizerFactory in favor of a single owner, ModelFactory, which removes the need for config consistency checks.

Reviewed By: kjchalup

Differential Revision: D38313475

fbshipit-source-id: a1d188a63e28459df381ff98ad8acdcdb14887b7
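For context, the sketch below illustrates the ownership described in the summary: the training loop loads and owns its statistics, while resume_epoch lives only on the model factory. It is a minimal, hypothetical sketch; Stats, ModelFactory, and TrainingLoop here are illustrative stand-ins, not the actual pytorch3d.implicitron classes or signatures.

# Hypothetical sketch of the ownership after this change; not the pytorch3d API.
from dataclasses import dataclass, field
from typing import Optional


@dataclass
class Stats:
    # Stand-in for a training-statistics accumulator.
    history: dict = field(default_factory=dict)

    @classmethod
    def load(cls, path: str) -> "Stats":
        # A real implementation would deserialize previously saved stats here.
        return cls()


@dataclass
class ModelFactory:
    # resume_epoch now has a single owner: the model factory.
    resume_epoch: Optional[int] = None

    def build(self):
        ...  # construct (and optionally resume) the model


@dataclass
class TrainingLoop:
    # Stats belong to the training loop, not to the model.
    stats: Optional[Stats] = None

    def load_stats(self, stats_path: str) -> None:
        self.stats = Stats.load(stats_path)

    def run(self, model) -> None:
        ...  # per-epoch loop that updates self.stats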
committed by: Facebook GitHub Bot
parent: 760305e044
commit: c3f8dad55c
@@ -1,3 +1,24 @@
+log_vars:
+  - loss_rgb_psnr_fg
+  - loss_rgb_psnr
+  - loss_rgb_mse
+  - loss_rgb_huber
+  - loss_depth_abs
+  - loss_depth_abs_fg
+  - loss_mask_neg_iou
+  - loss_mask_bce
+  - loss_mask_beta_prior
+  - loss_eikonal
+  - loss_density_tv
+  - loss_depth_neg_penalty
+  - loss_autodecoder_norm
+  - loss_prev_stage_rgb_mse
+  - loss_prev_stage_rgb_psnr_fg
+  - loss_prev_stage_rgb_psnr
+  - loss_prev_stage_mask_bce
+  - objective
+  - epoch
+  - sec/it
 mask_images: true
 mask_depths: true
 render_image_width: 400
@@ -28,27 +49,6 @@ loss_weights:
   loss_prev_stage_rgb_mse: 1.0
   loss_mask_bce: 0.0
   loss_prev_stage_mask_bce: 0.0
-log_vars:
-  - loss_rgb_psnr_fg
-  - loss_rgb_psnr
-  - loss_rgb_mse
-  - loss_rgb_huber
-  - loss_depth_abs
-  - loss_depth_abs_fg
-  - loss_mask_neg_iou
-  - loss_mask_bce
-  - loss_mask_beta_prior
-  - loss_eikonal
-  - loss_density_tv
-  - loss_depth_neg_penalty
-  - loss_autodecoder_norm
-  - loss_prev_stage_rgb_mse
-  - loss_prev_stage_rgb_psnr_fg
-  - loss_prev_stage_rgb_psnr
-  - loss_prev_stage_mask_bce
-  - objective
-  - epoch
-  - sec/it
 global_encoder_SequenceAutodecoder_args:
   autodecoder_args:
     encoding_dim: 0
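The two hunks above move the log_vars list (the quantities written to the training log) from below the loss weights to the top of the config file; the list contents are unchanged. As a hedged illustration of how such a fragment is consumed, the snippet below loads a reduced copy of it with OmegaConf, which the test below also uses; the loading code is not taken from the repo.

# Hedged illustration: read a log_vars list from a YAML fragment with OmegaConf.
# The keys mirror the diff above; the loading code is not from pytorch3d.
from omegaconf import OmegaConf

cfg = OmegaConf.create("""
log_vars:
  - loss_rgb_mse
  - objective
  - sec/it
mask_images: true
render_image_width: 400
""")
print(list(cfg.log_vars))      # ['loss_rgb_mse', 'objective', 'sec/it']
print(cfg.render_image_width)  # 400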
@@ -90,5 +90,5 @@ class TestGenericModel(unittest.TestCase):
         remove_unused_components(instance_args)
         yaml = OmegaConf.to_yaml(instance_args, sort_keys=False)
         if DEBUG:
-            (DATA_DIR / "overrides.yaml_").write_text(yaml)
+            (DATA_DIR / "overrides_.yaml").write_text(yaml)
         self.assertEqual(yaml, (DATA_DIR / "overrides.yaml").read_text())
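The test hunk above only renames the scratch file written in DEBUG mode (overrides.yaml_ becomes overrides_.yaml); the assertion against the committed reference file is unchanged. For readers unfamiliar with this golden-file pattern, here is a minimal, self-contained sketch of it; DEBUG, DATA_DIR, and the file names are illustrative stand-ins, not the actual test fixtures.

# Minimal golden-file test pattern (illustrative; not the pytorch3d test).
import unittest
from pathlib import Path

DEBUG = False
DATA_DIR = Path(__file__).resolve().parent / "data"


class TestGoldenFile(unittest.TestCase):
    def test_matches_reference(self):
        produced = "key: value\n"  # stand-in for the generated YAML dump
        if DEBUG:
            # Write a scratch copy next to the reference so it can be
            # inspected and, if correct, promoted to the new reference.
            (DATA_DIR / "reference_.yaml").write_text(produced)
        self.assertEqual(produced, (DATA_DIR / "reference.yaml").read_text())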