From 3239594f78632fe207195e4622c00fb1656c4675 Mon Sep 17 00:00:00 2001
From: Roman Shapovalov
Date: Thu, 26 Jan 2023 03:00:46 -0800
Subject: [PATCH] Fix: Correct concatenation of datasets in train conditioning

Summary: ChainDataset is an iterable-style dataset, and it does not work with a custom batch sampler.

Reviewed By: bottler

Differential Revision: D42742315

fbshipit-source-id: 40a715c8d24abe72cb2777634247d7467f628564
---
 pytorch3d/implicitron/dataset/data_loader_map_provider.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pytorch3d/implicitron/dataset/data_loader_map_provider.py b/pytorch3d/implicitron/dataset/data_loader_map_provider.py
index 8c0841cc..50a79218 100644
--- a/pytorch3d/implicitron/dataset/data_loader_map_provider.py
+++ b/pytorch3d/implicitron/dataset/data_loader_map_provider.py
@@ -12,7 +12,7 @@ import torch
 from pytorch3d.implicitron.tools.config import registry, ReplaceableBase
 from torch.utils.data import (
     BatchSampler,
-    ChainDataset,
+    ConcatDataset,
     DataLoader,
     RandomSampler,
     Sampler,
@@ -482,7 +482,7 @@ class SequenceDataLoaderMapProvider(DataLoaderMapProviderBase):
             num_batches=num_batches,
         )
         return DataLoader(
-            ChainDataset([dataset, train_dataset]),
+            ConcatDataset([dataset, train_dataset]),
             batch_sampler=sampler,
             **data_loader_kwargs,
         )
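
Note (not part of the patch): a minimal, standalone sketch of why the swap matters. DataLoader's batch_sampler option requires a map-style dataset (one with __len__ and __getitem__), which ConcatDataset provides; ChainDataset is iterable-style, and DataLoader rejects a batch_sampler for it at construction time. The toy datasets, the Stream class, and the printed values below are illustrative assumptions, not Implicitron code.

# Sketch only: toy stand-ins for `dataset` and `train_dataset`.
import torch
from torch.utils.data import (
    BatchSampler,
    ChainDataset,
    ConcatDataset,
    DataLoader,
    IterableDataset,
    SequentialSampler,
    TensorDataset,
)

# Two small map-style datasets to concatenate.
map_a = TensorDataset(torch.arange(4))
map_b = TensorDataset(torch.arange(4, 8))

# ConcatDataset is map-style, so a custom batch sampler works as expected.
concat = ConcatDataset([map_a, map_b])
batch_sampler = BatchSampler(
    SequentialSampler(concat), batch_size=3, drop_last=False
)
loader = DataLoader(concat, batch_sampler=batch_sampler)
print([batch[0].tolist() for batch in loader])  # [[0, 1, 2], [3, 4, 5], [6, 7]]


class Stream(IterableDataset):
    """Toy iterable-style dataset for the ChainDataset case."""

    def __init__(self, n):
        self.n = n

    def __iter__(self):
        return iter(range(self.n))


# ChainDataset chains iterable-style datasets; DataLoader refuses a
# batch_sampler for iterable-style input and raises ValueError in __init__.
chained = ChainDataset([Stream(4), Stream(4)])
try:
    DataLoader(chained, batch_sampler=batch_sampler)
except ValueError as err:
    print("ChainDataset + batch_sampler:", err)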