From 8df4f890b837ea07f8ff2d4111351111c105fe57 Mon Sep 17 00:00:00 2001
From: Shuai Yang
Date: Wed, 14 Aug 2024 22:46:29 -0700
Subject: [PATCH] Don't skip_torchrec when using torchrec PT2 pipeline

Differential Revision: D61219995
---
 torchrec/distributed/train_pipeline/train_pipelines.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/torchrec/distributed/train_pipeline/train_pipelines.py b/torchrec/distributed/train_pipeline/train_pipelines.py
index 8a75e8602..094b4a236 100644
--- a/torchrec/distributed/train_pipeline/train_pipelines.py
+++ b/torchrec/distributed/train_pipeline/train_pipelines.py
@@ -248,6 +248,7 @@ def progress(self, dataloader_iter: Iterator[In]) -> Out:
             torch._dynamo.config.force_unspec_int_unbacked_size_like_on_torchrec_kjt = (
                 True
             )
+            torch._dynamo.config.skip_torchrec = False

             # Importing only before compilation to not slow-done train_pipelines import
             torch.ops.import_module("fbgemm_gpu.sparse_ops")
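
For context, a minimal sketch of where the added flag matters, assuming (as the subject line suggests) that skip_torchrec=True makes Dynamo skip tracing into torchrec code and that the PT2 pipeline therefore needs it turned off before compiling. The plain nn.Linear stand-in model and the "eager" backend below are hypothetical illustration, not part of this patch or of torchrec:

    import torch

    # Configure Dynamo the way the patched PT2 pipeline does before compiling,
    # so torchrec ops are traced through rather than skipped.
    torch._dynamo.config.force_unspec_int_unbacked_size_like_on_torchrec_kjt = True
    torch._dynamo.config.skip_torchrec = False  # the line this patch adds

    model = torch.nn.Linear(8, 4)  # stand-in for a sharded torchrec model
    compiled = torch.compile(model, backend="eager")
    out = compiled(torch.randn(2, 8))

Both config flags are set right before compilation (inside progress()) rather than at import time, consistent with the existing comment about not slowing down the train_pipelines import.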