From 47e04af63ae7d51ab85443b5e0bc4d2aa3dc38bf Mon Sep 17 00:00:00 2001
From: Shuai Yang
Date: Mon, 19 Aug 2024 11:25:18 -0700
Subject: [PATCH] turn off optimize ddp

---
 torchrec/distributed/train_pipeline/train_pipelines.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/torchrec/distributed/train_pipeline/train_pipelines.py b/torchrec/distributed/train_pipeline/train_pipelines.py
index a59ad72f8..86c32894b 100644
--- a/torchrec/distributed/train_pipeline/train_pipelines.py
+++ b/torchrec/distributed/train_pipeline/train_pipelines.py
@@ -249,6 +249,7 @@ def progress(self, dataloader_iter: Iterator[In]) -> Out:
                 True
             )
             torch._dynamo.config.skip_torchrec = False
+            torch._dynamo.config.optimize_ddp = False
 
             # Importing only before compilation to not slow-done train_pipelines import
             torch.ops.import_module("fbgemm_gpu.sparse_ops")
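
Note (not part of the patch): the sketch below is a minimal, standalone illustration of the flag this patch flips. It sets `torch._dynamo.config.optimize_ddp = False` before calling `torch.compile`, as the patched pipeline code does. `ToyModule` is a hypothetical stand-in for the sharded model handled by the TorchRec pipeline; the real setting only changes compilation behavior for models wrapped in `DistributedDataParallel` under an initialized process group, which this single-process sketch omits so it stays runnable.

```python
# Minimal sketch, assuming a plain PyTorch install. ToyModule is a
# hypothetical placeholder, not the TorchRec pipeline's model.
import torch


class ToyModule(torch.nn.Module):
    def __init__(self) -> None:
        super().__init__()
        self.linear = torch.nn.Linear(8, 8)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.linear(x)


if __name__ == "__main__":
    # Mirror the patched line: disable dynamo's DDP-aware graph splitting
    # before compiling the model.
    torch._dynamo.config.optimize_ddp = False

    model = torch.compile(ToyModule())
    out = model(torch.randn(2, 8))
    print(out.shape)
```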