diff --git a/intermediate_source/torchrec_intro_tutorial.py b/intermediate_source/torchrec_intro_tutorial.py
index 5f9464decd..748b8f9697 100644
--- a/intermediate_source/torchrec_intro_tutorial.py
+++ b/intermediate_source/torchrec_intro_tutorial.py
@@ -744,7 +744,7 @@ def _wait_impl(self) -> torch.Tensor:
 # ``EmbeddingBagCollection`` to generate a
 # ``ShardedEmbeddingBagCollection`` module. This workflow is fine, but
 # typically when implementing model parallel,
-# `DistributedModelParallel <https://pytorch.org/torchrec/torchrec.distributed.html#torchrec.distributed.model_parallel.DistributedModelParallel>`__
+# `DistributedModelParallel <https://docs.pytorch.org/torchrec/model-parallel-api-reference.html#torchrec.distributed.model_parallel.DistributedModelParallel>`__
 # (DMP) is used as the standard interface. When wrapping your model (in
 # our case ``ebc``), with DMP, the following will occur:
 #