From aa2c771bf22a2edafe6bcc8f53a90a359b0c6090 Mon Sep 17 00:00:00 2001
From: Andrew Gu
Date: Thu, 27 May 2021 22:58:02 +0000
Subject: [PATCH] Fix comment from "point-to-point" -> "collective" for
 describing all-reduce

---
 intermediate_source/dist_tuto.rst | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/intermediate_source/dist_tuto.rst b/intermediate_source/dist_tuto.rst
index f680be4fb77..ddcda7b4c0c 100644
--- a/intermediate_source/dist_tuto.rst
+++ b/intermediate_source/dist_tuto.rst
@@ -207,7 +207,7 @@ to obtain the sum of all tensors at all processes, we can use the
 
     """ All-Reduce example."""
     def run(rank, size):
-        """ Simple point-to-point communication. """
+        """ Simple collective communication. """
         group = dist.new_group([0, 1])
         tensor = torch.ones(1)
         dist.all_reduce(tensor, op=dist.ReduceOp.SUM, group=group)
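
Note: for reference, a self-contained sketch of the collective this patch
describes. The body of run() matches the patched hunk; the init_process()
harness and the process-spawning code below are assumptions based on the
surrounding tutorial (intermediate_source/dist_tuto.rst), not part of this
diff.

    """ All-Reduce example (sketch; harness assumed from the tutorial). """
    import os
    import torch
    import torch.distributed as dist
    import torch.multiprocessing as mp

    def run(rank, size):
        """ Simple collective communication. """
        group = dist.new_group([0, 1])
        tensor = torch.ones(1)
        # all_reduce is a collective: every rank in the group both
        # contributes its tensor and receives the reduced result.
        dist.all_reduce(tensor, op=dist.ReduceOp.SUM, group=group)
        print('Rank ', rank, ' has data ', tensor[0])

    def init_process(rank, size, fn, backend='gloo'):
        """ Initialize the distributed environment (assumed harness,
        not part of this patch). """
        os.environ['MASTER_ADDR'] = '127.0.0.1'
        os.environ['MASTER_PORT'] = '29500'
        dist.init_process_group(backend, rank=rank, world_size=size)
        fn(rank, size)

    if __name__ == "__main__":
        size = 2
        processes = []
        mp.set_start_method("spawn")
        for rank in range(size):
            p = mp.Process(target=init_process, args=(rank, size, run))
            p.start()
            processes.append(p)
        for p in processes:
            p.join()

With a world size of 2, each rank prints 2.0 afterward: unlike a
point-to-point send/recv, the sum is produced at every process in the
group, which is why "collective" is the accurate term here.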