Skip to content

Commit 08590b4

Browse files
authored
Merge branch 'master' into cache_sphinx
2 parents 668b457 + 2751bf3 commit 08590b4

File tree

9 files changed

+32
-714
lines changed

9 files changed

+32
-714
lines changed

.jenkins/remove_runnable_code.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,9 +16,17 @@
1616
if line.startswith('#'):
1717
ret_lines.append(line)
1818
state = STATE_NORMAL
19+
elif ((line.startswith('"""') or line.startswith('r"""')) and
20+
line.endswith('"""')):
21+
ret_lines.append(line)
22+
state = STATE_NORMAL
1923
elif line.startswith('"""') or line.startswith('r"""'):
2024
ret_lines.append(line)
2125
state = STATE_IN_MULTILINE_COMMENT_BLOCK_DOUBLE_QUOTE
26+
elif ((line.startswith("'''") or line.startswith("r'''")) and
27+
line.endswith("'''")):
28+
ret_lines.append(line)
29+
state = STATE_NORMAL
2230
elif line.startswith("'''") or line.startswith("r'''"):
2331
ret_lines.append(line)
2432
state = STATE_IN_MULTILINE_COMMENT_BLOCK_SINGLE_QUOTE

advanced_source/dispatcher.rst

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
1-
Dispatcher in C++
2-
=================
1+
Registering a Dispatched Operator in C++
2+
========================================
33

44
The dispatcher is an internal component of PyTorch which is responsible for
55
figuring out what code should actually get run when you call a function like

advanced_source/rpc_ddp_tutorial/main.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import torch.distributed as dist
77
import torch.distributed.autograd as dist_autograd
88
import torch.distributed.rpc as rpc
9-
from torch.distributed.rpc import ProcessGroupRpcBackendOptions
9+
from torch.distributed.rpc import TensorPipeRpcBackendOptions
1010
import torch.multiprocessing as mp
1111
import torch.optim as optim
1212
from torch.distributed.optim import DistributedOptimizer
@@ -128,7 +128,7 @@ def run_worker(rank, world_size):
128128
os.environ['MASTER_PORT'] = '29500'
129129

130130

131-
rpc_backend_options = ProcessGroupRpcBackendOptions()
131+
rpc_backend_options = TensorPipeRpcBackendOptions()
132132
rpc_backend_options.init_method='tcp://localhost:29501'
133133

134134
# Rank 2 is master, 3 is ps and 0 and 1 are trainers.

0 commit comments

Comments (0)