
Commit 575a4a9

apaszke authored and soumith committed

Remove assertions with side effects

1 parent 02e23f4 commit 575a4a9
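Context for the commit message: Python's assert statements are compiled away entirely when the interpreter runs with the -O flag, so an assert whose condition does real work (here, C-extension initialization) silently skips that work in optimized mode. A minimal sketch of the pitfall, using hypothetical names rather than PyTorch internals:

    _initialized = False

    def _init_backend():
        # Stand-in for a C-extension init call; returns True on success.
        global _initialized
        _initialized = True
        return True

    # Unsafe pattern (pre-commit): under "python -O" the assert is
    # removed at compile time, so _init_backend() is never even called.
    assert _init_backend(), "backend failed to initialize"

    # Safe pattern (this commit): the call always runs; only the error
    # handling depends on its return value.
    if not _init_backend():
        raise RuntimeError("backend initialization failed")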

File tree

2 files changed: 6 additions (+), 4 deletions (-)


torch/autograd/__init__.py

Lines changed: 2 additions & 2 deletions

@@ -152,5 +152,5 @@ def grad(outputs, inputs, grad_outputs=None, retain_graph=None, create_graph=None
         outputs, grad_outputs, retain_graph,
         inputs, only_inputs)
 
-status = torch._C._autograd_init()
-assert status, "Autograd failed to initialize."
+if not torch._C._autograd_init():
+    raise RuntimeError("autograd initialization failed")
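To see the stripping behavior directly, here is a hypothetical standalone demo (not part of the commit) that can be run with and without -O:

    effects = []

    def init_with_side_effect():
        effects.append("initialized")
        return True

    assert init_with_side_effect()

    # "python demo.py"    -> prints ['initialized']
    # "python -O demo.py" -> prints []; the assert, and the call inside
    #                        it, were removed at compile time
    print(effects)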

torch/distributed/__init__.py

Lines changed: 4 additions & 2 deletions

@@ -45,7 +45,8 @@ def init_process_group(backend, init_method='env://', **kwargs):
     torch._C._dist_init_process_group(backend, init_method, world_size,
                                       group_name, rank)
     _initialized = _INITIALIZED_PG
-    assert torch._C._dist_init_extension(False, reduce_op, group)
+    if not torch._C._dist_init_extension(False, reduce_op, group):
+        raise RuntimeError("distributed module initialization failed")
 
 
 def init_master_worker(backend, init_method='env://', **kwargs):

@@ -75,7 +76,8 @@ def init_master_worker(backend, init_method='env://', **kwargs):
     import torch.distributed.remote_types as remote_types
     _extend_scope(collectives)
     _extend_scope(remote_types)
-    assert torch._C._dist_init_extension(True, reduce_op, group)
+    if not torch._C._dist_init_extension(True, reduce_op, group):
+        raise RuntimeError("distributed module initialization failed")
 
 
 class reduce_op(object):
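One caller-visible consequence, sketched with a hypothetical stand-in for the bindings above: failures now surface as a RuntimeError in every mode, rather than an AssertionError in normal mode and silence under -O.

    def init_extension_sketch(ok):
        # Mirrors the pattern this commit introduces (a stand-in, not
        # the real torch._C._dist_init_extension binding).
        if not ok:
            raise RuntimeError("distributed module initialization failed")

    try:
        init_extension_sketch(ok=False)
    except RuntimeError as e:
        print("init failed:", e)  # raised reliably, with or without -O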
