
Commit a7f3974

Release (#467)
* smurf ethics
* smurf ethics
* removed auto ddp fix
* removed auto ddp fix
* removed auto ddp fix
* removed auto ddp fix
* removed auto ddp fix
* removed auto ddp fix
1 parent 7ce5b3f commit a7f3974

File tree

1 file changed (+6, -7 lines)


pytorch_lightning/trainer/ddp_mixin.py

Lines changed: 6 additions & 7 deletions
@@ -49,14 +49,13 @@ def set_distributed_mode(self, distributed_backend, nb_gpu_nodes):
                 'Trainer(distributed_backend=dp) (or ddp)'
             raise MisconfigurationException(m)
 
-        # use ddp automatically if nb_gpu_nodes > 1
-        if nb_gpu_nodes > 1 and self.use_dp:  # pragma: no cover
-            self.use_ddp = True
-            self.use_dp = False
+        # throw error to force user ddp or ddp2 choice
+        if nb_gpu_nodes > 1 and not (self.use_ddp2 or self.use_ddp):  # pragma: no cover
             w = 'DataParallel does not support nb_gpu_nodes > 1. ' \
                 'Switching to DistributedDataParallel for you. ' \
-                'To silence this warning set distributed_backend=ddp'
-            warnings.warn(w)
+                'To silence this warning set distributed_backend=ddp' \
+                'or distributed_backend=ddp2'
+            raise MisconfigurationException(w)
 
         logging.info(f'gpu available: {torch.cuda.is_available()}, used: {self.on_gpu}')

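In short, running with nb_gpu_nodes > 1 on the DataParallel backend no longer silently switches to DDP; it now raises a MisconfigurationException and asks the user to pick ddp or ddp2 explicitly. A minimal standalone sketch of the new check follows (the flag logic and message come from the diff; the helper function, its name, the locally defined exception class, and the __main__ demo are illustrative assumptions, not the library's API):

# Standalone sketch of the post-commit behaviour; the helper name and the
# locally defined exception class are assumptions for illustration only.
class MisconfigurationException(Exception):
    """Raised when Trainer flags contradict each other."""


def check_multi_node_backend(distributed_backend, nb_gpu_nodes):
    use_ddp = distributed_backend == 'ddp'
    use_ddp2 = distributed_backend == 'ddp2'

    # Before this commit: nb_gpu_nodes > 1 with DataParallel silently switched to DDP.
    # After this commit: the user must explicitly choose ddp or ddp2.
    if nb_gpu_nodes > 1 and not (use_ddp2 or use_ddp):
        w = ('DataParallel does not support nb_gpu_nodes > 1. '
             'To silence this warning set distributed_backend=ddp '
             'or distributed_backend=ddp2')
        raise MisconfigurationException(w)


if __name__ == '__main__':
    check_multi_node_backend('ddp', nb_gpu_nodes=2)      # passes
    try:
        check_multi_node_backend('dp', nb_gpu_nodes=2)   # now raises instead of auto-switching
    except MisconfigurationException as err:
        print(f'MisconfigurationException: {err}')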
@@ -173,7 +172,7 @@ def ddp_train(self, gpu_nb, model):
         if self.distributed_backend == 'ddp':
             device_ids = [gpu_nb]
         elif self.use_ddp2:
-            device_ids = None
+            device_ids = self.data_parallel_device_ids
 
         # allow user to configure ddp
         model = model.configure_ddp(model, device_ids)
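The second hunk makes the ddp2 backend pass the full list of local data-parallel device ids to configure_ddp instead of None, so each node wraps all of its GPUs (DP within the node, DDP across nodes). A small sketch of how the device ids differ per backend after this change (the function name and arguments are illustrative stand-ins, not part of ddp_mixin.py; only the backend names and values come from the diff):

# Illustrative helper showing how device_ids differ per backend after this commit;
# the function itself is a stand-in, not part of ddp_mixin.py.
def select_device_ids(distributed_backend, gpu_nb, data_parallel_device_ids):
    if distributed_backend == 'ddp':
        # ddp: one process per GPU, so wrap only this process's device
        return [gpu_nb]
    if distributed_backend == 'ddp2':
        # ddp2: DP inside the node, DDP across nodes -> pass every local GPU
        # (before this commit the code passed None here)
        return data_parallel_device_ids
    return None


print(select_device_ids('ddp', gpu_nb=1, data_parallel_device_ids=[0, 1, 2, 3]))   # [1]
print(select_device_ids('ddp2', gpu_nb=1, data_parallel_device_ids=[0, 1, 2, 3]))  # [0, 1, 2, 3]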
