
Fixing concatenation error for fp16 distributed training

master · gkarch · 5 years ago · commit df4a466af2
1 changed file with 1 addition and 1 deletion:
  distributed.py (+1, -1)

@@ -140,7 +140,7 @@ def apply_gradient_allreduce(module):
     buckets = {}
     for param in module.parameters():
         if param.requires_grad and param.grad is not None:
-            tp = type(param.data)
+            tp = param.data.dtype
             if tp not in buckets:
                 buckets[tp] = []
             buckets[tp].append(param)
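
For context, a minimal sketch (not from this repo; PyTorch >= 0.4 assumed) of why the old bucketing key broke fp16 training: type(param.data) returns torch.Tensor for every parameter regardless of precision, so fp16 and fp32 parameters land in the same bucket, and the flatten-and-concatenate step later in the allreduce path then fails on mixed dtypes. Keying by param.data.dtype instead gives one homogeneous bucket per precision.

import torch
from torch import nn

# Hypothetical parameter list mixing precisions, as in fp16 training.
params = [nn.Parameter(torch.zeros(4, dtype=torch.float32)),
          nn.Parameter(torch.zeros(4, dtype=torch.float16))]

# Old key: the Python class is torch.Tensor for both precisions.
print(type(params[0].data) is type(params[1].data))   # True -> one mixed bucket
# New key: dtype tells fp32 and fp16 apart.
print(params[0].data.dtype == params[1].data.dtype)   # False

# Bucketing loop from the patch, keyed by dtype (the `param.grad is not None`
# check is omitted here because these toy parameters have no gradients yet).
buckets = {}
for param in params:
    if param.requires_grad:
        tp = param.data.dtype
        if tp not in buckets:
            buckets[tp] = []
        buckets[tp].append(param)
print(list(buckets.keys()))   # [torch.float32, torch.float16]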
