Implement Scatter double backwards.
diff --git a/test/test_autograd.py b/test/test_autograd.py
index 783c1b2..aa8280f 100644
--- a/test/test_autograd.py
+++ b/test/test_autograd.py
@@ -1850,7 +1850,6 @@
     'Cumprod',
     'Gather',
     'MaskedScatter',
-    'Scatter',
     'Sort',
     'Topk',
     'Norm',
diff --git a/torch/autograd/_functions/tensor.py b/torch/autograd/_functions/tensor.py
index 0066449..b3f6a06 100644
--- a/torch/autograd/_functions/tensor.py
+++ b/torch/autograd/_functions/tensor.py
@@ -536,15 +536,15 @@
         return input.scatter_(ctx.dim, index, source)
 
     @staticmethod
-    @once_differentiable
     def backward(ctx, grad_output):
         index, = ctx.saved_tensors
+        index_var = Variable(index)
         grad_input = grad_source = None
         if ctx.needs_input_grad[0]:
             grad_input = grad_output.clone()
-            grad_input.scatter_(ctx.dim, index, 0)
+            grad_input.scatter_(ctx.dim, index_var, 0)
         if ctx.needs_input_grad[3]:
-            grad_source = grad_output.gather(ctx.dim, index)
+            grad_source = grad_output.gather(ctx.dim, index_var)
         return grad_input, None, None, grad_source, None