Commit d0ec659

fix: add the implementation of GatherND's grad
1 parent 3fcc4d8 commit d0ec659

File tree: 4 files changed, +46 -2 lines changed


src/TensorFlowNET.Core/APIs/tf.array.cs

+10
@@ -140,6 +140,16 @@ public Tensor identity(Tensor input, string name = null)
         public Tensor gather(Tensor @params, Tensor indices, string name = null, int axis = 0)
             => array_ops.gather(@params, indices, name: name, axis: ops.convert_to_tensor(axis));
 
+        /// <summary>
+        /// Gather slices from `params` into a Tensor with shape specified by `indices`.
+        /// </summary>
+        /// <param name="params"></param>
+        /// <param name="indices"></param>
+        /// <param name="name"></param>
+        /// <returns></returns>
+        public Tensor gather_nd(Tensor @params, Tensor indices, string name = null)
+            => gen_array_ops.gather_nd(@params, indices, name: name);
+
         /// <summary>
         /// Return the elements, either from `x` or `y`, depending on the `condition`.
         /// </summary>
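For orientation, here is a minimal eager-mode sketch of how the new tf.gather_nd entry point is used. The tensor values mirror the unit test added later in this commit; the using directives are assumptions about the usual TensorFlow.NET setup:

using Tensorflow;                    // assumed namespace for Tensor and TF_DataType
using static Tensorflow.Binding;     // assumed source of the global `tf` instance

// Each row of `indices` addresses one element of `x`, so the result is the
// 1-D tensor [ x[0,1], x[1,1], x[2,1] ] = [ 2, 2, 2 ].
var x = tf.constant(new float[,] { { 1f, 2f, 3f }, { 1f, 2f, 3f }, { 1f, 2f, 3f } }, dtype: TF_DataType.TF_FLOAT);
var indices = tf.constant(new int[,] { { 0, 1 }, { 1, 1 }, { 2, 1 } }, dtype: TF_DataType.TF_INT32);
var y = tf.gather_nd(x, indices);    // shape (3,)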

src/TensorFlowNET.Core/Gradients/array_grad.cs

+19
@@ -403,7 +403,26 @@ public static Tensor[] _TileGrad(Operation op, Tensor[] grads)
                 input_grad.set_shape(op.inputs[0].GetShape());
             }
             return new Tensor[] { input_grad, null };
+        }
 
+        [RegisterGradient("GatherNd")]
+        public static Tensor[] _GatherNdGrad(Operation op, Tensor[] grads)
+        {
+            var @ref = op.inputs[0];
+            var indices = op.inputs[1];
+            var grad = grads[0];
+            var ref_shape = array_ops.shape(@ref, out_type: indices.dtype);
+            Tensor ref_grad = null;
+            if (indices.shape.ndim == 2 && indices.shape.dims[indices.shape.Length - 1] == 1)
+            {
+                ref_grad = (Tensor)new IndexedSlices(grad, array_ops.squeeze(indices, axis: -1), ref_shape);
+            }
+            else
+            {
+                ref_grad = gen_array_ops.scatter_nd(indices, grad, ref_shape);
+            }
+            return new Tensor[] { ref_grad, null };
         }
+
     }
 }
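The backward rule implemented here is the standard one for gathers: the gradient that arrives for each gathered element is scattered back to the position it was read from, and every other position of `params` receives zero (the rank-2, trailing-dimension-1 case expresses the same thing more cheaply as IndexedSlices over whole rows). A plain-C# sketch of that bookkeeping, with the indices and the all-ones upstream gradient taken from the GatherNdTest added later in this commit:

// Forward: gather_nd(x, indices) reads x[0,1], x[1,1], x[2,1] from a 3x3 matrix.
// Backward: scatter each upstream gradient value into a zero matrix of x's shape,
// which is what scatter_nd(indices, grad, ref_shape) computes in the graph.
var indices = new int[,] { { 0, 1 }, { 1, 1 }, { 2, 1 } };
var upstream = new float[] { 1f, 1f, 1f };   // ones, as when differentiating the sum of the outputs

var refGrad = new float[3, 3];               // zeros, same shape as x
for (int i = 0; i < upstream.Length; i++)
    refGrad[indices[i, 0], indices[i, 1]] += upstream[i];

// refGrad is now { { 0, 1, 0 }, { 0, 1, 0 }, { 0, 1, 0 } }, matching the `expected`
// array asserted in GatherNdTest.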

src/TensorFlowNET.Core/Operations/array_ops.cs

+1 -1
@@ -829,7 +829,7 @@ public static Tensor strided_slice_grad(Tensor shape, Tensor begin, Tensor end,
         /// <returns>A `Tensor`. Has the same type as `input`.
         /// Contains the same data as `input`, but has one or more dimensions of
         /// size 1 removed.</returns>
-        public static Tensor squeeze(Tensor input, int[] axis = null, string name = null)
+        public static Tensor squeeze(Tensor input, Axis axis = null, string name = null)
             => gen_array_ops.squeeze(input, axis, name);
 
         public static Tensor identity(Tensor input, string name = null)
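This signature change accompanies the new gradient: _GatherNdGrad calls array_ops.squeeze(indices, axis: -1) with a single negative axis, which presumably would not bind to the old int[] parameter, while Axis in TensorFlow.NET converts implicitly from an int. The squeeze itself only flattens an (N, 1) column of row indices into an (N,) vector, the form IndexedSlices expects; a plain-C# illustration of that reshaping (no TensorFlow.NET calls, names are illustrative):

// indices of shape (3, 1): one row index per gathered slice.
var indices = new int[,] { { 0 }, { 1 }, { 2 } };

// Squeezing the trailing size-1 axis yields the flat vector { 0, 1, 2 } of shape (3,).
var squeezed = new int[indices.GetLength(0)];
for (int i = 0; i < squeezed.Length; i++)
    squeezed[i] = indices[i, 0];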

test/TensorFlowNET.UnitTest/GradientTest/GradientEagerTest.cs

+16 -1
@@ -62,7 +62,7 @@ public void SquaredDifference_1D()
             // Calcute the gradient of (x1-x2)^2
             // by Automatic Differentiation in Eager mode
             // Expected is 2*(abs(x1-x2))
-            Tensor x1 = new NDArray( new float[] { 1, 3, 5, 21, 19, 17 });
+            Tensor x1 = new NDArray(new float[] { 1, 3, 5, 21, 19, 17 });
             Tensor x2 = new NDArray(new float[] { 29, 27, 23, 7, 11, 13 });
             float[] expected = new float[]
             {
@@ -187,5 +187,20 @@ public void Tile()
                 Assert.AreEqual((float)grad.numpy(), 2.0f);
             }
         }
+
+        [TestMethod]
+        public void GatherNdTest()
+        {
+            var x = tf.constant(new float[,] { { 1.0f, 2.0f, 3.0f }, { 1.0f, 2.0f, 3.0f }, { 1.0f, 2.0f, 3.0f } }, dtype: TF_DataType.TF_FLOAT);
+            var indices = tf.constant(new int[,] { { 0, 1 }, { 1, 1 }, { 2, 1 } }, dtype: TF_DataType.TF_INT32);
+            using (var tape = tf.GradientTape())
+            {
+                tape.watch(x);
+                var res = tf.gather_nd(x, indices);
+                var grad = tape.gradient(res, x);
+                var expected = np.array(new float[,] { { 0f, 1f, 0f }, { 0f, 1f, 0f }, { 0f, 1f, 0f } });
+                Assert.IsTrue(Enumerable.SequenceEqual(grad.ToArray<float>(), expected.ToArray<float>()));
+            }
+        }
     }
 }
