Add `TestExpGrad` to `math_grad_test`
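This adds a gradient test for `Exp`, mirroring the existing `TestAddGrad` pattern: `ExpModel` runs the forward op, `ExpGradModel` registers `ExpRegisterer` and computes the gradient through a `Tape`, and `CompareNumericalAndAutodiffGradients` checks the tape result against a numerical estimate at x = 2.0f. Since d/dx exp(x) = exp(x), both should come out near e^2 ≈ 7.389.

For intuition, here is a minimal standalone sketch of that comparison (hypothetical illustration, not the harness's actual implementation), using a central-difference estimate:

```cpp
// Hypothetical standalone sketch: compare the analytic gradient of exp
// against a central-difference estimate, in the spirit of what the
// test harness checks.
#include <cmath>
#include <cstdio>

int main() {
  const double x = 2.0;     // Same point the test uses.
  const double eps = 1e-4;  // Perturbation for the numerical estimate.

  // Analytic gradient: d/dx exp(x) = exp(x).
  const double analytic = std::exp(x);

  // Central difference: (f(x + eps) - f(x - eps)) / (2 * eps).
  const double numerical =
      (std::exp(x + eps) - std::exp(x - eps)) / (2.0 * eps);

  // Both should be ~7.389056 at x = 2.
  std::printf("analytic=%.6f numerical=%.6f diff=%.2e\n", analytic, numerical,
              std::fabs(analytic - numerical));
  return std::fabs(analytic - numerical) < 1e-3 ? 0 : 1;
}
```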
diff --git a/tensorflow/c/experimental/gradients/math_grad_test.cc b/tensorflow/c/experimental/gradients/math_grad_test.cc
index 9a98f4d..dc1936a 100644
--- a/tensorflow/c/experimental/gradients/math_grad_test.cc
+++ b/tensorflow/c/experimental/gradients/math_grad_test.cc
@@ -59,6 +59,34 @@
   return Status::OK();
 }
 
+Status ExpModel(AbstractContext* ctx,
+                absl::Span<AbstractTensorHandle* const> inputs,
+                absl::Span<AbstractTensorHandle*> outputs) {
+  return ops::Exp(ctx, inputs, outputs, "Exp");
+}
+
+Status ExpGradModel(AbstractContext* ctx,
+                    absl::Span<AbstractTensorHandle* const> inputs,
+                    absl::Span<AbstractTensorHandle*> outputs) {
+  GradientRegistry registry;
+  TF_RETURN_IF_ERROR(registry.Register("Exp", ExpRegisterer));
+
+  Tape tape(/*persistent=*/false);
+  tape.Watch(inputs[0]);  // Watch x.
+  std::vector<AbstractTensorHandle*> temp_outputs(1);
+  AbstractContextPtr tape_ctx(new TapeContext(ctx, &tape, registry));
+  TF_RETURN_IF_ERROR(ops::Exp(tape_ctx.get(), inputs,
+                              absl::MakeSpan(temp_outputs), "ExpGrad"));
+
+  TF_RETURN_IF_ERROR(tape.ComputeGradient(ctx, /*targets=*/temp_outputs,
+                                          /*sources=*/inputs,
+                                          /*output_gradients=*/{}, outputs));
+  for (auto temp_output : temp_outputs) {
+    temp_output->Unref();
+  }
+  return Status::OK();
+}
+
 class CppGradients
     : public ::testing::TestWithParam<std::tuple<const char*, bool, bool>> {
  protected:
@@ -105,6 +133,19 @@
       AddModel, AddGradModel, ctx_.get(), {x.get(), y.get()}, UseFunction()));
 }
 
+TEST_P(CppGradients, TestExpGrad) {
+  AbstractTensorHandlePtr x;
+  {
+    AbstractTensorHandle* x_raw = nullptr;
+    Status s = TestScalarTensorHandle(ctx_.get(), 2.0f, &x_raw);
+    ASSERT_EQ(errors::OK, s.code()) << s.error_message();
+    x.reset(x_raw);
+  }
+
+  ASSERT_NO_FATAL_FAILURE(CompareNumericalAndAutodiffGradients(
+      ExpModel, ExpGradModel, ctx_.get(), {x.get()}, UseFunction()));
+}
+
 #ifdef PLATFORM_GOOGLE
 INSTANTIATE_TEST_SUITE_P(
     UnifiedCAPI, CppGradients,