nn_grad_test.cc revision 39f04d940e3624e17f8855ae6dd7c7bf55545b70
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/cc/framework/grad_op_registry.h"
#include "tensorflow/cc/framework/gradient_checker.h"
#include "tensorflow/cc/framework/testutil.h"
#include "tensorflow/cc/gradients/grad_testutil.h"
#include "tensorflow/cc/ops/standard_ops.h"
#include "tensorflow/core/framework/tensor_testutil.h"
#include "tensorflow/core/lib/core/status_test_util.h"
#include "tensorflow/core/lib/random/random.h"

namespace tensorflow {
using namespace ops;  // NOLINT(build/namespaces)

namespace {

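// Each test below builds a small graph for one nn op and then uses the
// gradient checker to compare the registered symbolic gradient against a
// numerically estimated one.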
class NNGradTest : public ::testing::Test {
 protected:
  NNGradTest() : scope_(Scope::NewRootScope()) {}

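  // Runs the gradient checker for a single input/output pair given only the
  // shapes; the checker chooses the input values itself. Asserts that the
  // maximum difference between symbolic and numeric gradients is small.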
  void RunTest(const Output& x, const TensorShape& x_shape, const Output& y,
               const TensorShape& y_shape) {
    float max_error;
    TF_ASSERT_OK(ComputeGradientError(scope_, {x}, {x_shape}, {y}, {y_shape},
                                      &max_error));
    EXPECT_LT(max_error, 1e-4);
  }

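  // Same as above, but with explicit initial input values. Used by tests that
  // must keep the inputs away from points where the op's gradient is not well
  // defined, so the finite-difference estimate stays meaningful.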
  void RunTest(const Output& x, const Tensor& x_init_value, const Output& y,
               const TensorShape& y_shape) {
    float max_error;
    TF_ASSERT_OK(
        ComputeGradientError(scope_, x, x_init_value, y, y_shape, &max_error));
    EXPECT_LT(max_error, 1e-4);
  }

  Scope scope_;
};

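// Softmax normalizes along the last dimension, so the [32, 10] input
// exercises the typical [batch, num_classes] 2-D case.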
TEST_F(NNGradTest, SoftmaxGrad) {
  TensorShape shape({32, 10});
  auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape));
  auto y = Softmax(scope_, x);
  RunTest(x, shape, y, shape);
}

TEST_F(NNGradTest, ReluGrad) {
  TensorShape shape({5, 2});
  auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape));
  auto y = Relu(scope_, x);
  // Avoid input values where ReLU gradient is not well defined (around zero).
  Tensor x_init_value = test::AsTensor<float>(
      {-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9}, {5, 2});
  RunTest(x, x_init_value, y, shape);
}

TEST_F(NNGradTest, Relu6Grad) {
  TensorShape shape({5, 2});
  auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape));
  auto y = Relu6(scope_, x);
  // Avoid input values where the ReLU6 gradient is not well defined (around
  // zero and six).
  Tensor x_init_value = test::AsTensor<float>(
      {-0.9, -0.7, -0.5, -0.3, -0.1, 6.1, 6.3, 6.5, 6.7, 6.9}, {5, 2});
  RunTest(x, x_init_value, y, shape);
}

TEST_F(NNGradTest, EluGrad) {
  TensorShape shape({5, 2});
  auto x = Placeholder(scope_, DT_FLOAT, Placeholder::Shape(shape));
  auto y = Elu(scope_, x);
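  // Fixed input values spanning both the exponential (x < 0) and identity
  // (x > 0) regions of ELU, so the check exercises both branches of the
  // gradient.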
  Tensor x_init_value = test::AsTensor<float>(
      {-0.9, -0.7, -0.5, -0.3, -0.1, 0.1, 0.3, 0.5, 0.7, 0.9}, {5, 2});
  RunTest(x, x_init_value, y, shape);
}

}  // namespace
}  // namespace tensorflow