# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
15"""Tests for SimpleRNN layer."""
16
17from __future__ import absolute_import
18from __future__ import division
19from __future__ import print_function
20
21import numpy as np
22
23from tensorflow.python.keras._impl import keras
24from tensorflow.python.keras._impl.keras import testing_utils
25from tensorflow.python.platform import test
26
27
28class SimpleRNNLayerTest(test.TestCase):
29
30  def test_return_sequences_SimpleRNN(self):
31    num_samples = 2
32    timesteps = 3
33    embedding_dim = 4
34    units = 2
35    with self.test_session():
36      testing_utils.layer_test(
37          keras.layers.SimpleRNN,
38          kwargs={'units': units,
39                  'return_sequences': True},
40          input_shape=(num_samples, timesteps, embedding_dim))
41
42  def test_dynamic_behavior_SimpleRNN(self):
43    num_samples = 2
44    timesteps = 3
45    embedding_dim = 4
46    units = 2
47    with self.test_session():
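      # input_shape=(None, embedding_dim) leaves the timestep dimension
      # unspecified, so the layer has to handle variable-length sequences.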
      layer = keras.layers.SimpleRNN(units, input_shape=(None, embedding_dim))
      model = keras.models.Sequential()
      model.add(layer)
      model.compile('sgd', 'mse')
      x = np.random.random((num_samples, timesteps, embedding_dim))
      y = np.random.random((num_samples, units))
      model.train_on_batch(x, y)

  def test_dropout_SimpleRNN(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    with self.test_session():
      testing_utils.layer_test(
          keras.layers.SimpleRNN,
          kwargs={'units': units,
                  'dropout': 0.1,
                  'recurrent_dropout': 0.1},
          input_shape=(num_samples, timesteps, embedding_dim))

  def test_implementation_mode_SimpleRNN(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    with self.test_session():
      for mode in [0, 1, 2]:
        testing_utils.layer_test(
            keras.layers.SimpleRNN,
            kwargs={'units': units,
                    'implementation': mode},
            input_shape=(num_samples, timesteps, embedding_dim))

  def test_statefulness_SimpleRNN(self):
    num_samples = 2
    timesteps = 3
    embedding_dim = 4
    units = 2
    layer_class = keras.layers.SimpleRNN
    with self.test_session():
      model = keras.models.Sequential()
      model.add(
          keras.layers.Embedding(
              4,
              embedding_dim,
              mask_zero=True,
              input_length=timesteps,
              batch_input_shape=(num_samples, timesteps)))
      layer = layer_class(
          units, return_sequences=False, stateful=True, weights=None)
      model.add(layer)
      model.compile(optimizer='sgd', loss='mse')
      out1 = model.predict(np.ones((num_samples, timesteps)))
      self.assertEqual(out1.shape, (num_samples, units))

      # train once so that the states change
      model.train_on_batch(
          np.ones((num_samples, timesteps)), np.ones((num_samples, units)))
      out2 = model.predict(np.ones((num_samples, timesteps)))

      # if the state is not reset, output should be different
      self.assertNotEqual(out1.max(), out2.max())

      # check that output changes after states are reset
      # (even though the model itself didn't change)
      layer.reset_states()
      out3 = model.predict(np.ones((num_samples, timesteps)))
      self.assertNotEqual(out2.max(), out3.max())

      # check that container-level reset_states() works
      model.reset_states()
      out4 = model.predict(np.ones((num_samples, timesteps)))
      np.testing.assert_allclose(out3, out4, atol=1e-5)

      # check that the call to `predict` updated the states
      out5 = model.predict(np.ones((num_samples, timesteps)))
      self.assertNotEqual(out4.max(), out5.max())

      # Check masking
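      # With mask_zero=True on the Embedding, zero inputs are masked and the
      # RNN skips them, so left- and right-padded inputs should match.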
      layer.reset_states()

      left_padded_input = np.ones((num_samples, timesteps))
      left_padded_input[0, :1] = 0
      left_padded_input[1, :2] = 0
      out6 = model.predict(left_padded_input)

      layer.reset_states()

      right_padded_input = np.ones((num_samples, timesteps))
      right_padded_input[0, -1:] = 0
      right_padded_input[1, -2:] = 0
      out7 = model.predict(right_padded_input)

      np.testing.assert_allclose(out7, out6, atol=1e-5)

  def test_regularizers_SimpleRNN(self):
    embedding_dim = 4
    layer_class = keras.layers.SimpleRNN
    with self.test_session():
      layer = layer_class(
          5,
          return_sequences=False,
          weights=None,
          input_shape=(None, embedding_dim),
          kernel_regularizer=keras.regularizers.l1(0.01),
          recurrent_regularizer=keras.regularizers.l1(0.01),
          bias_regularizer='l2',
          activity_regularizer='l1')
      layer.build((None, None, embedding_dim))
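      # The kernel, recurrent, and bias regularizers each register one loss
      # when the layer is built.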
      self.assertEqual(len(layer.losses), 3)

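      # The activity regularizer only contributes a loss once the layer is
      # called on an actual input.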
      x = keras.backend.variable(np.ones((2, 3, embedding_dim)))
      layer(x)
      self.assertEqual(len(layer.get_losses_for(x)), 1)

  def test_constraints_SimpleRNN(self):
    embedding_dim = 4
    layer_class = keras.layers.SimpleRNN
    with self.test_session():
      k_constraint = keras.constraints.max_norm(0.01)
      r_constraint = keras.constraints.max_norm(0.01)
      b_constraint = keras.constraints.max_norm(0.01)
      layer = layer_class(
          5,
          return_sequences=False,
          weights=None,
          input_shape=(None, embedding_dim),
          kernel_constraint=k_constraint,
          recurrent_constraint=r_constraint,
          bias_constraint=b_constraint)
      layer.build((None, None, embedding_dim))
      self.assertEqual(layer.cell.kernel.constraint, k_constraint)
      self.assertEqual(layer.cell.recurrent_kernel.constraint, r_constraint)
      self.assertEqual(layer.cell.bias.constraint, b_constraint)

  def test_with_masking_layer_SimpleRNN(self):
    layer_class = keras.layers.SimpleRNN
    with self.test_session():
      inputs = np.random.random((2, 3, 4))
      targets = np.abs(np.random.random((2, 3, 5)))
      targets /= targets.sum(axis=-1, keepdims=True)
      model = keras.models.Sequential()
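      # Masking flags all-zero timesteps so the RNN skips them.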
      model.add(keras.layers.Masking(input_shape=(3, 4)))
      model.add(layer_class(units=5, return_sequences=True, unroll=False))
      model.compile(loss='categorical_crossentropy', optimizer='adam')
      model.fit(inputs, targets, epochs=1, batch_size=2, verbose=1)

  def test_from_config_SimpleRNN(self):
    layer_class = keras.layers.SimpleRNN
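    # get_config/from_config should round-trip exactly, including the
    # stateful flag.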
    for stateful in (False, True):
      l1 = layer_class(units=1, stateful=stateful)
      l2 = layer_class.from_config(l1.get_config())
      self.assertEqual(l1.get_config(), l2.get_config())


if __name__ == '__main__':
  test.main()