# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for TensorFlow 2.0 layer behavior."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import collections
import itertools as it
import sys
import traceback
from absl.testing import parameterized
import numpy as np

from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.engine import base_layer
from tensorflow.python.keras.optimizer_v2 import rmsprop
from tensorflow.python.keras.utils import tf_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test


class DynamicLayer1(base_layer.Layer):
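  """Layer using data-dependent Python control flow; needs `dynamic=True`."""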

  def __init__(self, dynamic=False, **kwargs):
    super(DynamicLayer1, self).__init__(dynamic=dynamic, **kwargs)

  def call(self, inputs):
    if math_ops.reduce_sum(inputs) > 0:
      return math_ops.sqrt(inputs)
    else:
      return math_ops.square(inputs)

  def compute_output_shape(self, input_shape):
    return input_shape


class DynamicLayer2(base_layer.Layer):
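  """Layer iterating over its inputs in Python; needs `dynamic=True`."""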

  def __init__(self, dynamic=False, **kwargs):
    super(DynamicLayer2, self).__init__(dynamic=dynamic, **kwargs)

  def call(self, inputs):
    samples = []
    for sample in inputs:
      samples.append(math_ops.square(sample))
    return array_ops.stack(samples, axis=0)

  def compute_output_shape(self, input_shape):
    return input_shape


class InvalidLayer(base_layer.Layer):
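  """Layer whose `call` always raises, to test error propagation."""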

  def call(self, inputs):
    raise ValueError('You did something wrong!')


class BaseLayerTest(keras_parameterized.TestCase):
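  """Tests for base Layer behavior in v2."""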

  @parameterized.parameters(DynamicLayer1, DynamicLayer2)
  def test_dynamic_layer_in_functional_model_in_graph_mode(self, layer_class):
    with context.graph_mode():
      inputs = keras.Input((3,))
      # Works when `dynamic=True` is declared.
      outputs = layer_class(dynamic=True)(inputs)
      model = keras.Model(inputs, outputs)
      self.assertEqual(model.dynamic, True)
      # But then you cannot run the model since you're in a graph scope.
      with self.assertRaisesRegexp(
          ValueError, 'You must enable eager execution'):
        model.compile(rmsprop.RMSprop(0.001), loss='mse')

      # Fails when `dynamic=True` is not declared.
      with self.assertRaisesRegexp(
          TypeError, 'attempting to use Python control flow'):
        _ = layer_class()(inputs)

  @parameterized.parameters(DynamicLayer1, DynamicLayer2)
  def test_dynamic_layer_in_functional_model_in_eager_mode(self, layer_class):
    inputs = keras.Input((3,))
    # Fails when `dynamic=True` is not declared.
    with self.assertRaisesRegexp(
        TypeError, 'attempting to use Python control flow'):
      _ = layer_class()(inputs)
    # Works when `dynamic=True` is declared.
    outputs = layer_class(dynamic=True)(inputs)
    model = keras.Model(inputs, outputs)
    self.assertEqual(model.dynamic, True)
    model.compile(rmsprop.RMSprop(0.001), loss='mse')
    self.assertEqual(model.run_eagerly, True)
    model.train_on_batch(np.random.random((2, 3)), np.random.random((2, 3)))

  def test_nested_dynamic_layers_in_eager_mode(self):
    inputs = keras.Input((3,))
    outputs = DynamicLayer1(dynamic=True)(inputs)
    inner_model = keras.Model(inputs, outputs)
    self.assertEqual(inner_model.dynamic, True)

    inputs = keras.Input((3,))
    x = DynamicLayer2(dynamic=True)(inputs)
    outputs = inner_model(x)

    model = keras.Model(inputs, outputs)
    self.assertEqual(model.dynamic, True)
    model.compile(rmsprop.RMSprop(0.001), loss='mse')
    self.assertEqual(model.run_eagerly, True)
    model.train_on_batch(np.random.random((2, 3)), np.random.random((2, 3)))

  def test_dynamic_layers_in_sequential_model(self):
    # Without input_shape argument
    model = keras.Sequential([DynamicLayer1(dynamic=True),
                              keras.layers.Dense(3),
                              DynamicLayer2(dynamic=True)])
    self.assertEqual(model.dynamic, True)
    model.compile(rmsprop.RMSprop(0.001), loss='mse')
    self.assertEqual(model.run_eagerly, True)
    model.train_on_batch(np.random.random((2, 3)), np.random.random((2, 3)))

    # With input_shape argument
    model = keras.Sequential([DynamicLayer1(dynamic=True, input_shape=(3,)),
                              DynamicLayer2(dynamic=True)])
    self.assertEqual(model.dynamic, True)
    model.compile(rmsprop.RMSprop(0.001), loss='mse')
    self.assertEqual(model.run_eagerly, True)
    model.train_on_batch(np.random.random((2, 3)), np.random.random((2, 3)))

  def test_dynamic_layers_in_subclassed_model(self):

    class MyModel(keras.Model):

      def __init__(self):
        super(MyModel, self).__init__()
        self.layer1 = DynamicLayer1(dynamic=True)

      def call(self, inputs):
        return self.layer1(inputs)

    model = MyModel()
    self.assertEqual(model.dynamic, True)
    model.compile(rmsprop.RMSprop(0.001), loss='mse')
    self.assertEqual(model.run_eagerly, True)
    model.train_on_batch(np.random.random((2, 3)), np.random.random((2, 3)))

  def test_dynamic_subclassed_model_no_shape_inference(self):

    class MyModel(keras.Model):

      def __init__(self):
        super(MyModel, self).__init__(dynamic=True)
        self.layer1 = keras.layers.Dense(3)
        self.layer2 = keras.layers.Dense(3)

      def call(self, inputs):
        if math_ops.reduce_sum(inputs) > 0:
          return self.layer1(inputs)
        else:
          return self.layer2(inputs)

    model = MyModel()
    self.assertEqual(model.dynamic, True)
    model.compile(rmsprop.RMSprop(0.001), loss='mse')
    self.assertEqual(model.run_eagerly, True)
    model.train_on_batch(np.random.random((2, 3)), np.random.random((2, 3)))
    self.assertEqual(model.outputs, [None])

  def test_dynamic_subclassed_model_with_shape_inference(self):

    class MyModel(keras.Model):

      def __init__(self):
        super(MyModel, self).__init__(dynamic=True)
        self.layer1 = keras.layers.Dense(3)
        self.layer2 = keras.layers.Dense(3)

      def call(self, inputs):
        if math_ops.reduce_sum(inputs) > 0:
          return self.layer1(inputs)
        else:
          return self.layer2(inputs)

      def compute_output_shape(self, input_shape):
        return tensor_shape.TensorShape(
            tuple(input_shape[:-1].as_list()) + (3,))

    model = MyModel()
    self.assertEqual(model.dynamic, True)
    model.compile(rmsprop.RMSprop(0.001), loss='mse')
    model.train_on_batch(np.random.random((2, 3)), np.random.random((2, 3)))
    self.assertEqual(model.outputs[0].shape.as_list(), [None, 3])

  @test_util.run_in_graph_and_eager_modes
  def test_invalid_forward_pass(self):
    inputs = keras.Input((3,))
    with self.assertRaisesRegexp(ValueError, 'You did something wrong!'):
      _ = InvalidLayer()(inputs)

  @keras_parameterized.run_with_all_model_types
  @test_util.run_in_graph_and_eager_modes
  def test_build_with_numpy_data(self):
    model_layers = [
        keras.layers.Dense(3, activation='relu', kernel_initializer='ones'),
        keras.layers.Dense(1, activation='sigmoid', kernel_initializer='ones')
    ]
    model = testing_utils.get_model_from_layers(model_layers, input_shape=(4,))
    model(np.zeros((2, 4), dtype='float32'))
    self.assertTrue(model.built)

  @test_util.run_in_graph_and_eager_modes
  def test_default_add_weight(self):

    class TestLayer(keras.layers.Layer):
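      """Layer creating weights in `__init__` via `add_weight` defaults."""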

      def __init__(self):
        super(TestLayer, self).__init__()
        self.default_weight = self.add_weight()
        self.weight_without_name = self.add_weight(shape=(3, 4))
        self.regularized_weight_without_name = self.add_weight(
            shape=(3, 4), regularizer='l2')

    layer = TestLayer()
    self.assertEqual(layer.default_weight.shape.as_list(), [])
    self.assertEqual(layer.weight_without_name.shape.as_list(), [3, 4])
    self.assertEqual(layer.default_weight.dtype.name, 'float32')
    self.assertEqual(layer.weight_without_name.dtype.name, 'float32')
    self.assertEqual(len(layer.losses), 1)
    if not context.executing_eagerly():
      # Cannot access tensor.name in eager execution.
      self.assertIn('Variable_2/Regularizer', layer.losses[0].name)

  def test_learning_phase_freezing_for_layers(self):
    # This test is only meant to run in graph functions mode (ambient eager).
    # In forced eager, `model.predict` ignores the global learning phase
    # and just uses training=False. TODO(fchollet): consider unifying the
    # behaviors.

    class LearningPhaseLayer(keras.layers.Layer):
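      """Layer that outputs ones in training and zeros at inference."""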

      def call(self, inputs):
        return keras.backend.in_train_phase(
            lambda: array_ops.ones_like(inputs),
            lambda: array_ops.zeros_like(inputs))

    def get_learning_phase_value():
      model = keras.models.Sequential([LearningPhaseLayer(input_shape=(1,))])
      return np.sum(model.predict(np.ones((1, 1))))

    self.assertEqual(get_learning_phase_value(), 0)

    # Test scope.
    with keras.backend.learning_phase_scope(1):
      self.assertEqual(get_learning_phase_value(), 1)

    # The effects of the scope end after exiting it.
    self.assertEqual(get_learning_phase_value(), 0)

    # Test setting.
    keras.backend.set_learning_phase(1)
    self.assertEqual(get_learning_phase_value(), 1)
    keras.backend.set_learning_phase(0)
    self.assertEqual(get_learning_phase_value(), 0)

  # Cannot be enabled with `run_eagerly=True`, see b/123904578
  @test_util.run_in_graph_and_eager_modes
  def test_layer_can_return_variable(self):

    class ComputeSum(keras.layers.Layer):
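      """Stateful layer that accumulates a running sum of its inputs."""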

      def __init__(self):
        super(ComputeSum, self).__init__()
        self.total = variables.Variable(
            initial_value=array_ops.zeros((1, 1)), trainable=False)
        if not context.executing_eagerly():
          keras.backend.get_session().run(self.total.initializer)

      def call(self, inputs):
        self.total.assign_add(inputs)
        return self.total

    inputs = keras.Input(shape=(1,))
    model = keras.Model(inputs, ComputeSum()(inputs))
    model.predict(np.ones((1, 1)))

  def _get_layer_with_training_arg(self):

    class TrainingLayer(keras.layers.Layer):
      """A layer with a `training` argument in a defuned `call`."""

      @def_function.function
      def call(self, inputs, training=None):
        if training is None:
          training = keras.backend.learning_phase()
        return tf_utils.smart_cond(training,
                                   lambda: array_ops.ones_like(inputs),
                                   lambda: array_ops.zeros_like(inputs))

    return TrainingLayer()

  @keras_parameterized.run_with_all_model_types
  # b/124459427: can't test with `run_eagerly=True` for now.
  @test_util.run_in_graph_and_eager_modes
  def test_training_arg_in_defun(self):
    layer = self._get_layer_with_training_arg()
    model = testing_utils.get_model_from_layers([layer], input_shape=(1,))
    model.compile(rmsprop.RMSprop(0.), loss='mae')
    history = model.fit(np.zeros((1, 1)), np.zeros((1, 1)))
    self.assertEqual(history.history['loss'][0], 1.)
    loss = model.evaluate(np.zeros((1, 1)), np.zeros((1, 1)))
    self.assertEqual(loss, 0.)

    # Test that the argument injection performed in `call` is not active
    # when the argument is passed explicitly.
    layer = self._get_layer_with_training_arg()
    inputs = keras.Input(shape=(1,))
    # Pass `training` by name
    outputs = layer(inputs, training=False)
    model = keras.Model(inputs, outputs)
    model.compile(rmsprop.RMSprop(0.), loss='mae')
    history = model.fit(np.zeros((1, 1)), np.zeros((1, 1)))
    self.assertEqual(history.history['loss'][0], 0.)

  @keras_parameterized.run_with_all_model_types
  @keras_parameterized.run_all_keras_modes
  def test_raw_variable_assignment(self):

    class RawVariableLayer(keras.layers.Layer):
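      """Layer holding raw variables in a nested Python structure."""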

      def __init__(self, **kwargs):
        super(RawVariableLayer, self).__init__(**kwargs)
        # Test variables in nested structure.
        self.var_list = [variables.Variable(1.), {'a': variables.Variable(2.)}]

      def call(self, inputs):
        return inputs * self.var_list[0] * self.var_list[1]['a']

    model = testing_utils.get_model_from_layers([RawVariableLayer()],
                                                input_shape=(10,))
    model.compile('sgd', 'mse', run_eagerly=testing_utils.should_run_eagerly())
    x, y = np.ones((10, 10)), np.ones((10, 10))
    # Checks that variables get initialized.
    model.fit(x, y, batch_size=2, epochs=2)


class SymbolicSupportTest(test.TestCase):
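  """Tests for using Keras symbolic tensors with raw TF ops and tensors."""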

  def test_using_symbolic_tensors_with_tf_ops(self):
    # Single-input.
    x = keras.Input((3,))
    y = math_ops.square(x)
    self.assertEqual(y.graph, keras.backend.get_graph())

    # Multi-inputs.
    x1, x2 = keras.Input((3,)), keras.Input((3,))
    y = array_ops.concat([x1, x2], axis=1)
    self.assertEqual(y.graph, keras.backend.get_graph())

    # Mixing Keras symbolic tensors and graph tensors from the same graph
    # works.
    with keras.backend.get_graph().as_default():
      x1 = keras.Input((3,))
    x2 = keras.Input((3,))
    y = math_ops.matmul(x1, x2)
    self.assertEqual(y.graph, keras.backend.get_graph())

    # Creating the same op type (matmul) multiple times in the Keras graph
    # works.
    x1 = keras.Input((3,))
    x2 = keras.Input((3,))
    y = math_ops.matmul(x1, x2)
    self.assertEqual(y.graph, keras.backend.get_graph())

  def test_mixing_eager_and_graph_tensors(self):
    with ops.Graph().as_default():
      x1 = array_ops.ones((3, 3))
    x2 = array_ops.ones((3, 3))
    self.assertIsInstance(x2, ops.EagerTensor)
    with self.assertRaisesRegexp(TypeError, 'Graph tensors'):
      math_ops.matmul(x1, x2)

  def test_mixing_numpy_arrays_and_graph_tensors(self):
    with ops.Graph().as_default():
      x1 = array_ops.ones((3, 3))
    x2 = np.ones((3, 3), dtype='float32')
    with self.assertRaisesRegexp(TypeError, 'Graph tensors'):
      math_ops.matmul(x1, x2)

  @test_util.run_in_graph_and_eager_modes
  def test_mixing_keras_symbolic_tensors_and_eager_tensors(self):
    x1 = keras.Input((3,))
    x2 = array_ops.ones((3, 3))
    y = math_ops.matmul(x1, x2)
    self.assertEqual(y.graph, keras.backend.get_graph())
    fn = keras.backend.function(inputs=[x1], outputs=[y])
    x_val = np.random.random((3, 3))
    y_val = np.ones((3, 3))
    self.assertAllClose(fn([x_val])[0],
                        np.matmul(x_val, y_val),
                        atol=1e-5)

  @test_util.run_in_graph_and_eager_modes
  def test_mixing_keras_symbolic_tensors_and_numpy_arrays(self):
    x1 = keras.Input((3,))
    x2 = np.ones((3, 3), dtype='float32')
    y = math_ops.matmul(x1, x2)
    self.assertEqual(y.graph, keras.backend.get_graph())
    fn = keras.backend.function(inputs=[x1], outputs=[y])
    x_val = np.random.random((3, 3))
    y_val = np.ones((3, 3))
    self.assertAllClose(fn([x_val])[0],
                        np.matmul(x_val, y_val),
                        atol=1e-5)

  @test_util.run_in_graph_and_eager_modes
  def test_reraising_exception(self):
    # When a layer is not dynamic, we have some pattern matching during
    # exception handling to detect when the user is trying to use Python
    # control flow. When an exception is thrown but the pattern doesn't match,
    # we want to preserve the originating stack trace. An early implementation
    # of this logic lost the stack trace. We test the correct behavior here.

    class TypeErrorLayer(base_layer.Layer):
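      """Layer raising a TypeError whose traceback must be preserved."""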

      def call(self, inputs):
        def easily_identifiable_name():
          raise TypeError('Non-matching TypeError message.')
        easily_identifiable_name()

    inputs = keras.Input((3,))

    try:
      _ = TypeErrorLayer()(inputs)
    except TypeError:
      tb = traceback.extract_tb(sys.exc_info()[2])
      last_entry = tb[-1]
      function_name = last_entry[2]
      self.assertEqual(function_name, 'easily_identifiable_name')
    else:
      self.fail('The expected TypeError was not raised.')


@test_util.run_all_in_graph_and_eager_modes
class NestedTrackingTest(test.TestCase):
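  """Tests for automatic tracking of nested layers, variables, and updates."""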

  def test_nested_layer_variable_tracking(self):
    # Test that variables from nested sublayers are
    # being tracked by subclassed layers.

    class MyLayer(keras.layers.Layer):

      def __init__(self):
        super(MyLayer, self).__init__()
        self.dense1 = keras.layers.Dense(1)
        self.dense2 = keras.layers.BatchNormalization()

      def build(self, input_shape):
        self.v1 = self.add_weight('v1', shape=input_shape[1:].as_list())
        self.v2 = variables.Variable(
            name='v2',
            initial_value=np.zeros(input_shape[1:].as_list(), dtype='float32'),
            trainable=False)

      def call(self, inputs):
        x = self.dense1(inputs) + self.dense2(inputs)
        return x + self.v1 + self.v2

    layer = MyLayer()
    inputs = keras.Input((1,))
    _ = layer(inputs)

    self.assertEqual(len(layer.weights), 8)
    self.assertEqual(len(layer.trainable_weights), 5)
    self.assertEqual(len(layer.non_trainable_weights), 3)

    layer.dense1.trainable = False
    self.assertEqual(len(layer.weights), 8)
    self.assertEqual(len(layer.trainable_weights), 3)
    self.assertEqual(len(layer.non_trainable_weights), 5)

    layer.trainable = False
    self.assertEqual(len(layer.weights), 8)
    self.assertEqual(len(layer.trainable_weights), 0)
    self.assertEqual(len(layer.non_trainable_weights), 8)
    self.assertEqual(
        set([layer.dense1, layer.dense2, layer.v1, layer.v2]),
        set([obj for unused_name, obj in layer._checkpoint_dependencies]))

  def test_nested_layer_updates_losses_tracking(self):
    # Test that updates and losses from nested sublayers are
    # being tracked by subclassed layers.

    class UpdateAndLossLayer(keras.layers.Layer):

      def build(self, _):
        self.v1 = self.add_weight('v1', shape=())

      def call(self, inputs):
        self.add_loss(math_ops.reduce_sum(inputs))
        self.add_update(state_ops.assign_add(self.v1, 1))
        return inputs + 1

    class MyLayer(keras.layers.Layer):

      def __init__(self):
        super(MyLayer, self).__init__()
        self.ul1 = UpdateAndLossLayer()
        self.ul2 = UpdateAndLossLayer()

      def build(self, _):
        self.v1 = self.add_weight('v1', shape=())

      def call(self, inputs):
        self.add_loss(math_ops.reduce_sum(inputs))
        self.add_update(state_ops.assign_add(self.v1, 1))
        x = self.ul1(inputs)
        return self.ul2(x)

    layer = MyLayer()

    if context.executing_eagerly():
      inputs = array_ops.ones((3, 1))
      _ = layer(inputs)
      self.assertEqual(len(layer.losses), 3)
    else:
      inputs = keras.Input((1,))
      _ = layer(inputs)
      self.assertEqual(len(layer.losses), 3)
      self.assertEqual(len(layer.updates), 3)

  def test_attribute_reassignment(self):
    l = keras.layers.Layer()
    l.a = keras.layers.Layer()
    l.a = []
    l.a = variables.Variable(1.)
    l.a = keras.layers.Layer()
    last_assignment = keras.layers.Layer()
    l.a = last_assignment
    l.b = variables.Variable(1.)
    del l.b
    l.c = keras.layers.Layer()
    del l.c
    l.d = last_assignment
    del l.d
    self.assertEqual([last_assignment], l._layers)
    self.assertEqual([], l.trainable_weights)
    self.assertEqual([], l.non_trainable_weights)
    self.assertEqual([], l.weights)
    del l.a
    self.assertEqual([], l._layers)

  def test_assign_op_not_tracked_as_variable(self):

    class LayerWithAssignAttr(keras.layers.Layer):

      def build(self, input_shape):
        self.v = variables.Variable(1.)
        self.v_assign = self.v.assign_add(2.)

    layer = LayerWithAssignAttr()
    layer.build((10, 10))

    self.assertEqual([layer.v], layer.variables)


@test_util.run_all_in_graph_and_eager_modes
class NameScopingTest(keras_parameterized.TestCase):
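  """Tests that layer names propagate to weight and output tensor names."""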

  def test_name_scope_layer(self):
    x = keras.backend.placeholder(shape=(10, 10))
    layer = keras.layers.Dense(10, name='MyName')
    layer(x)
    self.assertEqual(layer.bias.name, 'MyName/bias:0')
    self.assertEqual(layer.kernel.name, 'MyName/kernel:0')

  def test_name_scope_sublayer(self):
    x = keras.backend.placeholder(shape=(10, 10))
    layer = keras.layers.Dense(
        10, activation=keras.layers.ReLU(name='MyAct'), name='MyName2')
    y = layer(x)
    self.assertEqual(layer.bias.name, 'MyName2/bias:0')
    self.assertEqual(layer.kernel.name, 'MyName2/kernel:0')
    self.assertEqual(y.name, 'MyName2/MyAct/Relu:0')

  def test_name_scope_tf_tensor(self):
    x = ops.convert_to_tensor(np.ones((10, 10)))
    layer = keras.layers.Dense(
        10, activation=keras.layers.ReLU(name='MyAct'), name='MyName3')
    layer(x)
    self.assertEqual(layer.bias.name, 'MyName3/bias:0')
    self.assertEqual(layer.kernel.name, 'MyName3/kernel:0')

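# Each entry below is (layer class, input shape without the batch dimension,
# OrderedDict mapping each constructor kwarg to the values to sweep).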
_LAYERS_TO_TEST = [
    (keras.layers.Dense, (1,), collections.OrderedDict(units=[1])),
    (keras.layers.Activation, (2, 2),
     collections.OrderedDict(activation=['relu'])),
    (keras.layers.Dropout, (16,), collections.OrderedDict(rate=[0.25])),
    (keras.layers.BatchNormalization, (8, 8, 3), collections.OrderedDict(
        axis=[3], center=[True, False], scale=[True, False])),
    (keras.layers.Conv1D, (8, 8), collections.OrderedDict(
        filters=[1], kernel_size=[1, 3], strides=[1, 2],
        padding=['valid', 'same'], use_bias=[True, False],
        kernel_regularizer=[None, 'l2'])),
    (keras.layers.Conv2D, (8, 8, 3), collections.OrderedDict(
        filters=[1], kernel_size=[1, 3], strides=[1, 2],
        padding=['valid', 'same'], use_bias=[True, False],
        kernel_regularizer=[None, 'l2'])),
    (keras.layers.LSTM, (8, 8), collections.OrderedDict(
        units=[1],
        activation=[None, 'relu'],
        kernel_regularizer=[None, 'l2'],
        dropout=[0, 0.5],
        stateful=[True, False],
        unroll=[True, False])),
]

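# Expand each entry above into one named test case per combination of swept
# kwarg values (the Cartesian product over the OrderedDict), e.g.
# '_Dense_units_1' for the first entry.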
OUTPUT_TEST_CASES = []
for layer_type, inp_shape, arg_dict in _LAYERS_TO_TEST:
  arg_combinations = [[(k, i) for i in v] for k, v in arg_dict.items()]  # pylint: disable=g-complex-comprehension
  for args in it.product(*arg_combinations):
    name = '_{}_{}'.format(
        layer_type.__name__, '_'.join('{}_{}'.format(k, v) for k, v in args))
    OUTPUT_TEST_CASES.append(
        (name, layer_type, inp_shape, dict(args)))


class OutputTypeTest(keras_parameterized.TestCase):
  """Test that layers and models produce the correct tensor types."""

  # In v1 graph mode there are only symbolic tensors.
  @keras_parameterized.run_all_keras_modes(always_skip_v1=True)
  @parameterized.named_parameters(*OUTPUT_TEST_CASES)
  def test_layer_outputs(self, layer_to_test, input_shape, layer_kwargs):
    layer = layer_to_test(**layer_kwargs)

    input_data = np.ones(shape=(2,) + input_shape, dtype=np.float32)
    layer_result = layer(input_data)

    inp = keras.layers.Input(shape=input_shape, batch_size=2)
    model = keras.models.Model(inp, layer_to_test(**layer_kwargs)(inp))
    model_result = model(input_data)

    for x in [layer_result, model_result]:
      if not isinstance(x, ops.Tensor):
        raise ValueError('Tensor or EagerTensor expected, got type {}'
                         .format(type(x)))

      if isinstance(x, ops.EagerTensor) != context.executing_eagerly():
        expected_type = (ops.EagerTensor if context.executing_eagerly()
                         else ops.Tensor)
        raise ValueError('Expected type {}, got type {}'
                         .format(expected_type, type(x)))


if __name__ == '__main__':
  ops.enable_eager_execution()
  test.main()