# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras composite tensor support."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.layers import core
from tensorflow.python.keras.layers import Layer
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import test


# Define test-only Layer classes to validate passing Sparse and Ragged tensors
# between layers.
class ToDense(Layer):
  """Create a dense (standard) tensor from the given input tensor."""

  def __init__(self, default_value, **kwargs):
    super(ToDense, self).__init__(**kwargs)
    self._default_value = default_value

  def call(self, inputs):
    if isinstance(inputs, ragged_tensor.RaggedTensor):
      return inputs.to_tensor(default_value=self._default_value)
    elif isinstance(inputs, sparse_tensor.SparseTensor):
      return sparse_ops.sparse_tensor_to_dense(
          inputs, default_value=self._default_value)
    elif isinstance(inputs, ops.Tensor):
      return inputs
    else:
      raise TypeError("Unexpected tensor type %s" % type(inputs).__name__)


class ToRagged(Layer):
  """Create a ragged tensor based on a given dense tensor."""

  def __init__(self, padding, ragged_rank=1, **kwargs):
    super(ToRagged, self).__init__(**kwargs)
    self._padding = padding
    self._ragged_rank = ragged_rank

  def call(self, inputs):
    return ragged_tensor.RaggedTensor.from_tensor(
        inputs, padding=self._padding, ragged_rank=self._ragged_rank)


class ToSparse(Layer):
  """Create a sparse tensor based on a given dense tensor."""

  def call(self, inputs):
    indices = array_ops.where(math_ops.not_equal(inputs, 0))
    values = array_ops.gather_nd(inputs, indices)
    shape = array_ops.shape(inputs, out_type=dtypes.int64)
    return sparse_tensor.SparseTensor(indices, values, dense_shape=shape)


@keras_parameterized.run_with_all_model_types
@keras_parameterized.run_all_keras_modes
class InternalCompositeTest(keras_parameterized.TestCase):

  def test_model_with_internal_ragged_tensors(self):
    # Create a model that accepts an input, converts it to Ragged, and
    # converts the ragged tensor back to a dense tensor.
    layers = [ToRagged(padding=0), ToDense(default_value=-1)]
    model = testing_utils.get_model_from_layers(layers, input_shape=(None,))

    # Define some training data with additional padding.
    input_data = np.array([[1, 0, 0], [2, 3, 0]])
    expected_output = np.array([[1, -1], [2, 3]])
    output = model.predict(input_data)
    self.assertAllEqual(expected_output, output)

  def test_model_with_internal_sparse_tensors(self):
    # Create a model that accepts an input, converts it to Sparse, and
    # converts the sparse tensor back to a dense tensor.
    layers = [ToSparse(), ToDense(default_value=-1)]
    model = testing_utils.get_model_from_layers(layers, input_shape=(None,))

    # Define some training data with additional padding.
    input_data = np.array([[1, 0, 0], [2, 3, 0]])
    expected_output = np.array([[1, -1, -1], [2, 3, -1]])
    output = model.predict(input_data)
    self.assertAllEqual(expected_output, output)

  def test_training_model_with_internal_ragged_tensors(self):

    # Create a model that implements y=Mx. This is easy to learn and will
    # demonstrate appropriate gradient passing. (We have to use RaggedTensors
    # for this test, as ToSparse() doesn't support gradient propagation through
    # the layer.) TODO(b/124796939): Investigate this.
    layers = [core.Dense(2), ToRagged(padding=0), ToDense(default_value=-1)]
    model = testing_utils.get_model_from_layers(layers, input_shape=(1,))

    input_data = np.random.rand(1024, 1)
    expected_data = np.concatenate((input_data * 3, input_data * .5), axis=-1)

    model.compile(
        loss="mse",
        optimizer="adam",
        run_eagerly=testing_utils.should_run_eagerly())
    history = model.fit(input_data, expected_data, epochs=10, verbose=0)

    # If the model trained, the loss stored at history[0] should be different
    # than the one stored at history[-1].
    self.assertNotEqual(history.history["loss"][-1],
                        history.history["loss"][0])


if __name__ == "__main__":
  test.main()