# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test utilities for tf.signal."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.core.protobuf import config_pb2
from tensorflow.lite.python import interpreter
from tensorflow.lite.python import lite
from tensorflow.python.eager import def_function
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.training import saver


def grappler_optimize(graph, fetches=None, config_proto=None):
  """Tries to optimize the provided graph using Grappler.

  Args:
    graph: A `tf.Graph` instance containing the graph to optimize.
    fetches: An optional list of `Tensor`s to fetch (i.e. to prevent them from
      being optimized away). Grappler uses the 'train_op' collection to look
      for fetches, so if `fetches` is not provided this collection should be
      non-empty.
    config_proto: An optional `tf.compat.v1.ConfigProto` to use when rewriting
      the graph.

  Returns:
    A `tf.compat.v1.GraphDef` containing the rewritten graph.
  """
  if config_proto is None:
    config_proto = config_pb2.ConfigProto()
    config_proto.graph_options.rewrite_options.min_graph_nodes = -1
  if fetches is not None:
    for fetch in fetches:
      graph.add_to_collection('train_op', fetch)
  metagraph = saver.export_meta_graph(graph_def=graph.as_graph_def())
  return tf_optimizer.OptimizeGraph(config_proto, metagraph)
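
# Example usage of `grappler_optimize` (a minimal sketch, not part of this
# module's API; the graph-building imports shown here are assumptions and
# would be needed by the caller):
#
#   from tensorflow.python.framework import dtypes
#   from tensorflow.python.framework import ops
#   from tensorflow.python.ops import array_ops
#   from tensorflow.python.ops import math_ops
#
#   with ops.Graph().as_default() as graph:
#     x = array_ops.placeholder(dtypes.float32, shape=[4])
#     y = math_ops.reduce_sum(x * 2.0)
#     # Passing `y` as a fetch keeps it from being optimized away.
#     rewritten_graph_def = grappler_optimize(graph, fetches=[y])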


def tflite_convert(fn, input_templates):
  """Converts the provided fn to a tf.lite model.

  Args:
    fn: A callable that accepts inputs matching input_templates and returns a
      tensor or structure of tensors.
    input_templates: A list of Tensors, ndarrays or TensorSpecs describing the
      inputs that fn expects. The actual values of the Tensors or ndarrays are
      unused.

  Returns:
    The serialized tf.lite model.
  """
  fn = def_function.function(fn)
  concrete_func = fn.get_concrete_function(*input_templates)
  converter = lite.TFLiteConverterV2([concrete_func])
  return converter.convert()
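
# Example usage of `tflite_convert` (a minimal sketch; the `TensorSpec`
# template and the lambda below are illustrative assumptions, not part of
# this module):
#
#   from tensorflow.python.framework import dtypes
#   from tensorflow.python.framework import tensor_spec
#
#   template = tensor_spec.TensorSpec(shape=[3], dtype=dtypes.float32)
#   # Only the shape/dtype of the template matter, not its values.
#   tflite_model = tflite_convert(lambda x: x * 2.0, [template])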


def evaluate_tflite_model(tflite_model, input_ndarrays):
  """Evaluates the provided tf.lite model with the given input ndarrays.

  Args:
    tflite_model: bytes. The serialized tf.lite model.
    input_ndarrays: A list of NumPy arrays to feed as input to the model.

  Returns:
    A list of ndarrays produced by the model.

  Raises:
    ValueError: If the number of input arrays does not match the number of
      inputs the model expects.
  """
  the_interpreter = interpreter.Interpreter(model_content=tflite_model)
  the_interpreter.allocate_tensors()

  input_details = the_interpreter.get_input_details()
  output_details = the_interpreter.get_output_details()

  if len(input_details) != len(input_ndarrays):
    raise ValueError('Wrong number of inputs: provided=%s, '
                     'input_details=%s output_details=%s' % (
                         input_ndarrays, input_details, output_details))
  for input_tensor, data in zip(input_details, input_ndarrays):
    the_interpreter.set_tensor(input_tensor['index'], data)
  the_interpreter.invoke()
  return [the_interpreter.get_tensor(details['index'])
          for details in output_details]
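
# Example usage of `evaluate_tflite_model` (a minimal sketch continuing the
# `tflite_convert` example above; `np` is an assumed `import numpy as np`):
#
#   outputs = evaluate_tflite_model(
#       tflite_model, [np.array([1.0, 2.0, 3.0], dtype=np.float32)])
#   # `outputs[0]` holds the result for the model's single output tensor.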