1{
2  "models": [
3    {
4      "name": "mobilenet_v1_1.0_224_topk_aosp",
5      "modelFile": "mobilenet_v1_1.0_224",
6      "baselineSec": 0.060,
7      "evaluator": {
8          "className": "TopK",
9          "expectedTop1": 0.617
10      },
11      "inputSize": [1, 224, 224, 3],
12      "dataSize": 4,
13      "dataset": {
14          "inputPath": "image_classification",
15          "groundTruth": "image_classification/ground_truth_labels.txt",
16          "labels": "image_classification/labels.txt",
17          "preprocessor": "Inception"
18      }
19    },
20    {
21      "name": "mobilenet_v1_1.0_224_quant_topk_aosp",
22      "modelFile": "mobilenet_v1_1.0_224_quant",
23      "baselineSec": 0.060,
24      "evaluator": {
25          "className": "TopK",
26          "expectedTop1": 0.596
27      },
28      "inputSize": [1, 224, 224, 3],
29      "dataSize": 1,
30      "inputScale": 0.007812,
      "inputZeroPoint": 128,
32      "dataset": {
33          "inputPath": "image_classification",
34          "groundTruth": "image_classification/ground_truth_labels.txt",
35          "labels": "image_classification/labels.txt",
36          "preprocessor": "Inception"
37      }
38    },
39    {
40      "name": "mobilenet_v1_0.75_192_topk_aosp",
41      "modelFile": "mobilenet_v1_0.75_192",
42      "baselineSec": 0.060,
43      "evaluator": {
44          "className": "TopK",
45          "expectedTop1": 0.604
46      },
47      "inputSize": [1, 192, 192, 3],
48      "dataSize": 4,
49      "dataset": {
50          "inputPath": "image_classification",
51          "groundTruth": "image_classification/ground_truth_labels.txt",
52          "labels": "image_classification/labels.txt",
53          "preprocessor": "Inception"
54      }
55    },
56    {
57      "name": "mobilenet_v1_0.75_192_quant_topk_aosp",
58      "modelFile": "mobilenet_v1_0.75_192_quant",
59      "baselineSec": 0.060,
60      "evaluator": {
61          "className": "TopK",
62          "expectedTop1": 0.582
63      },
64      "inputSize": [1, 192, 192, 3],
65      "dataSize": 1,
66      "inputScale": 0.007812,
      "inputZeroPoint": 128,
68      "dataset": {
69          "inputPath": "image_classification",
70          "groundTruth": "image_classification/ground_truth_labels.txt",
71          "labels": "image_classification/labels.txt",
72          "preprocessor": "Inception"
73      }
74    },
75    {
76      "name": "mobilenet_v1_0.5_160_topk_aosp",
77      "modelFile": "mobilenet_v1_0.5_160",
78      "baselineSec": 0.060,
79      "evaluator": {
80          "className": "TopK",
81          "expectedTop1": 0.582
82      },
83      "inputSize": [1, 160, 160, 3],
84      "dataSize": 4,
85      "dataset": {
86          "inputPath": "image_classification",
87          "groundTruth": "image_classification/ground_truth_labels.txt",
88          "labels": "image_classification/labels.txt",
89          "preprocessor": "Inception"
90      }
91    },
92    {
93      "name": "mobilenet_v1_0.5_160_quant_topk_aosp",
94      "modelFile": "mobilenet_v1_0.5_160_quant",
95      "baselineSec": 0.060,
96      "evaluator": {
97          "className": "TopK",
98          "expectedTop1": 0.550
99      },
100      "inputSize": [1, 160, 160, 3],
101      "dataSize": 1,
102      "inputScale": 0.007812,
      "inputZeroPoint": 128,
104      "dataset": {
105          "inputPath": "image_classification",
106          "groundTruth": "image_classification/ground_truth_labels.txt",
107          "labels": "image_classification/labels.txt",
108          "preprocessor": "Inception"
109      }
110    },
111    {
112      "name": "mobilenet_v1_0.25_128_topk_aosp",
113      "modelFile": "mobilenet_v1_0.25_128",
114      "baselineSec": 0.060,
115      "evaluator": {
116          "className": "TopK",
117          "expectedTop1": 0.444
118      },
119      "inputSize": [1, 128, 128, 3],
120      "dataSize": 4,
121      "dataset": {
122          "inputPath": "image_classification",
123          "groundTruth": "image_classification/ground_truth_labels.txt",
124          "labels": "image_classification/labels.txt",
125          "preprocessor": "Inception"
126      }
127    },
128    {
129      "name": "mobilenet_v1_0.25_128_quant_topk_aosp",
130      "modelFile": "mobilenet_v1_0.25_128_quant",
131      "baselineSec": 0.060,
132      "evaluator": {
133          "className": "TopK",
134          "expectedTop1": 0.402
135      },
136      "inputSize": [1, 128, 128, 3],
137      "dataSize": 1,
138      "inputScale": 0.007812,
      "inputZeroPoint": 128,
140      "dataset": {
141          "inputPath": "image_classification",
142          "groundTruth": "image_classification/ground_truth_labels.txt",
143          "labels": "image_classification/labels.txt",
144          "preprocessor": "Inception"
145      }
146    },
147    {
148      "name": "mobilenet_v2_0.35_128_topk_aosp",
149      "modelFile": "mobilenet_v2_0.35_128",
150      "baselineSec": 0.060,
151      "evaluator": {
152          "className": "TopK",
153          "expectedTop1": 0.516
154      },
155      "inputSize": [1, 128, 128, 3],
156      "dataSize": 4,
157      "dataset": {
158          "inputPath": "image_classification",
159          "groundTruth": "image_classification/ground_truth_labels.txt",
160          "labels": "image_classification/labels.txt",
161          "preprocessor": "Inception"
162      }
163    },
164    {
165      "name": "mobilenet_v2_0.5_160_topk_aosp",
166      "modelFile": "mobilenet_v2_0.5_160",
167      "baselineSec": 0.060,
168      "evaluator": {
169          "className": "TopK",
170          "expectedTop1": 0.583
171      },
172      "inputSize": [1, 160, 160, 3],
173      "dataSize": 4,
174      "dataset": {
175          "inputPath": "image_classification",
176          "groundTruth": "image_classification/ground_truth_labels.txt",
177          "labels": "image_classification/labels.txt",
178          "preprocessor": "Inception"
179      }
180    },
181    {
182      "name": "mobilenet_v2_0.75_192_topk_aosp",
183      "modelFile": "mobilenet_v2_0.75_192",
184      "baselineSec": 0.060,
185      "evaluator": {
186          "className": "TopK",
187          "expectedTop1": 0.617
188      },
189      "inputSize": [1, 192, 192, 3],
190      "dataSize": 4,
191      "dataset": {
192          "inputPath": "image_classification",
193          "groundTruth": "image_classification/ground_truth_labels.txt",
194          "labels": "image_classification/labels.txt",
195          "preprocessor": "Inception"
196      }
197    },
198    {
199      "name": "mobilenet_v2_1.0_224_topk_aosp",
200      "modelFile": "mobilenet_v2_1.0_224",
201      "baselineSec": 0.060,
202      "evaluator": {
203          "className": "TopK",
204          "expectedTop1": 0.639
205      },
206      "inputSize": [1, 224, 224, 3],
207      "dataSize": 4,
208      "dataset": {
209          "inputPath": "image_classification",
210          "groundTruth": "image_classification/ground_truth_labels.txt",
211          "labels": "image_classification/labels.txt",
212          "preprocessor": "Inception"
213      }
214    },
215    {
216      "name": "mobilenet_v2_1.0_224_quant_topk_aosp",
217      "modelFile": "mobilenet_v2_1.0_224_quant",
218      "baselineSec": 0.060,
219      "evaluator": {
220          "className": "TopK",
221          "expectedTop1": 0.617
222      },
223      "inputSize": [1, 224, 224, 3],
224      "dataSize": 1,
225      "inputScale": 0.007812,
      "inputZeroPoint": 128,
227      "dataset": {
228          "inputPath": "image_classification",
229          "groundTruth": "image_classification/ground_truth_labels.txt",
230          "labels": "image_classification/labels.txt",
231          "preprocessor": "Inception"
232      }
233    },
234    {
235      "name": "mobilenet_v3-small_224_0.75_float_topk_aosp",
236      "modelFile": "mobilenet_v3-small_224_0.75_float",
237      "baselineSec": 0.060,
238      "evaluator": {
239          "className": "TopK",
240          "expectedTop1": 0.60536915
241      },
242      "inputSize": [1, 224, 224, 3],
243      "dataSize": 4,
244      "dataset": {
245          "inputPath": "image_classification",
246          "groundTruth": "image_classification/ground_truth_labels.txt",
247          "labels": "image_classification/labels.txt",
248          "preprocessor": "Inception"
249      }
250    },
251    {
252      "name": "mobilenet_v3-small_224_1.0_float_topk_aosp",
253      "modelFile": "mobilenet_v3-small_224_1.0_float",
254      "baselineSec": 0.060,
255      "evaluator": {
256          "className": "TopK",
257          "expectedTop1": 0.6107383
258      },
259      "inputSize": [1, 224, 224, 3],
260      "dataSize": 4,
261      "dataset": {
262          "inputPath": "image_classification",
263          "groundTruth": "image_classification/ground_truth_labels.txt",
264          "labels": "image_classification/labels.txt",
265          "preprocessor": "Inception"
266      }
267    },
268    {
269      "name": "mobilenet_v3-small-minimalistic_224_1.0_float_topk_aosp",
270      "modelFile": "mobilenet_v3-small-minimalistic_224_1.0_float",
271      "baselineSec": 0.060,
272      "evaluator": {
273          "className": "TopK",
274          "expectedTop1": 0.5885906
275      },
276      "inputSize": [1, 224, 224, 3],
277      "dataSize": 4,
278      "dataset": {
279          "inputPath": "image_classification",
280          "groundTruth": "image_classification/ground_truth_labels.txt",
281          "labels": "image_classification/labels.txt",
282          "preprocessor": "Inception"
283      }
284    },
285    {
286      "name": "mobilenet_v3-large_224_0.75_float_topk_aosp",
287      "modelFile": "mobilenet_v3-large_224_0.75_float",
288      "baselineSec": 0.060,
289      "evaluator": {
290          "className": "TopK",
291          "expectedTop1": 0.63892615
292      },
293      "inputSize": [1, 224, 224, 3],
294      "dataSize": 4,
295      "dataset": {
296          "inputPath": "image_classification",
297          "groundTruth": "image_classification/ground_truth_labels.txt",
298          "labels": "image_classification/labels.txt",
299          "preprocessor": "Inception"
300      }
301    },
302    {
303      "name": "mobilenet_v3-large_224_1.0_float_topk_aosp",
304      "modelFile": "mobilenet_v3-large_224_1.0_float",
305      "baselineSec": 0.060,
306      "evaluator": {
307          "className": "TopK",
308          "expectedTop1": 0.6577181
309      },
310      "inputSize": [1, 224, 224, 3],
311      "dataSize": 4,
312      "dataset": {
313          "inputPath": "image_classification",
314          "groundTruth": "image_classification/ground_truth_labels.txt",
315          "labels": "image_classification/labels.txt",
316          "preprocessor": "Inception"
317      }
318    },
319    {
320      "name": "mobilenet_v3-large-minimalistic_224_1.0_float_topk_aosp",
321      "modelFile": "mobilenet_v3-large-minimalistic_224_1.0_float",
322      "baselineSec": 0.060,
323      "evaluator": {
324          "className": "TopK",
325          "expectedTop1": 0.6375839
326      },
327      "inputSize": [1, 224, 224, 3],
328      "dataSize": 4,
329      "dataset": {
330          "inputPath": "image_classification",
331          "groundTruth": "image_classification/ground_truth_labels.txt",
332          "labels": "image_classification/labels.txt",
333          "preprocessor": "Inception"
334      }
335    },
336    {
337      "name": "mobilenet_v3-small_224_1.0_uint8_topk_aosp",
338      "modelFile": "mobilenet_v3-small_224_1.0_uint8",
339      "baselineSec": 0.060,
340      "evaluator": {
341          "className": "TopK",
342          "expectedTop1": 0.56510067
343      },
344      "inputSize": [1, 224, 224, 3],
345      "dataSize": 1,
346      "inputScale": 0.007812,
      "inputZeroPoint": 128,
348      "dataset": {
349          "inputPath": "image_classification",
350          "groundTruth": "image_classification/ground_truth_labels.txt",
351          "labels": "image_classification/labels.txt",
352          "preprocessor": "Inception"
353      }
354    },
355    {
356      "name": "mobilenet_v3-large_224_1.0_uint8_topk_aosp",
357      "modelFile": "mobilenet_v3-large_224_1.0_uint8",
358      "baselineSec": 0.060,
359      "evaluator": {
360          "className": "TopK",
361          "expectedTop1": 0.61879194
362      },
363      "inputSize": [1, 224, 224, 3],
364      "dataSize": 1,
365      "inputScale": 0.007812,
      "inputZeroPoint": 128,
367      "dataset": {
368          "inputPath": "image_classification",
369          "groundTruth": "image_classification/ground_truth_labels.txt",
370          "labels": "image_classification/labels.txt",
371          "preprocessor": "Inception"
372      }
373    },
374    {
375      "name": "mobilenet_v3-large-minimalistic_224_1.0_uint8_topk_aosp",
376      "modelFile": "mobilenet_v3-large-minimalistic_224_1.0_uint8",
377      "baselineSec": 0.060,
378      "evaluator": {
379          "className": "TopK",
380          "expectedTop1": 0.59395975
381      },
382      "inputSize": [1, 224, 224, 3],
383      "dataSize": 1,
384      "inputScale": 0.007812,
      "inputZeroPoint": 128,
386      "dataset": {
387          "inputPath": "image_classification",
388          "groundTruth": "image_classification/ground_truth_labels.txt",
389          "labels": "image_classification/labels.txt",
390          "preprocessor": "Inception"
391      }
392    }
393  ]
394}
395