{
  "models": [
    {
      "name": "mobilenet_v1_1.0_224_topk_aosp",
      "modelFile": "mobilenet_v1_1.0_224",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.617
      },
      "inputSize": [1, 224, 224, 3],
      "dataSize": 4,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v1_1.0_224_quant_topk_aosp",
      "modelFile": "mobilenet_v1_1.0_224_quant",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.596
      },
      "inputSize": [1, 224, 224, 3],
      "dataSize": 1,
      "inputScale": 0.007812,
      "inputZeroPoint": 128.0,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v1_0.75_192_topk_aosp",
      "modelFile": "mobilenet_v1_0.75_192",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.604
      },
      "inputSize": [1, 192, 192, 3],
      "dataSize": 4,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v1_0.75_192_quant_topk_aosp",
      "modelFile": "mobilenet_v1_0.75_192_quant",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.582
      },
      "inputSize": [1, 192, 192, 3],
      "dataSize": 1,
      "inputScale": 0.007812,
      "inputZeroPoint": 128.0,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v1_0.5_160_topk_aosp",
      "modelFile": "mobilenet_v1_0.5_160",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.582
      },
      "inputSize": [1, 160, 160, 3],
      "dataSize": 4,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v1_0.5_160_quant_topk_aosp",
      "modelFile": "mobilenet_v1_0.5_160_quant",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.550
      },
      "inputSize": [1, 160, 160, 3],
      "dataSize": 1,
      "inputScale": 0.007812,
      "inputZeroPoint": 128.0,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v1_0.25_128_topk_aosp",
      "modelFile": "mobilenet_v1_0.25_128",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.444
      },
      "inputSize": [1, 128, 128, 3],
      "dataSize": 4,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v1_0.25_128_quant_topk_aosp",
      "modelFile": "mobilenet_v1_0.25_128_quant",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.402
      },
      "inputSize": [1, 128, 128, 3],
      "dataSize": 1,
      "inputScale": 0.007812,
      "inputZeroPoint": 128.0,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v2_0.35_128_topk_aosp",
      "modelFile": "mobilenet_v2_0.35_128",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.516
      },
      "inputSize": [1, 128, 128, 3],
      "dataSize": 4,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v2_0.5_160_topk_aosp",
      "modelFile": "mobilenet_v2_0.5_160",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.583
      },
      "inputSize": [1, 160, 160, 3],
      "dataSize": 4,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v2_0.75_192_topk_aosp",
      "modelFile": "mobilenet_v2_0.75_192",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.617
      },
      "inputSize": [1, 192, 192, 3],
      "dataSize": 4,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v2_1.0_224_topk_aosp",
      "modelFile": "mobilenet_v2_1.0_224",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.639
      },
      "inputSize": [1, 224, 224, 3],
      "dataSize": 4,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    },
    {
      "name": "mobilenet_v2_1.0_224_quant_topk_aosp",
      "modelFile": "mobilenet_v2_1.0_224_quant",
      "baselineSec": 0.060,
      "evaluator": {
        "className": "TopK",
        "expectedTop1": 0.617
      },
      "inputSize": [1, 224, 224, 3],
      "dataSize": 1,
      "inputScale": 0.007812,
      "inputZeroPoint": 128.0,
      "dataset": {
        "inputPath": "image_classification",
        "groundTruth": "image_classification/ground_truth_labels.txt",
        "labels": "image_classification/labels.txt",
        "preprocessor": "Inception"
      }
    }
  ]
}