/tools/asuite/aidegen/lib/ |
D | aidegen_metrics.py |
     42  from asuite.metrics import metrics
     45  metrics = None  (variable)
     48  from asuite.metrics import metrics_base
     54  from asuite.metrics import metrics_utils
     69  if not metrics:
     75  metrics.AtestStartEvent(
    137  if not metrics:
    140  metrics.LocalDetectEvent(
|
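The aidegen_metrics.py hits above show asuite's optional-metrics pattern: the metrics modules are imported optimistically, rebound to None on ImportError, and every event call is guarded on that sentinel. A minimal runnable sketch of the pattern; the helper name is hypothetical, and the command_line keyword is taken from metrics_utils.py further down (the other event arguments are truncated in the listing and left out):

    import sys

    try:
        from asuite.metrics import metrics          # line 42 above
    except ImportError:
        metrics = None                              # line 45: metrics becomes a no-op sentinel

    def record_start_event():                       # hypothetical helper name
        """Emit an AtestStartEvent only when the metrics package is importable."""
        if not metrics:                             # guard pattern from lines 69 and 137
            return
        metrics.AtestStartEvent(command_line=' '.join(sys.argv))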
D | aidegen_metrics_unittest.py |
     28  from asuite.metrics import metrics
     29  from asuite.metrics import metrics_utils
     31  metrics = None  (variable)
     42  if not metrics:
     47  with mock.patch.object(metrics, 'AtestStartEvent') as mk_start:
|
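The companion unit test skips itself under the same sentinel and patches the event class so nothing is actually uploaded. A hedged sketch of that shape (the test class and the helper under test are illustrative, not the file's actual contents):

    import sys
    import unittest
    from unittest import mock

    try:
        from asuite.metrics import metrics
    except ImportError:
        metrics = None

    def record_start_event():                       # hypothetical helper under test
        if not metrics:
            return
        metrics.AtestStartEvent(command_line=' '.join(sys.argv))

    class AidegenMetricsTest(unittest.TestCase):    # hypothetical test class
        def test_records_start_event(self):
            if not metrics:                         # line 42 above: nothing to patch without the package
                return
            with mock.patch.object(metrics, 'AtestStartEvent') as mk_start:
                record_start_event()
                self.assertTrue(mk_start.called)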
/tools/test/connectivity/acts_tests/tests/google/experimental/ |
D | BluetoothPairAndConnectTest.py |
    126  metrics = {}
    160  metrics['pair_attempt_count'] = PAIR_CONNECT_ATTEMPTS
    161  metrics['pair_successful_count'] = pair_connect_success
    162  metrics['pair_failed_count'] = (PAIR_CONNECT_ATTEMPTS -
    166  metrics['pair_max_time_millis'] = int(max(pair_times))
    167  metrics['pair_min_time_millis'] = int(min(pair_times))
    168  metrics['pair_avg_time_millis'] = int(statistics.mean(pair_times))
    171  metrics['first_connection_max_time_millis'] = int(
    173  metrics['first_connection_min_time_millis'] = int(
    175  metrics['first_connection_avg_time_millis'] = int(
    [all …]
|
D | BluetoothReconnectTest.py |
    132  metrics = {}
    159  metrics['connection_attempt_count'] = RECONNECTION_ATTEMPTS
    160  metrics['connection_successful_count'] = connection_success
    161  metrics['connection_failed_count'] = (RECONNECTION_ATTEMPTS
    164  metrics['connection_max_time_millis'] = int(max(connection_times))
    165  metrics['connection_min_time_millis'] = int(min(connection_times))
    166  metrics['connection_avg_time_millis'] = int(statistics.mean(
    170  metrics['connection_failure_info'] = reconnection_failures
    172  proto = self.bt_logger.get_results(metrics,
    177  self.log.info('Metrics: {}'.format(metrics))
|
D | BluetoothLatencyTest.py |
    120  metrics = {}
    126  metrics['data_transfer_protocol'] = self.data_transfer_type
    127  metrics['data_latency_min_millis'] = int(min(latency_list))
    128  metrics['data_latency_max_millis'] = int(max(latency_list))
    129  metrics['data_latency_avg_millis'] = int(statistics.mean(latency_list))
    130  self.log.info('Latency: {}'.format(metrics))
    132  proto = self.bt_logger.get_results(metrics,
    137  asserts.assert_true(metrics['data_latency_min_millis'] > 0,
|
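All three Bluetooth tests above share one reporting shape: collect per-attempt timings, then reduce them into a flat dict of *_min_millis / *_max_millis / *_avg_millis fields before handing the dict to a proto logger. A self-contained sketch of that reduction using the BluetoothLatencyTest field names (the bt_logger.get_results call is omitted because its arguments are truncated in the listing):

    import statistics

    def latency_metrics(latency_list, protocol):
        """Collapse raw per-attempt latencies into the flat reporting dict."""
        metrics = {}
        metrics['data_transfer_protocol'] = protocol
        metrics['data_latency_min_millis'] = int(min(latency_list))
        metrics['data_latency_max_millis'] = int(max(latency_list))
        metrics['data_latency_avg_millis'] = int(statistics.mean(latency_list))
        return metrics

    print(latency_metrics([12.4, 9.8, 15.1], 'RFCOMM'))   # sample values for illustration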
/tools/test/connectivity/acts/framework/tests/controllers/ |
D | bits_test.py |
     49  metrics = bits._raw_data_to_metrics(raw_data)
     50  self.assertEqual(2, len(metrics))
     53  metrics[0])
     56  metrics[1])
     67  metrics = bits._raw_data_to_metrics(raw_data)
     68  self.assertEqual(0, len(metrics))
|
/tools/asuite/atest/asuite_lib_test/ |
D | asuite_cc_client_test.py |
     33  from asuite.metrics import metrics
     34  from asuite.metrics import metrics_base
     35  from asuite.metrics import metrics_utils
|
/tools/test/connectivity/acts/framework/tests/metrics/loggers/ |
D | usage_metadata_logger_test.py |
     21  from acts.metrics.loggers import usage_metadata_logger
     22  from acts.metrics.loggers.protos.gen import acts_usage_metadata_pb2
     23  from acts.metrics.loggers.usage_metadata_logger import UsageMetadataKey
     24  from acts.metrics.loggers.usage_metadata_logger import UsageMetadataPublisher
     25  from acts.metrics.loggers.usage_metadata_logger import _usage_map
     26  from acts.metrics.loggers.usage_metadata_logger import log_usage
     27  from acts.metrics.core import ProtoMetric
|
/tools/trebuchet/trebuchet/viewer/src/main/kotlin/traceviewer/ui/tracks/ |
D | SliceTrack.kt |
     46  val metrics = g.fontMetrics  (in paintComponent(), constant)
     47  var ty = metrics.ascent  (in paintComponent())
     63  if (height >= metrics.height) {  (in paintComponent())
     64  drawLabel(it, g, metrics, x, ty, width)  (in paintComponent())
     70  open fun drawLabel(slice: T, g: Graphics, metrics: FontMetrics, x: Int, y: Int, width: Int) {  (in drawLabel())
     74  strWidth += metrics.charWidth(slice.name[strLimit])  (in drawLabel())
|
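SliceTrack.drawLabel walks the slice name one character at a time, adding FontMetrics.charWidth until the label would overflow the slice. The same idea as a dependency-free Python sketch (char_width stands in for FontMetrics.charWidth, and the loop bounds are assumed from lines 70 and 74):

    def fit_label(name, width, char_width=lambda ch: 7):
        """Return the longest prefix of name whose measured width fits in width."""
        str_width = 0
        str_limit = 0
        while str_limit < len(name):
            str_width += char_width(name[str_limit])   # FontMetrics.charWidth analogue
            if str_width > width:
                break
            str_limit += 1
        return name[:str_limit]

    assert fit_label('binderTransaction', 70) == 'binderTran'   # 10 chars * 7px fit in 70px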
/tools/test/graphicsbenchmark/performance_tests/hostside/src/com/android/game/qualification/reporter/ |
D | GameQualificationResultReporter.java |
     26  import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
     76  … public void testEnded(TestDescription testId, long elapsedTime, HashMap<String, Metric> metrics) {  (in testEnded(), argument)
     77  super.testEnded(testId, elapsedTime, metrics);  (in testEnded())
     78  if (!metrics.isEmpty()) {  (in testEnded())
     79  … MetricSummary summary = MetricSummary.parseRunMetrics(getInvocationContext(), metrics);  (in testEnded())
     82  } else if (metrics.containsKey("memory_allocated")) {  (in testEnded())
     83  … mTotalAllocated = (int) metrics.get("memory_allocated").getMeasurements().getSingleInt();  (in testEnded())
    306  MetricSummary metrics = entry.getValue();  (in createPerformanceReport(), local)
    314  List<LoopSummary> loopSummaries = metrics.getLoopSummaries();  (in createPerformanceReport())
    344  if (metrics.getLoadTimeMs() == -1) {  (in createPerformanceReport())
    [all …]
|
/tools/test/graphicsbenchmark/performance_tests/hostside/src/com/android/game/qualification/metric/ |
D | MetricSummary.java |
     22  import com.android.tradefed.metrics.proto.MetricMeasurement.DataType;
     23  import com.android.tradefed.metrics.proto.MetricMeasurement.Measurements;
     24  import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
     60  IInvocationContext context, HashMap<String, Metric> metrics) {  (in parseRunMetrics(), argument)
     62  if (metrics.containsKey("loop_count")) {  (in parseRunMetrics())
     63  loopCount = (int) metrics.get("loop_count").getMeasurements().getSingleInt();  (in parseRunMetrics())
     74  LoopSummary loopSummary = LoopSummary.parseRunMetrics(context, type, i, metrics);  (in parseRunMetrics())
     80  metrics.get("load_time").getMeasurements().getSingleInt(),  (in parseRunMetrics())
|
/tools/test/connectivity/acts/framework/tests/metrics/ |
D | core_test.py |
     23  from acts.metrics.core import MetricPublisher
     24  from acts.metrics.core import ProtoMetric
     25  from acts.metrics.core import ProtoMetricPublisher
    104  metrics = Mock()
    107  self.assertRaises(NotImplementedError, lambda: publisher.publish(metrics))
    163  metrics = [Mock()]
    171  publisher.publish(metrics)
    181  metrics = [Mock()]
    189  publisher.publish(metrics)
|
/tools/test/connectivity/acts/framework/acts/controllers/ |
D | bits.py |
     78  metrics = []
     95  metrics.append(power_metrics.Metric(avg, unit_type, unit, name=name))
     97  return metrics
    276  metrics = {}
    295  metrics[segment_name] = _raw_data_to_metrics(raw_metrics)
    296  return metrics
|
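bits.py shapes its output at two levels: _raw_data_to_metrics flattens one raw channel dump into a list of power_metrics.Metric objects (lines 78-97), and the caller keys those lists by segment name (lines 276-296). A sketch of both levels, with a namedtuple standing in for the real Metric class and an assumed raw-data shape:

    from collections import namedtuple

    # Stand-in for acts power_metrics.Metric; the real class takes
    # (avg, unit_type, unit, name=...) per line 95 above.
    Metric = namedtuple('Metric', ['avg', 'unit_type', 'unit', 'name'])

    def raw_data_to_metrics(raw_data):
        """Flatten one channel dump into Metric objects (assumed input shape)."""
        metrics = []
        for ch in raw_data:
            metrics.append(Metric(ch['avg'], ch['unit_type'], ch['unit'],
                                  name=ch['name']))
        return metrics

    def metrics_by_segment(segments):
        """Key per-channel metrics by segment name, as on line 295."""
        metrics = {}
        for segment_name, raw_metrics in segments.items():
            metrics[segment_name] = raw_data_to_metrics(raw_metrics)
        return metrics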
/tools/test/connectivity/acts/framework/acts/metrics/ |
D | core.py |
     86  def publish(self, metrics):  (argument)
    141  def publish(self, metrics):  (argument)
    152  if isinstance(metrics, list):
    153  for metric in metrics:
    156  self._publish_single(metrics)
|
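acts.metrics.core pairs an abstract publisher with a proto-aware one: MetricPublisher.publish raises (core_test.py above asserts the NotImplementedError at line 107), while ProtoMetricPublisher.publish accepts either a single metric or a list (lines 152-156). A minimal sketch of that dispatch, with a print standing in for the real proto write:

    class MetricPublisher:
        def publish(self, metrics):
            raise NotImplementedError()        # line 86; asserted in core_test.py

    class ProtoMetricPublisher(MetricPublisher):
        def publish(self, metrics):
            if isinstance(metrics, list):      # lines 152-156: list or single metric
                for metric in metrics:
                    self._publish_single(metric)
            else:
                self._publish_single(metrics)

        def _publish_single(self, metric):
            print('publishing', metric)        # stand-in for serializing the proto to disk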
/tools/test/graphicsbenchmark/performance_tests/hostside/test/com/android/game/qualification/metric/ |
D | MetricSummaryTest.java |
     28  import com.android.tradefed.metrics.proto.MetricMeasurement;
     74  HashMap<String, MetricMeasurement.Metric> metrics = new HashMap<>();  (in testConversion(), local)
     75  runData.addToMetrics(metrics);  (in testConversion())
     77  MetricSummary result = MetricSummary.parseRunMetrics(context, metrics);  (in testConversion())
|
D | LoopSummaryTest.java |
      9  import com.android.tradefed.metrics.proto.MetricMeasurement;
     10  import com.android.tradefed.metrics.proto.MetricMeasurement.Metric;
    137  HashMap<String, MetricMeasurement.Metric> metrics = new HashMap<>();  (in testParseRunMetrics(), local)
    138  runData.addToMetrics(metrics);  (in testParseRunMetrics())
    144  metrics);  (in testParseRunMetrics())
|
/tools/asuite/atest-py2/ |
D | test_runner_handler.py |
     27  from metrics import metrics
     28  from metrics import metrics_utils
    137  metrics.RunnerFinishEvent(
|
/tools/asuite/atest/ |
D | test_runner_handler.py |
     30  from metrics import metrics
     31  from metrics import metrics_utils
    141  metrics.RunnerFinishEvent(
|
D | atest.py |
     52  from metrics import metrics
     53  from metrics import metrics_base
     54  from metrics import metrics_utils
    723  metrics.AtestStartEvent(
    778  metrics.BuildFinishEvent(
    785  metrics.LocalDetectEvent(
    804  metrics.LocalDetectEvent(
    811  metrics.LocalDetectEvent(
    837  metrics.RunTestsFinishEvent(
    842  metrics.RunnerFinishEvent(
    [all …]
|
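atest.py threads clearcut events through a whole invocation: a start event, then build, local-detect, test-finish, and runner-finish events. Every argument list is truncated in the listing, so the sketch below shows only the shape, with a fake recorder so it runs outside the asuite tree; all keyword arguments are hypothetical except command_line, which appears in metrics_utils.py below:

    class _FakeMetrics:
        """Stand-in recorder; in atest.py this object is `from metrics import metrics`."""
        def __getattr__(self, event_name):
            def record(**kwargs):
                print(event_name, kwargs)
            return record

    metrics = _FakeMetrics()

    def run_instrumented():                     # hypothetical driver, shape only
        metrics.AtestStartEvent(command_line='atest hello_world_test')
        metrics.BuildFinishEvent(success=True)              # hypothetical kwargs
        metrics.RunTestsFinishEvent(success=True)           # hypothetical kwargs
        metrics.RunnerFinishEvent(runner_name='tradefed')   # hypothetical kwargs

    run_instrumented()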
/tools/asuite/atest-py2/metrics/ |
D | metrics_utils.py |
     25  from . import metrics
     96  clearcut = metrics.AtestExitEvent(
    125  metrics.AtestStartEvent(command_line=command_line,
|
/tools/asuite/atest/metrics/ |
D | metrics_utils.py |
     25  from . import metrics
     96  clearcut = metrics.AtestExitEvent(
    125  metrics.AtestStartEvent(command_line=command_line,
|
/tools/test/connectivity/acts/framework/acts/metrics/loggers/ |
D | bounded_metrics.py |
     17  from acts.metrics.core import ProtoMetric
     18  from acts.metrics.logger import MetricLogger
     20  from acts.metrics.loggers.protos.gen import metrics_pb2
|
D | usage_metadata_logger.py |
     20  from acts.metrics.core import ProtoMetric
     21  from acts.metrics.core import ProtoMetricPublisher
     22  from acts.metrics.loggers.protos.gen import acts_usage_metadata_pb2
|
D | blackbox.py |
     18  from acts.metrics.core import ProtoMetric
     19  from acts.metrics.logger import MetricLogger
     20  from acts.metrics.loggers.protos.gen import acts_blackbox_pb2
|
/tools/asuite/atest/test_runners/ |
D | suite_plan_test_runner.py |
     24  from metrics import metrics
    127  metrics.LocalDetectEvent(
|