1 /* 2 * Copyright (C) 2021 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 #pragma once 18 19 /****************************************************************** 20 * 21 * IMPORTANT NOTICE: 22 * 23 * This file is part of Android's set of stable system headers 24 * exposed by the Android NDK (Native Development Kit). 25 * 26 * Third-party source AND binary code relies on the definitions 27 * here to be FROZEN ON ALL UPCOMING PLATFORM RELEASES. 28 * 29 * - DO NOT MODIFY ENUMS (EXCEPT IF YOU ADD NEW 32-BIT VALUES) 30 * - DO NOT MODIFY CONSTANTS OR FUNCTIONAL MACROS 31 * - DO NOT CHANGE THE SIGNATURE OF FUNCTIONS IN ANY WAY 32 * - DO NOT CHANGE THE LAYOUT OR SIZE OF STRUCTURES 33 */ 34 35 #include <NeuralNetworksTypes.h> 36 #include <stdbool.h> 37 #include <stdint.h> 38 #include <stdio.h> 39 #include <stdlib.h> 40 41 __BEGIN_DECLS 42 43 /** 44 * Performance information for the reference workload. 45 * 46 * Used by a driver to report its performance characteristics. 47 */ 48 typedef struct { 49 /** 50 * Ratio of the time taken by the driver to execute the workload compared to the time the CPU 51 * would take for the same workload. A lower number is better. 52 */ 53 float execTime; 54 55 /** 56 * Ratio of the energy used by the driver compared to what the CPU would use for doing the same 57 * workload. A lower number is better. 58 */ 59 float powerUsage; 60 } SL_ANeuralNetworksPerformanceInfo; 61 62 /** 63 * Driver performance when operating on a particular data type. In the case of float32 data, this is 64 * used when the calculations are not relaxed. 65 */ 66 typedef struct { 67 int32_t operandType; 68 SL_ANeuralNetworksPerformanceInfo performanceInfo; 69 } SL_ANeuralNetworksOperandPerformanceInfo; 70 71 /** 72 * Information about NNAPI Vendor extension operand type. 73 */ 74 typedef struct { 75 /** 76 * The byte size of the operand (if scalar) or of a single element (if tensor). 77 */ 78 uint32_t byteSize; 79 80 /** 81 * The extension operand type. 82 */ 83 uint16_t type; 84 85 /** 86 * Indicates whether the extension operand type represents a tensor or a scalar. 87 */ 88 bool isTensor; 89 } SL_ANeuralNetworksExtensionOperandTypeInformation; 90 91 /** 92 * The different performance info kinds. 93 */ 94 typedef enum { 95 /** 96 * Driver performance when operating on float32 data but performing calculations with range 97 * and/or precision as low as that of the IEEE 754 16-bit floating-point format. 98 */ 99 SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_SCALAR = 0, 100 101 /** 102 * Driver performance when operating on float32 data but performing calculations with range 103 * and/or precision as low as that of the IEEE 754 16-bit floating-point format. 104 */ 105 SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_RELAXED_TENSOR = 1, 106 107 /** 108 * Performance of an {@link ANEURALNETWORKS_IF} operation is the sum of {@link 109 * ANEURALNETWORKS_IF}'s performance and the mean of performance for the two branch subgraphs, 110 * where performance for a subgraph is the sum of the performance of all operations within the 111 * subgraph. 112 */ 113 SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_IF = 2, 114 115 /** 116 * Performance of a {@link ANEURALNETWORKS_WHILE} operation is the sum of {@link 117 * ANEURALNETWORKS_WHILE}'s performance, performance for the condition subgraph and performance 118 * for the body subgraph, where performance for a subgraph is the sum of the performance of all 119 * operations within the subgraph. 120 */ 121 SL_ANEURALNETWORKS_CAPABILITIES_PERFORMANCE_WHILE = 3, 122 } SL_ANeuralNetworksPerformanceInfoCode; 123 124 /** 125 * Sets the compilation caching signature and file descriptors. 126 * 127 * Provides optional caching information to the support library driver for 128 * faster repeated compilation. 129 * 130 * See {@link ANeuralNetworksCompilation} for information on multithreaded usage. 131 * 132 * @param compilation The compilation to be modified. 133 * @param modelCacheFds An array of file descriptors for the security-sensitive cache. 134 * The file descriptors will be duplicated. 135 * @param numModelCacheFiles The number of the model cache files. 136 * @param dataCacheFds An array of file descriptors for the constants' cache. 137 * The file descriptors will be duplicated. 138 * @param numDataCacheFiles The number of the data cache files. 139 * @param token The token provided by the user to specify a model must be of length 140 * ANEURALNETWORKS_BYTE_SIZE_OF_CACHE_TOKEN. The user should ensure that 141 * the token is unique to a model within the application. The NNAPI 142 * runtime cannot detect token collisions; a collision will result in a 143 * failed execution or in a successful execution that produces incorrect 144 * output values. 145 * 146 * @return ANEURALNETWORKS_NO_ERROR if successful. 147 * 148 * Available in the compabibility library build only. 149 */ 150 int SL_ANeuralNetworksCompilation_setCachingFromFds(ANeuralNetworksCompilation* compilation, 151 const int* modelCacheFds, 152 const uint32_t numModelCacheFiles, 153 const int* dataCacheFds, 154 const uint32_t numDataCacheFiles, 155 const uint8_t* token); 156 157 /** 158 * Gets the caching requirements of the driver implementation. 159 * 160 * There are two types of cache file descriptors provided to the driver: model cache and data cache. 161 * 162 * The data cache is for caching constant data, possibly including preprocessed and transformed 163 * tensor buffers. Any modification to the data cache should have no worse effect than generating 164 * bad output values at execution time. 165 * 166 * The model cache is for caching security-sensitive data such as compiled executable machine code 167 * in the device's native binary format. A modification to the model cache may affect the driver's 168 * execution behavior, and a malicious client could make use of this to execute beyond the granted 169 * permission. 170 * 171 * ANeuralNetworksDevice_getNumberOfCacheFilesNeeded returns how many of each type of cache files 172 * the driver implementation needs to cache a single compilation. Returning 0 for both types 173 * indicates compilation caching is not supported by this driver. The driver may still choose not to 174 * cache certain compiled models even if it reports that caching is supported. 175 * 176 * @param device The representation of the specified device. 177 * @param numModelCacheFiles The number of the model cache files. A value of 0 is returned on error. 178 * @param numDataCacheFiles The number of the data cache files. A value of 0 is returned on error. 179 * 180 * @return ANEURALNETWORKS_NO_ERROR if successful. 181 * 182 * Available in the compabibility library build only. 183 */ 184 int SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded(const ANeuralNetworksDevice* device, 185 uint32_t* numModelCacheFiles, 186 uint32_t* numDataCacheFiles); 187 188 /** 189 * Get NNAPI Device performance/power capabilities. 190 * 191 * This returns performance of non-extension operations. 192 * 193 * Performance of an operation other than {@link ANEURALNETWORKS_IF} and {@link 194 * ANEURALNETWORKS_WHILE} comes from the type of its first operand. 195 * 196 * @param device The representation of the specified device. 197 * @param performanceInfoKind The kind of performance info to be queried. Must be one of the values 198 * from {@link SL_ANeuralNetworksPerformanceInfoCode}. 199 * @return ANEURALNETWORKS_NO_ERROR if successful. 200 * 201 * Available in the compabibility library build only. 202 */ 203 int SL_ANeuralNetworksDevice_getPerformanceInfo(const ANeuralNetworksDevice* device, 204 int32_t performanceInfoKind, 205 SL_ANeuralNetworksPerformanceInfo* performanceInfo); 206 207 /** 208 * Get NNAPI Device operand performance/power capabilities. 209 * 210 * This returns performance of non-extension operations. 211 * 212 * Performance of an operation other than {@link ANEURALNETWORKS_IF} and {@link 213 * ANEURALNETWORKS_WHILE} comes from the type of its first operand. 214 * 215 * @param device The representation of the specified device. 216 * @param context Context to pass to the callback. 217 * @param callback Callback taking operand performance and context. 218 * @return ANEURALNETWORKS_NO_ERROR if successful. 219 * 220 * Available in the compabibility library build only. 221 */ 222 int SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo( 223 const ANeuralNetworksDevice* device, void* context, 224 void (*callback)(SL_ANeuralNetworksOperandPerformanceInfo, void*)); 225 226 /** 227 * Get the number of extensions supported by the driver implementation. 228 * 229 * @param device The representation of the specified device. 230 * @param vendorExtensionCount The number of vendor extensions the device supports. To be used in 231 * {@link ANeuralNetworksDevice_getVendorExtensionName} and {@link 232 * ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation}. 233 * @return ANEURALNETWORKS_NO_ERROR if successful. 234 * 235 * Available in the compabibility library build only. 236 */ 237 int SL_ANeuralNetworksDevice_getVendorExtensionCount(const ANeuralNetworksDevice* device, 238 uint32_t* vendorExtensionCount); 239 240 /** 241 * Gets information about a specified extension supported by the driver implementation. 242 * 243 * @param device The representation of the specified device. 244 * @param vendorExtensionIndex The index of the specified vendor extension. Must be less than the 245 * number of available vendor extensions. 246 * @param extensionName Name of the NNAPI HAL Extension. 247 * @return ANEURALNETWORKS_NO_ERROR if successful. 248 * 249 * Available in the compabibility library build only. 250 */ 251 int SL_ANeuralNetworksDevice_getVendorExtensionName(const ANeuralNetworksDevice* device, 252 uint32_t vendorExtensionIndex, 253 const char** extensionName); 254 255 /** 256 * Gets a specified extension's operand type information supported by the driver implementation. 257 * 258 * @param device The representation of the specified device. 259 * @param vendorExtensionIndex The index of the specified vendor extension. Must be less than the 260 * number of available vendor extensions. 261 * @param context Context to pass to the callback. 262 * @param callback Callback taking operand type information and context. 263 * @return ANEURALNETWORKS_NO_ERROR if successful. 264 * 265 * Available in the compabibility library build only. 266 */ 267 int SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation( 268 const ANeuralNetworksDevice* device, uint32_t vendorExtensionIndex, void* context, 269 void (*callback)(SL_ANeuralNetworksExtensionOperandTypeInformation, void*)); 270 271 /** 272 * Result codes. 273 */ 274 typedef enum { 275 ANNDIAG_NO_ERROR = 0, 276 277 /** 278 * Failure caused by failure to load support library driver. 279 */ 280 ANNDIAG_FAILED_TO_LOAD_SL = 1, 281 282 /** 283 * Failure caused by failure to register HAL service. 284 */ 285 ANNDIAG_FAILED_TO_REGISTER_SERVICE = 2, 286 287 /** 288 * General failure. 289 */ 290 ANNDIAG_GENERAL_ERROR = 3, 291 292 /** 293 * Invalid argument 294 */ 295 ANNDIAG_INVALID_ARGUMENT = 4, 296 297 } ANeuralNetworksDiagnosticResultCode; 298 299 /** 300 * Data class. 301 */ 302 typedef enum { 303 ANNDIAG_DATA_CLASS_UNKNOWN = 0, 304 ANNDIAG_DATA_CLASS_OTHER = 1, 305 ANNDIAG_DATA_CLASS_FLOAT32 = 2, 306 ANNDIAG_DATA_CLASS_FLOAT16 = 3, 307 ANNDIAG_DATA_CLASS_QUANT = 4, 308 ANNDIAG_DATA_CLASS_MIXED = 5 309 } ANeuralNetworksDiagnosticDataClass; 310 311 typedef enum { 312 ANNDIAG_EXECUTION_MODE_UNKNOWN = 0, 313 ANNDIAG_EXECUTION_MODE_ASYNC = 1, 314 ANNDIAG_EXECUTION_MODE_SYNC = 2, 315 ANNDIAG_EXECUTION_MODE_BURST = 3, 316 ANNDIAG_EXECUTION_MODE_ASYNC_WITH_DEPS = 4, 317 } ANeuralNetworksDiagnosticExecutionMode; 318 319 typedef struct ANeuralNetworksDiagnosticCompilationInfo ANeuralNetworksDiagnosticCompilationInfo; 320 321 /** 322 * Gets the ID that identifies a single session of client interacting with NNAPI runtime. 323 * 324 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 325 * @return Session info id. 326 */ 327 int32_t SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId( 328 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 329 330 /** 331 * Gets NNAPI version. 332 * 333 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 334 * @return NNAPI version. 335 */ 336 int64_t SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion( 337 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 338 339 /** 340 * Gets the hash of the model architecture (without weights). 341 * 342 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 343 * @return Model hash. 344 */ 345 const uint8_t* SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash( 346 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 347 348 /** 349 * Gets the device IDs as a comma-concatenated string. 350 * 351 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 352 * @return Device ID. 353 */ 354 const char* SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds( 355 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 356 357 /** 358 * Gets the error code. 359 * 360 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 361 * @return Error code. 362 */ 363 int32_t SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode( 364 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 365 366 /** 367 * Gets the type of tensors used for inputs. 368 * 369 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 370 * @return Input data class. 371 */ 372 ANeuralNetworksDiagnosticDataClass SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass( 373 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 374 375 /** 376 * Gets the type of tensors used for outputs. 377 * 378 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 379 * @return Output data class. 380 */ 381 ANeuralNetworksDiagnosticDataClass SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass( 382 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 383 384 /** 385 * Gets how many nanoseconds elapsed when compiling the model. 386 * 387 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 388 * @return Time to compile the model in nanoseconds. UINT64_MAX indicates that timing information is 389 * not available. 390 */ 391 uint64_t SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos( 392 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 393 394 /** 395 * Is caching enabled? 396 * 397 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 398 * @return Whether caching is enabled. 399 */ 400 bool SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled( 401 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 402 403 /** 404 * Is control flow used? 405 * 406 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 407 * @return Whether control flow was used. 408 */ 409 bool SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed( 410 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 411 412 /** 413 * Are dynamic tensors used? 414 * 415 * @param diagnosticCompilationInfo The NNAPI diagnostic compilation info object. 416 * @return Whether dynamic tensors were used. 417 */ 418 bool SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed( 419 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 420 421 typedef struct ANeuralNetworksDiagnosticExecutionInfo ANeuralNetworksDiagnosticExecutionInfo; 422 423 /** 424 * Gets the ID that identifies a single session of client interacting with NNAPI runtime. 425 * 426 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 427 * @return Session info id. 428 */ 429 int32_t SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId( 430 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 431 432 /** 433 * Gets NNAPI version. 434 * 435 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 436 * @return NNAPI version. 437 */ 438 int64_t SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion( 439 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 440 441 /** 442 * Gets the hash of the model architecture (without weights). 443 * 444 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 445 * @return Model hash. 446 */ 447 const uint8_t* SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash( 448 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 449 450 /** 451 * Gets the device IDs as a comma-concatenated string. 452 * 453 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 454 * @return Device ID. 455 */ 456 const char* SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds( 457 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 458 459 /** 460 * Gets the execution mode. 461 * 462 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 463 * @return Execution mode. 464 */ 465 ANeuralNetworksDiagnosticExecutionMode SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode( 466 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 467 468 /** 469 * Gets the input data class. 470 * 471 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 472 * @return Input data class. 473 */ 474 ANeuralNetworksDiagnosticDataClass SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass( 475 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 476 477 /** 478 * Gets the output data class. 479 * 480 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 481 * @return Output data class. 482 */ 483 ANeuralNetworksDiagnosticDataClass SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass( 484 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 485 486 /** 487 * Gets the error code. 488 * 489 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 490 * @return Error code. 491 */ 492 uint32_t SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode( 493 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 494 495 /** 496 * Gets the time taken to execute from runtime, including runtime/ipc overhead. 497 * 498 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 499 * @return Time taken to execute as measured by the runtime in nanoseconds. UINT64_MAX indicates 500 * that timing information is not available. 501 */ 502 uint64_t SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos( 503 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 504 505 /** 506 * Gets the time taken to execute in the driver, excluding runtime/ipc overhead. 507 * 508 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 509 * @return Time taken to execute on the driver in nanoseconds. UINT64_MAX indicates that timing 510 * information is not available. 511 */ 512 uint64_t SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos( 513 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 514 515 /** 516 * Gets the time taken to execute on the hardware, excluding driver overhead. 517 * 518 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 519 * @return Time taken to execute on the hardware in nanoseconds. UINT64_MAX indicates that timing 520 * information is not available. 521 */ 522 uint64_t SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos( 523 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 524 525 /** 526 * Is caching enabled? 527 * 528 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 529 * @return Whether caching is enabled. 530 */ 531 bool SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled( 532 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 533 534 /** 535 * Is control flow used? 536 * 537 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 538 * @return Whether control flow was used. 539 */ 540 bool SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed( 541 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 542 543 /** 544 * Are dynamic tensors used? 545 * 546 * @param diagnosticExecutionInfo The NNAPI diagnostic compilation info object. 547 * @return Whether dynamic tensors were used. 548 */ 549 bool SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed( 550 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 551 552 typedef void (*ANeuralNetworksDiagnosticCompilationFinishedCallback)( 553 const void* context, const ANeuralNetworksDiagnosticCompilationInfo* info); 554 555 typedef void (*ANeuralNetworksDiagnosticExecutionFinishedCallback)( 556 const void* context, const ANeuralNetworksDiagnosticExecutionInfo* info); 557 558 /** 559 * Sets the callbacks to be called when compilations or executions finish. 560 * 561 * Example usage: 562 * 563 * // Callback to be invoked whenever a compilation has completed. 564 * void compilationCallback(void* context, ANeuralNetworksDiagnosticCompilationInfo* info) { 565 * // The context object can be used to store state without the use of a global variable. 566 * ExampleLoggerObject* logger = static_cast<ExampleLoggerObject*>(context); 567 * 568 * // Calls to getters to get the details... 569 * const int32_t sessionId = ANeuralNetworksDiagnosticCompilationInfo_getSessionId(info); 570 * 571 * ... 572 * 573 * logger->write(...); 574 * } 575 * 576 * void executionCallback(void* context, ANeuralNetworksDiagnosticExecutionInfo* info) { 577 * ... 578 * } 579 * 580 * ExampleLoggerObject exampleLoggerObject; 581 * ANeuralNetworksDiagnostic_registerCallbacks(&compilationCallback, &executionCallback, 582 * static_cast<void*>(&exampleLoggerObject)); 583 * 584 * @param compilationCallback The compilation callback to set. 585 * @param executionCallback The execution callback to set. 586 * @param callbackContext The context to be passed to the callbacks when they are invoked. 587 * The context object may be used by multiple threads simulatenously, so it must be 588 * thread-safe. 589 */ 590 void SL_ANeuralNetworksDiagnostic_registerCallbacks( 591 ANeuralNetworksDiagnosticCompilationFinishedCallback compilationCallback, 592 ANeuralNetworksDiagnosticExecutionFinishedCallback executionCallback, 593 void* callbackContext); 594 595 /** 596 * Base version of NnApiSLDriverImpl with version information. 597 * 598 * NnApiSLDriverImpl is non-opaque, versioning struct to make it possible to pass 599 * its instance straight from the SL Driver to the shim registration. The glue code 600 * that loads the SL and calls the shim is non-updatable. An opaque struct would require the 601 * glue code to be updated if we would like to use newer NNAPI Feature Level. 602 * 603 * There's expectation that for M>N, NnApiSLDriverImplFL(M) is 604 * a strict superset of NnApiSLDriverImplFL(N), and NnApiSLDriverImplFL(M)* can 605 * be reinterpret_cast to NnApiSLDriverImplFL(N)* safely. 606 */ 607 typedef struct NnApiSLDriverImpl { 608 /** 609 * Version of the NnApiSLDriverImpl struct. Uses {@link FeatureLevelCode} values 610 * for versioning. 611 */ 612 int64_t implFeatureLevel; 613 } NnApiSLDriverImpl; 614 615 /** 616 * NnApiSLDriverImpl for an Updatable SL Driver implementing {@link 617 * ANEURALNETWORKS_FEATURE_LEVEL_5}. 618 * 619 * This struct must set its implFeatureLevel to {@link ANEURALNETWORKS_FEATURE_LEVEL_5}. 620 */ 621 typedef struct NnApiSLDriverImplFL5 { 622 /** 623 * Base type with version information. Allows to cast a pointer of this type 624 * to NnApiSLDriverImpl* with valid version information. 625 * For this type, its .version fields should be always set to {@link 626 * ANEURALNETWORKS_FEATURE_LEVEL_5}. 627 */ 628 NnApiSLDriverImpl base; 629 630 /** 631 * SL Driver implementation of {@link ANeuralNetworksBurst_create}. 632 * Behavior, arguments, and outputs match NNAPI Runtime function 633 * {@link ANeuralNetworksBurst_create}, 634 * at the feature level of this NnApiSLDriver struct. 635 */ 636 int (*ANeuralNetworksBurst_create)(ANeuralNetworksCompilation* compilation, 637 ANeuralNetworksBurst** burst); 638 639 /** 640 * SL Driver implementation of {@link ANeuralNetworksBurst_free}. 641 * Behavior, arguments, and outputs match NNAPI Runtime function 642 * {@link ANeuralNetworksBurst_free}, 643 * at the feature level of this NnApiSLDriver struct. 644 */ 645 void (*ANeuralNetworksBurst_free)(ANeuralNetworksBurst* burst); 646 647 /** 648 * SL Driver implementation of {@link ANeuralNetworksCompilation_createForDevices}. 649 * Behavior, arguments, and outputs match NNAPI Runtime function 650 * {@link ANeuralNetworksCompilation_createForDevices}, 651 * at the feature level of this NnApiSLDriver struct. 652 */ 653 int (*ANeuralNetworksCompilation_createForDevices)(ANeuralNetworksModel* model, 654 const ANeuralNetworksDevice* const* devices, 655 uint32_t numDevices, 656 ANeuralNetworksCompilation** compilation); 657 658 /** 659 * SL Driver implementation of {@link ANeuralNetworksCompilation_finish}. 660 * Behavior, arguments, and outputs match NNAPI Runtime function 661 * {@link ANeuralNetworksCompilation_finish}, 662 * at the feature level of this NnApiSLDriver struct. 663 */ 664 int (*ANeuralNetworksCompilation_finish)(ANeuralNetworksCompilation* compilation); 665 666 /** 667 * SL Driver implementation of {@link ANeuralNetworksCompilation_free}. 668 * Behavior, arguments, and outputs match NNAPI Runtime function 669 * {@link ANeuralNetworksCompilation_free}, 670 * at the feature level of this NnApiSLDriver struct. 671 */ 672 void (*ANeuralNetworksCompilation_free)(ANeuralNetworksCompilation* compilation); 673 674 /** 675 * SL Driver implementation of {@link 676 * ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput}. Behavior, arguments, and 677 * outputs match NNAPI Runtime function 678 * {@link ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput}, 679 * at the feature level of this NnApiSLDriver struct. 680 */ 681 int (*ANeuralNetworksCompilation_getPreferredMemoryAlignmentForInput)( 682 const ANeuralNetworksCompilation* compilation, uint32_t index, uint32_t* alignment); 683 684 /** 685 * SL Driver implementation of {@link 686 * ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput}. Behavior, arguments, and 687 * outputs match NNAPI Runtime function 688 * {@link ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput}, 689 * at the feature level of this NnApiSLDriver struct. 690 */ 691 int (*ANeuralNetworksCompilation_getPreferredMemoryAlignmentForOutput)( 692 const ANeuralNetworksCompilation* compilation, uint32_t index, uint32_t* alignment); 693 694 /** 695 * SL Driver implementation of {@link 696 * ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput}. Behavior, arguments, and 697 * outputs match NNAPI Runtime function 698 * {@link ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput}, 699 * at the feature level of this NnApiSLDriver struct. 700 */ 701 int (*ANeuralNetworksCompilation_getPreferredMemoryPaddingForInput)( 702 const ANeuralNetworksCompilation* compilation, uint32_t index, uint32_t* padding); 703 704 /** 705 * SL Driver implementation of {@link 706 * ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput}. Behavior, arguments, and 707 * outputs match NNAPI Runtime function 708 * {@link ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput}, 709 * at the feature level of this NnApiSLDriver struct. 710 */ 711 int (*ANeuralNetworksCompilation_getPreferredMemoryPaddingForOutput)( 712 const ANeuralNetworksCompilation* compilation, uint32_t index, uint32_t* padding); 713 714 /** 715 * SL Driver implementation of {@link ANeuralNetworksCompilation_setCaching}. 716 * Behavior, arguments, and outputs match NNAPI Runtime function 717 * {@link ANeuralNetworksCompilation_setCaching}, 718 * at the feature level of this NnApiSLDriver struct. 719 */ 720 int (*ANeuralNetworksCompilation_setCaching)(ANeuralNetworksCompilation* compilation, 721 const char* cacheDir, const uint8_t* token); 722 723 /** 724 * SL Driver implementation of {@link ANeuralNetworksCompilation_setPreference}. 725 * Behavior, arguments, and outputs match NNAPI Runtime function 726 * {@link ANeuralNetworksCompilation_setPreference}, 727 * at the feature level of this NnApiSLDriver struct. 728 */ 729 int (*ANeuralNetworksCompilation_setPreference)(ANeuralNetworksCompilation* compilation, 730 int32_t preference); 731 732 /** 733 * SL Driver implementation of {@link ANeuralNetworksCompilation_setPriority}. 734 * Behavior, arguments, and outputs match NNAPI Runtime function 735 * {@link ANeuralNetworksCompilation_setPriority}, 736 * at the feature level of this NnApiSLDriver struct. 737 */ 738 int (*ANeuralNetworksCompilation_setPriority)(ANeuralNetworksCompilation* compilation, 739 int priority); 740 741 /** 742 * SL Driver implementation of {@link ANeuralNetworksCompilation_setTimeout}. 743 * Behavior, arguments, and outputs match NNAPI Runtime function 744 * {@link ANeuralNetworksCompilation_setTimeout}, 745 * at the feature level of this NnApiSLDriver struct. 746 */ 747 int (*ANeuralNetworksCompilation_setTimeout)(ANeuralNetworksCompilation* compilation, 748 uint64_t duration); 749 750 /** 751 * SL Driver implementation of {@link ANeuralNetworksDevice_getExtensionSupport}. 752 * Behavior, arguments, and outputs match NNAPI Runtime function 753 * {@link ANeuralNetworksDevice_getExtensionSupport}, 754 * at the feature level of this NnApiSLDriver struct. 755 */ 756 int (*ANeuralNetworksDevice_getExtensionSupport)(const ANeuralNetworksDevice* device, 757 const char* extensionName, 758 bool* isExtensionSupported); 759 760 /** 761 * SL Driver implementation of {@link ANeuralNetworksDevice_getFeatureLevel}. 762 * Behavior, arguments, and outputs match NNAPI Runtime function 763 * {@link ANeuralNetworksDevice_getFeatureLevel}, 764 * at the feature level of this NnApiSLDriver struct. 765 */ 766 int (*ANeuralNetworksDevice_getFeatureLevel)(const ANeuralNetworksDevice* device, 767 int64_t* featureLevel); 768 769 /** 770 * SL Driver implementation of {@link ANeuralNetworksDevice_getName}. 771 * Behavior, arguments, and outputs match NNAPI Runtime function 772 * {@link ANeuralNetworksDevice_getName}, 773 * at the feature level of this NnApiSLDriver struct. 774 */ 775 int (*ANeuralNetworksDevice_getName)(const ANeuralNetworksDevice* device, const char** name); 776 777 /** 778 * SL Driver implementation of {@link ANeuralNetworksDevice_getType}. 779 * Behavior, arguments, and outputs match NNAPI Runtime function 780 * {@link ANeuralNetworksDevice_getType}, 781 * at the feature level of this NnApiSLDriver struct. 782 */ 783 int (*ANeuralNetworksDevice_getType)(const ANeuralNetworksDevice* device, int32_t* type); 784 785 /** 786 * SL Driver implementation of {@link ANeuralNetworksDevice_getVersion}. 787 * Behavior, arguments, and outputs match NNAPI Runtime function 788 * {@link ANeuralNetworksDevice_getVersion}, 789 * at the feature level of this NnApiSLDriver struct. 790 */ 791 int (*ANeuralNetworksDevice_getVersion)(const ANeuralNetworksDevice* device, 792 const char** version); 793 794 /** 795 * SL Driver implementation of {@link ANeuralNetworksDevice_wait}. 796 * Behavior, arguments, and outputs match NNAPI Runtime function 797 * {@link ANeuralNetworksDevice_wait}, 798 * at the feature level of this NnApiSLDriver struct. 799 */ 800 int (*ANeuralNetworksDevice_wait)(const ANeuralNetworksDevice* device); 801 802 /** 803 * SL Driver implementation of {@link ANeuralNetworksEvent_createFromSyncFenceFd}. 804 * Behavior, arguments, and outputs match NNAPI Runtime function 805 * {@link ANeuralNetworksEvent_createFromSyncFenceFd}, 806 * at the feature level of this NnApiSLDriver struct. 807 */ 808 int (*ANeuralNetworksEvent_createFromSyncFenceFd)(int sync_fence_fd, 809 ANeuralNetworksEvent** event); 810 811 /** 812 * SL Driver implementation of {@link ANeuralNetworksEvent_free}. 813 * Behavior, arguments, and outputs match NNAPI Runtime function 814 * {@link ANeuralNetworksEvent_free}, 815 * at the feature level of this NnApiSLDriver struct. 816 */ 817 void (*ANeuralNetworksEvent_free)(ANeuralNetworksEvent* event); 818 819 /** 820 * SL Driver implementation of {@link ANeuralNetworksEvent_getSyncFenceFd}. 821 * Behavior, arguments, and outputs match NNAPI Runtime function 822 * {@link ANeuralNetworksEvent_getSyncFenceFd}, 823 * at the feature level of this NnApiSLDriver struct. 824 */ 825 int (*ANeuralNetworksEvent_getSyncFenceFd)(const ANeuralNetworksEvent* event, 826 int* sync_fence_fd); 827 828 /** 829 * SL Driver implementation of {@link ANeuralNetworksEvent_wait}. 830 * Behavior, arguments, and outputs match NNAPI Runtime function 831 * {@link ANeuralNetworksEvent_wait}, 832 * at the feature level of this NnApiSLDriver struct. 833 */ 834 int (*ANeuralNetworksEvent_wait)(ANeuralNetworksEvent* event); 835 836 /** 837 * SL Driver implementation of {@link ANeuralNetworksExecution_burstCompute}. 838 * Behavior, arguments, and outputs match NNAPI Runtime function 839 * {@link ANeuralNetworksExecution_burstCompute}, 840 * at the feature level of this NnApiSLDriver struct. 841 */ 842 int (*ANeuralNetworksExecution_burstCompute)(ANeuralNetworksExecution* execution, 843 ANeuralNetworksBurst* burst); 844 845 /** 846 * SL Driver implementation of {@link ANeuralNetworksExecution_compute}. 847 * Behavior, arguments, and outputs match NNAPI Runtime function 848 * {@link ANeuralNetworksExecution_compute}, 849 * at the feature level of this NnApiSLDriver struct. 850 */ 851 int (*ANeuralNetworksExecution_compute)(ANeuralNetworksExecution* execution); 852 853 /** 854 * SL Driver implementation of {@link ANeuralNetworksExecution_create}. 855 * Behavior, arguments, and outputs match NNAPI Runtime function 856 * {@link ANeuralNetworksExecution_create}, 857 * at the feature level of this NnApiSLDriver struct. 858 */ 859 int (*ANeuralNetworksExecution_create)(ANeuralNetworksCompilation* compilation, 860 ANeuralNetworksExecution** execution); 861 862 /** 863 * SL Driver implementation of {@link ANeuralNetworksExecution_enableInputAndOutputPadding}. 864 * Behavior, arguments, and outputs match NNAPI Runtime function 865 * {@link ANeuralNetworksExecution_enableInputAndOutputPadding}, 866 * at the feature level of this NnApiSLDriver struct. 867 */ 868 int (*ANeuralNetworksExecution_enableInputAndOutputPadding)(ANeuralNetworksExecution* execution, 869 bool enable); 870 871 /** 872 * SL Driver implementation of {@link ANeuralNetworksExecution_free}. 873 * Behavior, arguments, and outputs match NNAPI Runtime function 874 * {@link ANeuralNetworksExecution_free}, 875 * at the feature level of this NnApiSLDriver struct. 876 */ 877 void (*ANeuralNetworksExecution_free)(ANeuralNetworksExecution* execution); 878 879 /** 880 * SL Driver implementation of {@link ANeuralNetworksExecution_getDuration}. 881 * Behavior, arguments, and outputs match NNAPI Runtime function 882 * {@link ANeuralNetworksExecution_getDuration}, 883 * at the feature level of this NnApiSLDriver struct. 884 */ 885 int (*ANeuralNetworksExecution_getDuration)(const ANeuralNetworksExecution* execution, 886 int32_t durationCode, uint64_t* duration); 887 888 /** 889 * SL Driver implementation of {@link ANeuralNetworksExecution_getOutputOperandDimensions}. 890 * Behavior, arguments, and outputs match NNAPI Runtime function 891 * {@link ANeuralNetworksExecution_getOutputOperandDimensions}, 892 * at the feature level of this NnApiSLDriver struct. 893 */ 894 int (*ANeuralNetworksExecution_getOutputOperandDimensions)(ANeuralNetworksExecution* execution, 895 int32_t index, uint32_t* dimensions); 896 897 /** 898 * SL Driver implementation of {@link ANeuralNetworksExecution_getOutputOperandRank}. 899 * Behavior, arguments, and outputs match NNAPI Runtime function 900 * {@link ANeuralNetworksExecution_getOutputOperandRank}, 901 * at the feature level of this NnApiSLDriver struct. 902 */ 903 int (*ANeuralNetworksExecution_getOutputOperandRank)(ANeuralNetworksExecution* execution, 904 int32_t index, uint32_t* rank); 905 906 /** 907 * SL Driver implementation of {@link ANeuralNetworksExecution_setInput}. 908 * Behavior, arguments, and outputs match NNAPI Runtime function 909 * {@link ANeuralNetworksExecution_setInput}, 910 * at the feature level of this NnApiSLDriver struct. 911 */ 912 int (*ANeuralNetworksExecution_setInput)(ANeuralNetworksExecution* execution, int32_t index, 913 const ANeuralNetworksOperandType* type, 914 const void* buffer, size_t length); 915 916 /** 917 * SL Driver implementation of {@link ANeuralNetworksExecution_setInputFromMemory}. 918 * Behavior, arguments, and outputs match NNAPI Runtime function 919 * {@link ANeuralNetworksExecution_setInputFromMemory}, 920 * at the feature level of this NnApiSLDriver struct. 921 */ 922 int (*ANeuralNetworksExecution_setInputFromMemory)(ANeuralNetworksExecution* execution, 923 int32_t index, 924 const ANeuralNetworksOperandType* type, 925 const ANeuralNetworksMemory* memory, 926 size_t offset, size_t length); 927 928 /** 929 * SL Driver implementation of {@link ANeuralNetworksExecution_setLoopTimeout}. 930 * Behavior, arguments, and outputs match NNAPI Runtime function 931 * {@link ANeuralNetworksExecution_setLoopTimeout}, 932 * at the feature level of this NnApiSLDriver struct. 933 */ 934 int (*ANeuralNetworksExecution_setLoopTimeout)(ANeuralNetworksExecution* execution, 935 uint64_t duration); 936 937 /** 938 * SL Driver implementation of {@link ANeuralNetworksExecution_setMeasureTiming}. 939 * Behavior, arguments, and outputs match NNAPI Runtime function 940 * {@link ANeuralNetworksExecution_setMeasureTiming}, 941 * at the feature level of this NnApiSLDriver struct. 942 */ 943 int (*ANeuralNetworksExecution_setMeasureTiming)(ANeuralNetworksExecution* execution, 944 bool measure); 945 946 /** 947 * SL Driver implementation of {@link ANeuralNetworksExecution_setOutput}. 948 * Behavior, arguments, and outputs match NNAPI Runtime function 949 * {@link ANeuralNetworksExecution_setOutput}, 950 * at the feature level of this NnApiSLDriver struct. 951 */ 952 int (*ANeuralNetworksExecution_setOutput)(ANeuralNetworksExecution* execution, int32_t index, 953 const ANeuralNetworksOperandType* type, void* buffer, 954 size_t length); 955 956 /** 957 * SL Driver implementation of {@link ANeuralNetworksExecution_setOutputFromMemory}. 958 * Behavior, arguments, and outputs match NNAPI Runtime function 959 * {@link ANeuralNetworksExecution_setOutputFromMemory}, 960 * at the feature level of this NnApiSLDriver struct. 961 */ 962 int (*ANeuralNetworksExecution_setOutputFromMemory)(ANeuralNetworksExecution* execution, 963 int32_t index, 964 const ANeuralNetworksOperandType* type, 965 const ANeuralNetworksMemory* memory, 966 size_t offset, size_t length); 967 968 /** 969 * SL Driver implementation of {@link ANeuralNetworksExecution_setReusable}. 970 * Behavior, arguments, and outputs match NNAPI Runtime function 971 * {@link ANeuralNetworksExecution_setReusable}, 972 * at the feature level of this NnApiSLDriver struct. 973 */ 974 int (*ANeuralNetworksExecution_setReusable)(ANeuralNetworksExecution* execution, bool reusable); 975 976 /** 977 * SL Driver implementation of {@link ANeuralNetworksExecution_setTimeout}. 978 * Behavior, arguments, and outputs match NNAPI Runtime function 979 * {@link ANeuralNetworksExecution_setTimeout}, 980 * at the feature level of this NnApiSLDriver struct. 981 */ 982 int (*ANeuralNetworksExecution_setTimeout)(ANeuralNetworksExecution* execution, 983 uint64_t duration); 984 985 /** 986 * SL Driver implementation of {@link ANeuralNetworksExecution_startComputeWithDependencies}. 987 * Behavior, arguments, and outputs match NNAPI Runtime function 988 * {@link ANeuralNetworksExecution_startComputeWithDependencies}, 989 * at the feature level of this NnApiSLDriver struct. 990 */ 991 int (*ANeuralNetworksExecution_startComputeWithDependencies)( 992 ANeuralNetworksExecution* execution, const ANeuralNetworksEvent* const* dependencies, 993 uint32_t num_dependencies, uint64_t duration, ANeuralNetworksEvent** event); 994 995 /** 996 * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_addInputRole}. 997 * Behavior, arguments, and outputs match NNAPI Runtime function 998 * {@link ANeuralNetworksMemoryDesc_addInputRole}, 999 * at the feature level of this NnApiSLDriver struct. 1000 */ 1001 int (*ANeuralNetworksMemoryDesc_addInputRole)(ANeuralNetworksMemoryDesc* desc, 1002 const ANeuralNetworksCompilation* compilation, 1003 uint32_t index, float frequency); 1004 1005 /** 1006 * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_addOutputRole}. 1007 * Behavior, arguments, and outputs match NNAPI Runtime function 1008 * {@link ANeuralNetworksMemoryDesc_addOutputRole}, 1009 * at the feature level of this NnApiSLDriver struct. 1010 */ 1011 int (*ANeuralNetworksMemoryDesc_addOutputRole)(ANeuralNetworksMemoryDesc* desc, 1012 const ANeuralNetworksCompilation* compilation, 1013 uint32_t index, float frequency); 1014 1015 /** 1016 * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_create}. 1017 * Behavior, arguments, and outputs match NNAPI Runtime function 1018 * {@link ANeuralNetworksMemoryDesc_create}, 1019 * at the feature level of this NnApiSLDriver struct. 1020 */ 1021 int (*ANeuralNetworksMemoryDesc_create)(ANeuralNetworksMemoryDesc** desc); 1022 1023 /** 1024 * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_finish}. 1025 * Behavior, arguments, and outputs match NNAPI Runtime function 1026 * {@link ANeuralNetworksMemoryDesc_finish}, 1027 * at the feature level of this NnApiSLDriver struct. 1028 */ 1029 int (*ANeuralNetworksMemoryDesc_finish)(ANeuralNetworksMemoryDesc* desc); 1030 1031 /** 1032 * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_free}. 1033 * Behavior, arguments, and outputs match NNAPI Runtime function 1034 * {@link ANeuralNetworksMemoryDesc_free}, 1035 * at the feature level of this NnApiSLDriver struct. 1036 */ 1037 void (*ANeuralNetworksMemoryDesc_free)(ANeuralNetworksMemoryDesc* desc); 1038 1039 /** 1040 * SL Driver implementation of {@link ANeuralNetworksMemoryDesc_setDimensions}. 1041 * Behavior, arguments, and outputs match NNAPI Runtime function 1042 * {@link ANeuralNetworksMemoryDesc_setDimensions}, 1043 * at the feature level of this NnApiSLDriver struct. 1044 */ 1045 int (*ANeuralNetworksMemoryDesc_setDimensions)(ANeuralNetworksMemoryDesc* desc, uint32_t rank, 1046 const uint32_t* dimensions); 1047 1048 /** 1049 * SL Driver implementation of {@link ANeuralNetworksMemory_copy}. 1050 * Behavior, arguments, and outputs match NNAPI Runtime function 1051 * {@link ANeuralNetworksMemory_copy}, 1052 * at the feature level of this NnApiSLDriver struct. 1053 */ 1054 int (*ANeuralNetworksMemory_copy)(const ANeuralNetworksMemory* src, 1055 const ANeuralNetworksMemory* dst); 1056 1057 /** 1058 * SL Driver implementation of {@link ANeuralNetworksMemory_createFromAHardwareBuffer}. 1059 * Behavior, arguments, and outputs match NNAPI Runtime function 1060 * {@link ANeuralNetworksMemory_createFromAHardwareBuffer}, 1061 * at the feature level of this NnApiSLDriver struct. 1062 */ 1063 int (*ANeuralNetworksMemory_createFromAHardwareBuffer)(const AHardwareBuffer* ahwb, 1064 ANeuralNetworksMemory** memory); 1065 1066 /** 1067 * SL Driver implementation of {@link ANeuralNetworksMemory_createFromDesc}. 1068 * Behavior, arguments, and outputs match NNAPI Runtime function 1069 * {@link ANeuralNetworksMemory_createFromDesc}, 1070 * at the feature level of this NnApiSLDriver struct. 1071 */ 1072 int (*ANeuralNetworksMemory_createFromDesc)(const ANeuralNetworksMemoryDesc* desc, 1073 ANeuralNetworksMemory** memory); 1074 1075 /** 1076 * SL Driver implementation of {@link ANeuralNetworksMemory_createFromFd}. 1077 * Behavior, arguments, and outputs match NNAPI Runtime function 1078 * {@link ANeuralNetworksMemory_createFromFd}, 1079 * at the feature level of this NnApiSLDriver struct. 1080 */ 1081 int (*ANeuralNetworksMemory_createFromFd)(size_t size, int protect, int fd, size_t offset, 1082 ANeuralNetworksMemory** memory); 1083 1084 /** 1085 * SL Driver implementation of {@link ANeuralNetworksMemory_free}. 1086 * Behavior, arguments, and outputs match NNAPI Runtime function 1087 * {@link ANeuralNetworksMemory_free}, 1088 * at the feature level of this NnApiSLDriver struct. 1089 */ 1090 void (*ANeuralNetworksMemory_free)(ANeuralNetworksMemory* memory); 1091 1092 /** 1093 * SL Driver implementation of {@link ANeuralNetworksModel_addOperand}. 1094 * Behavior, arguments, and outputs match NNAPI Runtime function 1095 * {@link ANeuralNetworksModel_addOperand}, 1096 * at the feature level of this NnApiSLDriver struct. 1097 */ 1098 int (*ANeuralNetworksModel_addOperand)(ANeuralNetworksModel* model, 1099 const ANeuralNetworksOperandType* type); 1100 1101 /** 1102 * SL Driver implementation of {@link ANeuralNetworksModel_addOperation}. 1103 * Behavior, arguments, and outputs match NNAPI Runtime function 1104 * {@link ANeuralNetworksModel_addOperation}, 1105 * at the feature level of this NnApiSLDriver struct. 1106 */ 1107 int (*ANeuralNetworksModel_addOperation)(ANeuralNetworksModel* model, 1108 ANeuralNetworksOperationType type, uint32_t inputCount, 1109 const uint32_t* inputs, uint32_t outputCount, 1110 const uint32_t* outputs); 1111 1112 /** 1113 * SL Driver implementation of {@link ANeuralNetworksModel_create}. 1114 * Behavior, arguments, and outputs match NNAPI Runtime function 1115 * {@link ANeuralNetworksModel_create}, 1116 * at the feature level of this NnApiSLDriver struct. 1117 */ 1118 int (*ANeuralNetworksModel_create)(ANeuralNetworksModel** model); 1119 1120 /** 1121 * SL Driver implementation of {@link ANeuralNetworksModel_finish}. 1122 * Behavior, arguments, and outputs match NNAPI Runtime function 1123 * {@link ANeuralNetworksModel_finish}, 1124 * at the feature level of this NnApiSLDriver struct. 1125 */ 1126 int (*ANeuralNetworksModel_finish)(ANeuralNetworksModel* model); 1127 1128 /** 1129 * SL Driver implementation of {@link ANeuralNetworksModel_free}. 1130 * Behavior, arguments, and outputs match NNAPI Runtime function 1131 * {@link ANeuralNetworksModel_free}, 1132 * at the feature level of this NnApiSLDriver struct. 1133 */ 1134 void (*ANeuralNetworksModel_free)(ANeuralNetworksModel* model); 1135 1136 /** 1137 * SL Driver implementation of {@link ANeuralNetworksModel_getExtensionOperandType}. 1138 * Behavior, arguments, and outputs match NNAPI Runtime function 1139 * {@link ANeuralNetworksModel_getExtensionOperandType}, 1140 * at the feature level of this NnApiSLDriver struct. 1141 */ 1142 int (*ANeuralNetworksModel_getExtensionOperandType)(ANeuralNetworksModel* model, 1143 const char* extensionName, 1144 uint16_t operandCodeWithinExtension, 1145 int32_t* type); 1146 1147 /** 1148 * SL Driver implementation of {@link ANeuralNetworksModel_getExtensionOperationType}. 1149 * Behavior, arguments, and outputs match NNAPI Runtime function 1150 * {@link ANeuralNetworksModel_getExtensionOperationType}, 1151 * at the feature level of this NnApiSLDriver struct. 1152 */ 1153 int (*ANeuralNetworksModel_getExtensionOperationType)(ANeuralNetworksModel* model, 1154 const char* extensionName, 1155 uint16_t operationCodeWithinExtension, 1156 ANeuralNetworksOperationType* type); 1157 1158 /** 1159 * SL Driver implementation of {@link ANeuralNetworksModel_getSupportedOperationsForDevices}. 1160 * Behavior, arguments, and outputs match NNAPI Runtime function 1161 * {@link ANeuralNetworksModel_getSupportedOperationsForDevices}, 1162 * at the feature level of this NnApiSLDriver struct. 1163 */ 1164 int (*ANeuralNetworksModel_getSupportedOperationsForDevices)( 1165 const ANeuralNetworksModel* model, const ANeuralNetworksDevice* const* devices, 1166 uint32_t numDevices, bool* supportedOps); 1167 1168 /** 1169 * SL Driver implementation of {@link ANeuralNetworksModel_identifyInputsAndOutputs}. 1170 * Behavior, arguments, and outputs match NNAPI Runtime function 1171 * {@link ANeuralNetworksModel_identifyInputsAndOutputs}, 1172 * at the feature level of this NnApiSLDriver struct. 1173 */ 1174 int (*ANeuralNetworksModel_identifyInputsAndOutputs)(ANeuralNetworksModel* model, 1175 uint32_t inputCount, 1176 const uint32_t* inputs, 1177 uint32_t outputCount, 1178 const uint32_t* outputs); 1179 1180 /** 1181 * SL Driver implementation of {@link ANeuralNetworksModel_relaxComputationFloat32toFloat16}. 1182 * Behavior, arguments, and outputs match NNAPI Runtime function 1183 * {@link ANeuralNetworksModel_relaxComputationFloat32toFloat16}, 1184 * at the feature level of this NnApiSLDriver struct. 1185 */ 1186 int (*ANeuralNetworksModel_relaxComputationFloat32toFloat16)(ANeuralNetworksModel* model, 1187 bool allow); 1188 1189 /** 1190 * SL Driver implementation of {@link ANeuralNetworksModel_setOperandExtensionData}. 1191 * Behavior, arguments, and outputs match NNAPI Runtime function 1192 * {@link ANeuralNetworksModel_setOperandExtensionData}, 1193 * at the feature level of this NnApiSLDriver struct. 1194 */ 1195 int (*ANeuralNetworksModel_setOperandExtensionData)(ANeuralNetworksModel* model, int32_t index, 1196 const void* data, size_t length); 1197 1198 /** 1199 * SL Driver implementation of {@link ANeuralNetworksModel_setOperandSymmPerChannelQuantParams}. 1200 * Behavior, arguments, and outputs match NNAPI Runtime function 1201 * {@link ANeuralNetworksModel_setOperandSymmPerChannelQuantParams}, 1202 * at the feature level of this NnApiSLDriver struct. 1203 */ 1204 int (*ANeuralNetworksModel_setOperandSymmPerChannelQuantParams)( 1205 ANeuralNetworksModel* model, int32_t index, 1206 const ANeuralNetworksSymmPerChannelQuantParams* channelQuant); 1207 1208 /** 1209 * SL Driver implementation of {@link ANeuralNetworksModel_setOperandValue}. 1210 * Behavior, arguments, and outputs match NNAPI Runtime function 1211 * {@link ANeuralNetworksModel_setOperandValue}, 1212 * at the feature level of this NnApiSLDriver struct. 1213 */ 1214 int (*ANeuralNetworksModel_setOperandValue)(ANeuralNetworksModel* model, int32_t index, 1215 const void* buffer, size_t length); 1216 1217 /** 1218 * SL Driver implementation of {@link ANeuralNetworksModel_setOperandValueFromMemory}. 1219 * Behavior, arguments, and outputs match NNAPI Runtime function 1220 * {@link ANeuralNetworksModel_setOperandValueFromMemory}, 1221 * at the feature level of this NnApiSLDriver struct. 1222 */ 1223 int (*ANeuralNetworksModel_setOperandValueFromMemory)(ANeuralNetworksModel* model, 1224 int32_t index, 1225 const ANeuralNetworksMemory* memory, 1226 size_t offset, size_t length); 1227 1228 /** 1229 * SL Driver implementation of {@link ANeuralNetworksModel_setOperandValueFromModel}. 1230 * Behavior, arguments, and outputs match NNAPI Runtime function 1231 * {@link ANeuralNetworksModel_setOperandValueFromModel}, 1232 * at the feature level of this NnApiSLDriver struct. 1233 */ 1234 int (*ANeuralNetworksModel_setOperandValueFromModel)(ANeuralNetworksModel* model, int32_t index, 1235 const ANeuralNetworksModel* value); 1236 1237 /** 1238 * SL Driver implementation of {@link ANeuralNetworks_getDefaultLoopTimeout}. 1239 * Behavior, arguments, and outputs match NNAPI Runtime function 1240 * {@link ANeuralNetworks_getDefaultLoopTimeout}, 1241 * at the feature level of this NnApiSLDriver struct. 1242 */ 1243 uint64_t (*ANeuralNetworks_getDefaultLoopTimeout)(); 1244 1245 /** 1246 * SL Driver implementation of {@link ANeuralNetworks_getDevice}. 1247 * Behavior, arguments, and outputs match NNAPI Runtime function 1248 * {@link ANeuralNetworks_getDevice}, 1249 * at the feature level of this NnApiSLDriver struct. 1250 */ 1251 int (*ANeuralNetworks_getDevice)(uint32_t devIndex, ANeuralNetworksDevice** device); 1252 1253 /** 1254 * SL Driver implementation of {@link ANeuralNetworks_getDeviceCount}. 1255 * Behavior, arguments, and outputs match NNAPI Runtime function 1256 * {@link ANeuralNetworks_getDeviceCount}, 1257 * at the feature level of this NnApiSLDriver struct. 1258 */ 1259 int (*ANeuralNetworks_getDeviceCount)(uint32_t* numDevices); 1260 1261 /** 1262 * SL Driver implementation of {@link ANeuralNetworks_getMaximumLoopTimeout}. 1263 * Behavior, arguments, and outputs match NNAPI Runtime function 1264 * {@link ANeuralNetworks_getMaximumLoopTimeout}, 1265 * at the feature level of this NnApiSLDriver struct. 1266 */ 1267 uint64_t (*ANeuralNetworks_getMaximumLoopTimeout)(); 1268 1269 /** 1270 * SL Driver implementation of {@link ANeuralNetworks_getRuntimeFeatureLevel}. 1271 * Behavior, arguments, and outputs match NNAPI Runtime function 1272 * {@link ANeuralNetworks_getRuntimeFeatureLevel}, 1273 * at the feature level of this NnApiSLDriver struct. 1274 */ 1275 int64_t (*ANeuralNetworks_getRuntimeFeatureLevel)(); 1276 1277 /** 1278 * SL Driver implementation of a function similar to 1279 * {@link ANeuralNetworksCompilation_setCaching} that takes file descriptors 1280 * instead of a cache directory. 1281 * Behavior and outputs match NNAPI Runtime function 1282 * {@link ANeuralNetworksCompilation_setCaching}, 1283 * at the feature level of this NnApiSLDriver struct. 1284 */ 1285 int (*SL_ANeuralNetworksCompilation_setCachingFromFds)(ANeuralNetworksCompilation* compilation, 1286 const int* modelCacheFds, 1287 const uint32_t numModelCacheFiles, 1288 const int* dataCacheFds, 1289 const uint32_t numDataCacheFiles, 1290 const uint8_t* token); 1291 1292 /** 1293 * SL Driver implementation of {@link SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded}. 1294 * Behavior, arguments, and outputs match NNAPI Runtime function 1295 * {@link SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded}, 1296 * at the feature level of this NnApiSLDriver struct. 1297 */ 1298 int (*SL_ANeuralNetworksDevice_getNumberOfCacheFilesNeeded)(const ANeuralNetworksDevice* device, 1299 uint32_t* numModelCacheFiles, 1300 uint32_t* numDataCacheFiles); 1301 1302 /** 1303 * SL Driver implementation of {@link SL_ANeuralNetworksDevice_getPerformanceInfo}. 1304 * Behavior, arguments, and outputs match NNAPI Runtime function 1305 * {@link SL_ANeuralNetworksDevice_getPerformanceInfo}, 1306 * at the feature level of this NnApiSLDriver struct. 1307 */ 1308 int (*SL_ANeuralNetworksDevice_getPerformanceInfo)( 1309 const ANeuralNetworksDevice* device, int32_t performanceInfoKind, 1310 SL_ANeuralNetworksPerformanceInfo* performanceInfo); 1311 1312 /** 1313 * SL Driver implementation of {@link 1314 * SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo}. Behavior, arguments, and 1315 * outputs match NNAPI Runtime function 1316 * {@link SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo}, 1317 * at the feature level of this NnApiSLDriver struct. 1318 */ 1319 int (*SL_ANeuralNetworksDevice_forEachOperandPerformanceInfo)( 1320 const ANeuralNetworksDevice* device, void* context, 1321 void (*callback)(SL_ANeuralNetworksOperandPerformanceInfo, void*)); 1322 1323 /** 1324 * SL Driver implementation of {@link SL_ANeuralNetworksDevice_getVendorExtensionCount}. 1325 * Behavior, arguments, and outputs match NNAPI Runtime function 1326 * {@link SL_ANeuralNetworksDevice_getVendorExtensionCount}, 1327 * at the feature level of this NnApiSLDriver struct. 1328 */ 1329 int (*SL_ANeuralNetworksDevice_getVendorExtensionCount)(const ANeuralNetworksDevice* device, 1330 uint32_t* vendorExtensionCount); 1331 1332 /** 1333 * SL Driver implementation of {@link SL_ANeuralNetworksDevice_getVendorExtensionName}. 1334 * Behavior, arguments, and outputs match NNAPI Runtime function 1335 * {@link SL_ANeuralNetworksDevice_getVendorExtensionName}, 1336 * at the feature level of this NnApiSLDriver struct. 1337 */ 1338 int (*SL_ANeuralNetworksDevice_getVendorExtensionName)(const ANeuralNetworksDevice* device, 1339 uint32_t vendorExtensionIndex, 1340 const char** extensionName); 1341 1342 /** 1343 * SL Driver implementation of {@link 1344 * SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation}. Behavior, arguments, 1345 * and outputs match NNAPI Runtime function 1346 * {@link SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation}, 1347 * at the feature level of this NnApiSLDriver struct. 1348 */ 1349 int (*SL_ANeuralNetworksDevice_forEachVendorExtensionOperandTypeInformation)( 1350 const ANeuralNetworksDevice* device, uint32_t vendorExtensionIndex, void* context, 1351 void (*callback)(SL_ANeuralNetworksExtensionOperandTypeInformation, void*)); 1352 1353 /** 1354 * SL Driver implementation of {@link SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId}. 1355 * Behavior, arguments, and outputs match NNAPI Runtime function {@link 1356 * SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId}, at the feature level of this 1357 * NnApiSLDriver struct. 1358 */ 1359 int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getSessionId)( 1360 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1361 1362 /** 1363 * SL Driver implementation of {@link 1364 * SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion}. Behavior, arguments, and 1365 * outputs match NNAPI Runtime function {@link 1366 * SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion}, at the feature level of this 1367 * NnApiSLDriver struct. 1368 */ 1369 int64_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getNnApiVersion)( 1370 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1371 1372 /** 1373 * SL Driver implementation of {@link 1374 * SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash}. Behavior, arguments, and 1375 * outputs match NNAPI Runtime function {@link 1376 * SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash}, at the feature level of this 1377 * NnApiSLDriver struct. 1378 */ 1379 const uint8_t* (*SL_ANeuralNetworksDiagnosticCompilationInfo_getModelArchHash)( 1380 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1381 1382 /** 1383 * SL Driver implementation of {@link SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds}. 1384 * Behavior, arguments, and outputs match NNAPI Runtime function {@link 1385 * SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds}, at the feature level of this 1386 * NnApiSLDriver struct. 1387 */ 1388 const char* (*SL_ANeuralNetworksDiagnosticCompilationInfo_getDeviceIds)( 1389 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1390 1391 /** 1392 * SL Driver implementation of {@link SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode}. 1393 * Behavior, arguments, and outputs match NNAPI Runtime function {@link 1394 * SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode}, at the feature level of this 1395 * NnApiSLDriver struct. 1396 */ 1397 int32_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getErrorCode)( 1398 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1399 1400 /** 1401 * SL Driver implementation of {@link 1402 * SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass}. Behavior, arguments, and 1403 * outputs match NNAPI Runtime function {@link 1404 * SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass}, at the feature level of this 1405 * NnApiSLDriver struct. 1406 */ 1407 ANeuralNetworksDiagnosticDataClass ( 1408 *SL_ANeuralNetworksDiagnosticCompilationInfo_getInputDataClass)( 1409 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1410 1411 /** 1412 * SL Driver implementation of {@link 1413 * SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass}. Behavior, arguments, and 1414 * outputs match NNAPI Runtime function {@link 1415 * SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass}, at the feature level of this 1416 * NnApiSLDriver struct. 1417 */ 1418 ANeuralNetworksDiagnosticDataClass ( 1419 *SL_ANeuralNetworksDiagnosticCompilationInfo_getOutputDataClass)( 1420 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1421 1422 /** 1423 * SL Driver implementation of {@link 1424 * SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos}. Behavior, arguments, 1425 * and outputs match NNAPI Runtime function {@link 1426 * SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos}, at the feature level of 1427 * this NnApiSLDriver struct. 1428 */ 1429 uint64_t (*SL_ANeuralNetworksDiagnosticCompilationInfo_getCompilationTimeNanos)( 1430 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1431 1432 /** 1433 * SL Driver implementation of {@link 1434 * SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled}. Behavior, arguments, and 1435 * outputs match NNAPI Runtime function {@link 1436 * SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled}, at the feature level of this 1437 * NnApiSLDriver struct. 1438 */ 1439 bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_isCachingEnabled)( 1440 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1441 1442 /** 1443 * SL Driver implementation of {@link 1444 * SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed}. Behavior, arguments, and 1445 * outputs match NNAPI Runtime function {@link 1446 * SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed}, at the feature level of this 1447 * NnApiSLDriver struct. 1448 */ 1449 bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_isControlFlowUsed)( 1450 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1451 1452 /** 1453 * SL Driver implementation of {@link 1454 * SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed}. Behavior, arguments, and 1455 * outputs match NNAPI Runtime function {@link 1456 * SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed}, at the feature level of 1457 * this NnApiSLDriver struct. 1458 */ 1459 bool (*SL_ANeuralNetworksDiagnosticCompilationInfo_areDynamicTensorsUsed)( 1460 const ANeuralNetworksDiagnosticCompilationInfo* diagnosticCompilationInfo); 1461 1462 /** 1463 * SL Driver implementation of {@link SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId}. 1464 * Behavior, arguments, and outputs match NNAPI Runtime function {@link 1465 * SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId}, at the feature level of this 1466 * NnApiSLDriver struct. 1467 */ 1468 int32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getSessionId)( 1469 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1470 1471 /** 1472 * SL Driver implementation of {@link 1473 * SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion}. Behavior, arguments, and outputs 1474 * match NNAPI Runtime function {@link 1475 * SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion}, at the feature level of this 1476 * NnApiSLDriver struct. 1477 */ 1478 int64_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getNnApiVersion)( 1479 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1480 1481 /** 1482 * SL Driver implementation of {@link 1483 * SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash}. Behavior, arguments, and outputs 1484 * match NNAPI Runtime function {@link 1485 * SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash}, at the feature level of this 1486 * NnApiSLDriver struct. 1487 */ 1488 const uint8_t* (*SL_ANeuralNetworksDiagnosticExecutionInfo_getModelArchHash)( 1489 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1490 1491 /** 1492 * SL Driver implementation of {@link SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds}. 1493 * Behavior, arguments, and outputs match NNAPI Runtime function {@link 1494 * SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds}, at the feature level of this 1495 * NnApiSLDriver struct. 1496 */ 1497 const char* (*SL_ANeuralNetworksDiagnosticExecutionInfo_getDeviceIds)( 1498 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1499 1500 /** 1501 * SL Driver implementation of {@link 1502 * SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode}. Behavior, arguments, and outputs 1503 * match NNAPI Runtime function {@link 1504 * SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode}, at the feature level of this 1505 * NnApiSLDriver struct. 1506 */ 1507 ANeuralNetworksDiagnosticExecutionMode ( 1508 *SL_ANeuralNetworksDiagnosticExecutionInfo_getExecutionMode)( 1509 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1510 1511 /** 1512 * SL Driver implementation of {@link 1513 * SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass}. Behavior, arguments, and 1514 * outputs match NNAPI Runtime function {@link 1515 * SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass}, at the feature level of this 1516 * NnApiSLDriver struct. 1517 */ 1518 ANeuralNetworksDiagnosticDataClass ( 1519 *SL_ANeuralNetworksDiagnosticExecutionInfo_getInputDataClass)( 1520 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1521 1522 /** 1523 * SL Driver implementation of {@link 1524 * SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass}. Behavior, arguments, and 1525 * outputs match NNAPI Runtime function {@link 1526 * SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass}, at the feature level of this 1527 * NnApiSLDriver struct. 1528 */ 1529 ANeuralNetworksDiagnosticDataClass ( 1530 *SL_ANeuralNetworksDiagnosticExecutionInfo_getOutputDataClass)( 1531 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1532 1533 /** 1534 * SL Driver implementation of {@link SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode}. 1535 * Behavior, arguments, and outputs match NNAPI Runtime function {@link 1536 * SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode}, at the feature level of this 1537 * NnApiSLDriver struct. 1538 */ 1539 uint32_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getErrorCode)( 1540 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1541 1542 /** 1543 * SL Driver implementation of {@link 1544 * SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos}. Behavior, arguments, 1545 * and outputs match NNAPI Runtime function {@link 1546 * SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos}, at the feature level 1547 * of this NnApiSLDriver struct. 1548 */ 1549 uint64_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getRuntimeExecutionTimeNanos)( 1550 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1551 1552 /** 1553 * SL Driver implementation of {@link 1554 * SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos}. Behavior, arguments, 1555 * and outputs match NNAPI Runtime function {@link 1556 * SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos}, at the feature level 1557 * of this NnApiSLDriver struct. 1558 */ 1559 uint64_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getDriverExecutionTimeNanos)( 1560 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1561 1562 /** 1563 * SL Driver implementation of {@link 1564 * SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos}. Behavior, 1565 * arguments, and outputs match NNAPI Runtime function {@link 1566 * SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos}, at the feature 1567 * level of this NnApiSLDriver struct. 1568 */ 1569 uint64_t (*SL_ANeuralNetworksDiagnosticExecutionInfo_getHardwareExecutionTimeNanos)( 1570 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1571 1572 /** 1573 * SL Driver implementation of {@link 1574 * SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled}. Behavior, arguments, and outputs 1575 * match NNAPI Runtime function {@link 1576 * SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled}, at the feature level of this 1577 * NnApiSLDriver struct. 1578 */ 1579 bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isCachingEnabled)( 1580 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1581 1582 /** 1583 * SL Driver implementation of {@link 1584 * SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed}. Behavior, arguments, and 1585 * outputs match NNAPI Runtime function {@link 1586 * SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed}, at the feature level of this 1587 * NnApiSLDriver struct. 1588 */ 1589 bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_isControlFlowUsed)( 1590 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1591 1592 /** 1593 * SL Driver implementation of {@link 1594 * SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed}. Behavior, arguments, and 1595 * outputs match NNAPI Runtime function {@link 1596 * SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed}, at the feature level of 1597 * this NnApiSLDriver struct. 1598 */ 1599 bool (*SL_ANeuralNetworksDiagnosticExecutionInfo_areDynamicTensorsUsed)( 1600 const ANeuralNetworksDiagnosticExecutionInfo* diagnosticExecutionInfo); 1601 1602 /** 1603 * SL Driver implementation of {@link SL_ANeuralNetworksDiagnostic_registerCallbacks}. Behavior, 1604 * arguments, and outputs match NNAPI Runtime function {@link 1605 * SL_ANeuralNetworksDiagnostic_registerCallbacks}, at the feature level of this NnApiSLDriver 1606 * struct. 1607 */ 1608 void (*SL_ANeuralNetworksDiagnostic_registerCallbacks)( 1609 ANeuralNetworksDiagnosticCompilationFinishedCallback compilationCallback, 1610 ANeuralNetworksDiagnosticExecutionFinishedCallback executionCallback, 1611 void* callbackContext); 1612 1613 } NnApiSLDriverImplFL5; 1614 1615 /** 1616 * NnApiSLDriverImpl for an Updatable SL Driver implementing {@link 1617 * ANEURALNETWORKS_FEATURE_LEVEL_6}. 1618 * 1619 * This struct must set its implFeatureLevel to {@link ANEURALNETWORKS_FEATURE_LEVEL_6}. 1620 * 1621 */ 1622 typedef struct NnApiSLDriverImplFL5 NnApiSLDriverImplFL6; 1623 1624 /** 1625 * NnApiSLDriverImpl for an Updatable SL Driver implementing {@link 1626 * ANEURALNETWORKS_FEATURE_LEVEL_7}. 1627 * 1628 * This struct must set its implFeatureLevel to {@link ANEURALNETWORKS_FEATURE_LEVEL_7}. 1629 * 1630 */ 1631 typedef NnApiSLDriverImplFL6 NnApiSLDriverImplFL7; 1632 1633 /** 1634 * NnApiSLDriverImpl for an Updatable SL Driver implementing {@link 1635 * ANEURALNETWORKS_FEATURE_LEVEL_8}. 1636 * 1637 * This struct must set its implFeatureLevel to {@link ANEURALNETWORKS_FEATURE_LEVEL_8}. 1638 */ 1639 typedef struct NnApiSLDriverImplFL8 { 1640 /** 1641 * Base type with version information. Allows to cast a pointer of this type 1642 * to NnApiSLDriverImpl* with valid version information. 1643 * For this type, its .version fields should be always set to {@link 1644 * ANEURALNETWORKS_FEATURE_LEVEL_8}. 1645 */ 1646 NnApiSLDriverImplFL5 base; 1647 1648 /** 1649 * SL Driver implementation of {@link ANeuralNetworksCompilation_addExtensionAttribute}. 1650 * Behavior, arguments, and outputs match NNAPI Runtime function {@link 1651 * ANeuralNetworksCompilation_addExtensionAttribute}, at the feature level of this NnApiSLDriver 1652 * struct. 1653 */ 1654 int (*ANeuralNetworksCompilation_addExtensionAttribute)(ANeuralNetworksCompilation* compilation, 1655 const char* extensionName, 1656 uint16_t attributeCodeWithinExtension, 1657 const void* data, size_t length); 1658 1659 /** 1660 * SL Driver implementation of {@link ANeuralNetworksExecution_addExtensionAttribute}. 1661 * Behavior, arguments, and outputs match NNAPI Runtime function {@link 1662 * ANeuralNetworksExecution_addExtensionAttribute}, at the feature level of this NnApiSLDriver 1663 * struct. 1664 */ 1665 int (*ANeuralNetworksExecution_addExtensionAttribute)(ANeuralNetworksExecution* execution, 1666 const char* extensionName, 1667 uint16_t attributeCodeWithinExtension, 1668 const void* data, size_t length); 1669 1670 } NnApiSLDriverImplFL8; 1671 1672 __END_DECLS 1673