/*
 * Copyright (C) 2020 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_ADAPTER_ADAPTER_H
#define ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_ADAPTER_ADAPTER_H

#include <android/hardware/neuralnetworks/1.3/IDevice.h>
#include <nnapi/IDevice.h>
#include <nnapi/Types.h>
#include <sys/types.h>
#include <functional>
#include <memory>

// See hardware/interfaces/neuralnetworks/utils/README.md for more information on HIDL interface
// lifetimes across processes and for protecting asynchronous calls across HIDL.

namespace android::hardware::neuralnetworks::adapter {

/**
 * A self-contained unit of work to be executed.
 */
using Task = std::function<void()>;

/**
 * A type-erased executor which executes a task asynchronously.
 *
 * This executor is also provided with an Application ID (Android User ID) and an optional deadline
 * that the caller expects to be the upper bound on the time needed to complete the task.
 */
using Executor = std::function<void(Task, uid_t, nn::OptionalTimePoint)>;
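
// A sketch of a possible Executor, for illustration only (not part of this header). Assuming
// <thread> is available, a minimal executor could ignore the Application ID and the deadline and
// run each task on a detached thread, which is roughly what the default executor used by the
// single-argument adapt() overload below does:
//
//     const Executor executor = [](Task task, uid_t /*uid*/, nn::OptionalTimePoint /*deadline*/) {
//         std::thread(std::move(task)).detach();
//     };
//
// A production executor might instead enqueue tasks on a thread pool and use the deadline to
// deprioritize or reject work it cannot complete in time.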

/**
 * Adapt an NNAPI canonical interface object to a HIDL NN HAL interface object.
 *
 * The IPreparedModel object created from IDevice::prepareModel or IDevice::prepareModelFromCache
 * must return "const nn::Model*" from IPreparedModel::getUnderlyingResource().
 *
 * @param device NNAPI canonical IDevice interface object to be adapted.
 * @param executor Type-erased executor to handle executing tasks asynchronously.
 * @return HIDL NN HAL IDevice interface object.
 */
sp<V1_3::IDevice> adapt(nn::SharedDevice device, Executor executor);
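
// Usage sketch, for illustration only; "device" stands for a canonical nn::SharedDevice obtained
// elsewhere (for example from a sample driver), and "executor" for an Executor such as the one
// sketched above. getSampleDevice() is a hypothetical helper:
//
//     const nn::SharedDevice device = getSampleDevice();
//     const sp<V1_3::IDevice> hidlDevice = adapt(device, executor);
//
// The returned object can then be used wherever a HIDL V1_3::IDevice is expected, such as when
// registering the driver as a HIDL service.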

/**
 * Adapt an NNAPI canonical interface object to a HIDL NN HAL interface object.
 *
 * The IPreparedModel object created from IDevice::prepareModel or IDevice::prepareModelFromCache
 * must return "const nn::Model*" from IPreparedModel::getUnderlyingResource().
 *
 * This function uses a default executor, which will execute tasks from a detached thread.
 *
 * @param device NNAPI canonical IDevice interface object to be adapted.
 * @return HIDL NN HAL IDevice interface object.
 */
sp<V1_3::IDevice> adapt(nn::SharedDevice device);
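
// Usage sketch with the default executor, for illustration only; registration uses the
// registerAsService() method generated for HIDL interfaces, and the service name "nnapi-sample"
// is illustrative:
//
//     const sp<V1_3::IDevice> hidlDevice = adapt(std::move(device));
//     if (hidlDevice->registerAsService("nnapi-sample") != ::android::OK) {
//         // handle registration failure
//     }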

}  // namespace android::hardware::neuralnetworks::adapter

#endif  // ANDROID_HARDWARE_INTERFACES_NEURALNETWORKS_UTILS_ADAPTER_ADAPTER_H