1 /*
2  * Copyright (C) 2021 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "InvalidDevice"
18 
19 #include "InvalidDevice.h"
20 
21 #include <aidl/android/hardware/neuralnetworks/BnBuffer.h>
22 #include <aidl/android/hardware/neuralnetworks/BnDevice.h>
23 #include <aidl/android/hardware/neuralnetworks/BnPreparedModel.h>
24 #include <android/binder_auto_utils.h>
25 
26 #include "Conversions.h"
27 #include "Utils.h"
28 
#include <limits>
#include <memory>
#include <string>
#include <utility>
#include <vector>
33 
34 namespace aidl::android::hardware::neuralnetworks {
35 namespace {
36 
toAStatus(ErrorStatus errorStatus,const std::string & errorMessage)37 ndk::ScopedAStatus toAStatus(ErrorStatus errorStatus, const std::string& errorMessage) {
38     if (errorStatus == ErrorStatus::NONE) {
39         return ndk::ScopedAStatus::ok();
40     }
41     return ndk::ScopedAStatus::fromServiceSpecificErrorWithMessage(
42             static_cast<int32_t>(errorStatus), errorMessage.c_str());
43 }
44 
45 }  // namespace
46 
create()47 std::shared_ptr<InvalidDevice> InvalidDevice::create() {
48     constexpr auto perf = PerformanceInfo{
49             .execTime = std::numeric_limits<float>::max(),
50             .powerUsage = std::numeric_limits<float>::max(),
51     };
52     auto capabilities = Capabilities{
53             .relaxedFloat32toFloat16PerformanceScalar = perf,
54             .relaxedFloat32toFloat16PerformanceTensor = perf,
55             .operandPerformance = {},
56             .ifPerformance = perf,
57             .whilePerformance = perf,
58     };
59     constexpr auto numberOfCacheFiles = NumberOfCacheFiles{
60             .numModelCache = 0,
61             .numDataCache = 0,
62     };
63     std::vector<Extension> extensions{};
64     constexpr auto deviceType = DeviceType::OTHER;
65     std::string versionString = "invalid";
66 
67     return ndk::SharedRefBase::make<InvalidDevice>(std::move(capabilities), numberOfCacheFiles,
68                                                    std::move(extensions), deviceType,
69                                                    std::move(versionString));
70 }
71 
// Captures all reported device properties once at construction; the immutable
// (k-prefixed) members are simply echoed back by the getters below.
// Sink parameters (capabilities, extensions, versionString) are taken by value
// and moved into place.
InvalidDevice::InvalidDevice(Capabilities capabilities,
                             const NumberOfCacheFiles& numberOfCacheFiles,
                             std::vector<Extension> extensions, DeviceType deviceType,
                             std::string versionString)
    : kCapabilities(std::move(capabilities)),
      kNumberOfCacheFiles(numberOfCacheFiles),
      kExtensions(std::move(extensions)),
      kDeviceType(deviceType),
      kVersionString(std::move(versionString)) {}
81 
allocate(const BufferDesc &,const std::vector<IPreparedModelParcel> &,const std::vector<BufferRole> &,const std::vector<BufferRole> &,DeviceBuffer *)82 ndk::ScopedAStatus InvalidDevice::allocate(
83         const BufferDesc& /*desc*/, const std::vector<IPreparedModelParcel>& /*preparedModels*/,
84         const std::vector<BufferRole>& /*inputRoles*/,
85         const std::vector<BufferRole>& /*outputRoles*/, DeviceBuffer* /*deviceBuffer*/) {
86     return toAStatus(ErrorStatus::GENERAL_FAILURE, "InvalidDevice");
87 }
88 
getCapabilities(Capabilities * capabilities)89 ndk::ScopedAStatus InvalidDevice::getCapabilities(Capabilities* capabilities) {
90     *capabilities = kCapabilities;
91     return ndk::ScopedAStatus::ok();
92 }
93 
getNumberOfCacheFilesNeeded(NumberOfCacheFiles * numberOfCacheFiles)94 ndk::ScopedAStatus InvalidDevice::getNumberOfCacheFilesNeeded(
95         NumberOfCacheFiles* numberOfCacheFiles) {
96     *numberOfCacheFiles = kNumberOfCacheFiles;
97     return ndk::ScopedAStatus::ok();
98 }
99 
getSupportedExtensions(std::vector<Extension> * extensions)100 ndk::ScopedAStatus InvalidDevice::getSupportedExtensions(std::vector<Extension>* extensions) {
101     *extensions = kExtensions;
102     return ndk::ScopedAStatus::ok();
103 }
104 
getSupportedOperations(const Model & model,std::vector<bool> * supportedOperations)105 ndk::ScopedAStatus InvalidDevice::getSupportedOperations(const Model& model,
106                                                          std::vector<bool>* supportedOperations) {
107     if (const auto result = utils::validate(model); !result.ok()) {
108         return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
109     }
110     *supportedOperations = std::vector<bool>(model.main.operations.size(), false);
111     return ndk::ScopedAStatus::ok();
112 }
113 
getType(DeviceType * deviceType)114 ndk::ScopedAStatus InvalidDevice::getType(DeviceType* deviceType) {
115     *deviceType = kDeviceType;
116     return ndk::ScopedAStatus::ok();
117 }
118 
getVersionString(std::string * versionString)119 ndk::ScopedAStatus InvalidDevice::getVersionString(std::string* versionString) {
120     *versionString = kVersionString;
121     return ndk::ScopedAStatus::ok();
122 }
123 
prepareModel(const Model & model,ExecutionPreference preference,Priority priority,int64_t deadline,const std::vector<ndk::ScopedFileDescriptor> & modelCache,const std::vector<ndk::ScopedFileDescriptor> & dataCache,const std::vector<uint8_t> & token,const std::shared_ptr<IPreparedModelCallback> & callback)124 ndk::ScopedAStatus InvalidDevice::prepareModel(
125         const Model& model, ExecutionPreference preference, Priority priority, int64_t deadline,
126         const std::vector<ndk::ScopedFileDescriptor>& modelCache,
127         const std::vector<ndk::ScopedFileDescriptor>& dataCache, const std::vector<uint8_t>& token,
128         const std::shared_ptr<IPreparedModelCallback>& callback) {
129     if (callback.get() == nullptr) {
130         return toAStatus(ErrorStatus::INVALID_ARGUMENT,
131                          "invalid callback passed to InvalidDevice::prepareModel");
132     }
133     if (const auto result = utils::validate(model); !result.ok()) {
134         callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
135         return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
136     }
137     if (const auto result = utils::validate(preference); !result.ok()) {
138         callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
139         return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
140     }
141     if (const auto result = utils::validate(priority); !result.ok()) {
142         callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
143         return toAStatus(ErrorStatus::INVALID_ARGUMENT, result.error());
144     }
145     if (deadline < -1) {
146         callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
147         return toAStatus(ErrorStatus::INVALID_ARGUMENT,
148                          "Invalid deadline " + std::to_string(deadline));
149     }
150     if (modelCache.size() != static_cast<size_t>(kNumberOfCacheFiles.numModelCache)) {
151         callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
152         return toAStatus(ErrorStatus::INVALID_ARGUMENT,
153                          "Invalid modelCache, size = " + std::to_string(modelCache.size()));
154     }
155     if (dataCache.size() != static_cast<size_t>(kNumberOfCacheFiles.numDataCache)) {
156         callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
157         return toAStatus(ErrorStatus::INVALID_ARGUMENT,
158                          "Invalid modelCache, size = " + std::to_string(dataCache.size()));
159     }
160     if (token.size() != IDevice::BYTE_SIZE_OF_CACHE_TOKEN) {
161         callback->notify(ErrorStatus::INVALID_ARGUMENT, nullptr);
162         return toAStatus(
163                 ErrorStatus::INVALID_ARGUMENT,
164                 "Invalid cache token, size = " + std::to_string(IDevice::BYTE_SIZE_OF_CACHE_TOKEN));
165     }
166     callback->notify(ErrorStatus::GENERAL_FAILURE, nullptr);
167     return ndk::ScopedAStatus::ok();
168 }
169 
prepareModelFromCache(int64_t,const std::vector<ndk::ScopedFileDescriptor> &,const std::vector<ndk::ScopedFileDescriptor> &,const std::vector<uint8_t> &,const std::shared_ptr<IPreparedModelCallback> & callback)170 ndk::ScopedAStatus InvalidDevice::prepareModelFromCache(
171         int64_t /*deadline*/, const std::vector<ndk::ScopedFileDescriptor>& /*modelCache*/,
172         const std::vector<ndk::ScopedFileDescriptor>& /*dataCache*/,
173         const std::vector<uint8_t>& /*token*/,
174         const std::shared_ptr<IPreparedModelCallback>& callback) {
175     callback->notify(ErrorStatus::GENERAL_FAILURE, nullptr);
176     return toAStatus(ErrorStatus::GENERAL_FAILURE, "InvalidDevice");
177 }
178 
179 }  // namespace aidl::android::hardware::neuralnetworks
180