Jean-Luc Brouillet | 4fb1e85 | 2017-08-20 18:16:36 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2017 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #define LOG_TAG "Memory" |
| 18 | |
| 19 | #include "Memory.h" |
| 20 | |
Slava Shklyaev | c958cd8 | 2020-12-10 16:55:55 +0000 | [diff] [blame] | 21 | #include <CpuExecutor.h> |
Slava Shklyaev | c958cd8 | 2020-12-10 16:55:55 +0000 | [diff] [blame] | 22 | #include <LegacyUtils.h> |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 23 | #include <android-base/scopeguard.h> |
Michael Butler | c4c57d9 | 2021-02-11 20:09:15 -0800 | [diff] [blame] | 24 | #include <android/hardware_buffer.h> |
Michael Butler | 7d1ae27 | 2021-02-17 18:00:31 -0800 | [diff] [blame] | 25 | #include <nnapi/IBurst.h> |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 26 | #include <nnapi/SharedMemory.h> |
| 27 | #include <nnapi/TypeUtils.h> |
| 28 | #include <nnapi/Types.h> |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 29 | |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 30 | #include <algorithm> |
Michael Butler | 90fddbd | 2019-08-02 15:04:00 -0700 | [diff] [blame] | 31 | #include <memory> |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 32 | #include <set> |
| 33 | #include <tuple> |
Michael Butler | 90fddbd | 2019-08-02 15:04:00 -0700 | [diff] [blame] | 34 | #include <utility> |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 35 | #include <vector> |
| 36 | |
| 37 | #include "CompilationBuilder.h" |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 38 | #include "Manager.h" |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 39 | #include "TypeManager.h" |
Slava Shklyaev | c958cd8 | 2020-12-10 16:55:55 +0000 | [diff] [blame] | 40 | |
Jean-Luc Brouillet | 4fb1e85 | 2017-08-20 18:16:36 -0700 | [diff] [blame] | 41 | namespace android { |
| 42 | namespace nn { |
Xusong Wang | d39f919 | 2019-11-27 15:45:42 -0800 | [diff] [blame] | 43 | namespace { |
| 44 | |
| 45 | // The validator for a client-managed single-dimensional memory pool with a known size. |
| 46 | // The memory may be used for request inputs, request outputs, or model constants. |
| 47 | class SizedMemoryValidator : public MemoryValidatorBase { |
| 48 | public: |
Michael Butler | b3082a5 | 2021-02-07 00:10:23 -0800 | [diff] [blame] | 49 | explicit SizedMemoryValidator(uint32_t size) : kSize(size) {} |
Xusong Wang | d39f919 | 2019-11-27 15:45:42 -0800 | [diff] [blame] | 50 | |
| 51 | bool validate(const CompilationBuilder*, IOType, uint32_t, const ANeuralNetworksOperandType*, |
| 52 | uint32_t offset, uint32_t length) const override { |
| 53 | NN_RET_CHECK(offset + length <= kSize) << "request size larger than the memory size."; |
| 54 | NN_RET_CHECK(offset != 0 || length != 0) << "memory size cannot be implied."; |
| 55 | return true; |
| 56 | } |
| 57 | |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 58 | Metadata getMetadata() const override { return {.logicalSize = kSize}; } |
| 59 | bool updateMetadata(const Metadata& metadata) override { |
| 60 | return metadata.logicalSize == 0 || metadata.logicalSize == kSize; |
| 61 | } |
| 62 | |
Xusong Wang | d39f919 | 2019-11-27 15:45:42 -0800 | [diff] [blame] | 63 | private: |
| 64 | const uint32_t kSize; |
| 65 | }; |
| 66 | |
// The validator for an AHardwareBuffer with Non-BLOB format.
// We require the memory only used for request inputs or request outputs,
// with both offset and length set to zero.
class AHardwareBufferNonBlobValidator : public MemoryValidatorBase {
   public:
    AHardwareBufferNonBlobValidator() = default;

    // A Non-BLOB AHardwareBuffer has no byte-addressable layout from the runtime's
    // point of view, so it may only be bound whole (offset == 0, length == 0) and
    // only to a request input/output of a compilation (compilation != nullptr);
    // it can never back a model constant.
    bool validate(const CompilationBuilder* compilation, IOType, uint32_t,
                  const ANeuralNetworksOperandType*, uint32_t offset,
                  uint32_t length) const override {
        NN_RET_CHECK(compilation != nullptr)
                << "cannot use Non-BLOB AHardwareBuffer as model constant";
        NN_RET_CHECK(offset == 0 && length == 0)
                << "non-zero offset (" << offset << ") and/or length (" << length
                << ") for Non-BLOB format AHardwareBuffer.";
        return true;
    }

    // Nothing is known about the size, dimensions, or operand of a Non-BLOB buffer.
    Metadata getMetadata() const override { return {}; }
    // Any metadata is acceptable, since this validator tracks none of its own.
    bool updateMetadata(const Metadata&) override { return true; }
};
| 88 | |
// The validator for a memory created from ANNMemory_createFromDesc.
// We require the memory only used as one of the pre-specified roles,
// with both offset and length set to zero.
class DeviceMemoryValidator : public MemoryValidatorBase {
   public:
    // roles      -- the (compilation, ioType, index) triples this memory may serve.
    // operand    -- the target operand; only type, scale, zeroPoint, and extraParams
    //               are meaningful (see kOperand below).
    // dimensions -- the dimensions specified at creation time; may be partially or
    //               fully unknown.
    DeviceMemoryValidator(std::set<CompilationRole> roles, Operand operand,
                          std::vector<uint32_t> dimensions)
        : kCompilationRoles(std::move(roles)),
          kOperand(std::move(operand)),
          kInitialDimensions(std::move(dimensions)),
          mUpdatedDimensions(kInitialDimensions) {}

    // Checks that this use matches one of the pre-declared roles, is bound whole
    // (offset == 0, length == 0), and — if the caller supplied an operand type —
    // that its dimensions are compatible with the creation-time dimensions.
    bool validate(const CompilationBuilder* compilation, IOType ioType, uint32_t index,
                  const ANeuralNetworksOperandType* type, uint32_t offset,
                  uint32_t length) const override {
        NN_RET_CHECK(kCompilationRoles.count({compilation, ioType, index}) > 0)
                << "invalid compilation role.";
        NN_RET_CHECK(offset == 0 && length == 0)
                << "non-zero offset and/or length for driver-allocated memory.";
        if (type) {
            const bool isTensor = TypeManager::get()->isTensorType(kOperand.type);
            NN_RET_CHECK(isTensor || type->dimensionCount == 0)
                    << "invalid dimensions for scalar memory.";
            std::vector<uint32_t> dimensions(type->dimensions,
                                             type->dimensions + type->dimensionCount);
            // We only check against kInitialDimensions here.
            // For input memories, mUpdatedDimensions will be checked in validateInputDimensions
            // at the beginning of a computation.
            const auto combined = combineDimensions(dimensions, kInitialDimensions);
            NN_RET_CHECK(combined.has_value())
                    << "incompatible dimensions between request and memory. (request: "
                    << toString(dimensions) << ", memory: " << toString(kInitialDimensions) << ")";
        }
        return true;
    }

    // An input memory must have been written to (mInitialized) and must match the
    // dimensions recorded by the last successful execution or copy exactly.
    bool validateInputDimensions(const std::vector<uint32_t>& dimensions) const override {
        NN_RET_CHECK(mInitialized) << "using an uninitialized memory as input";
        NN_RET_CHECK(dimensions == mUpdatedDimensions)
                << "incompatible input dimensions between request and memory. (request: "
                << toString(dimensions) << ", memory: " << toString(mUpdatedDimensions) << ")";
        return true;
    }

    // Reports the current (possibly updated) dimensions along with the size they
    // imply and the tracked operand metadata.
    Metadata getMetadata() const override {
        return {.logicalSize = TypeManager::get()->getSizeOfData(kOperand.type, mUpdatedDimensions),
                .dimensions = mUpdatedDimensions,
                .operand = kOperand};
    }

    // Merges incoming metadata into mUpdatedDimensions. Fails if the operand
    // metadata disagrees, if dimensions are given for a scalar, if the dimensions
    // conflict with the creation-time dimensions, or if the stated logical size
    // disagrees with the size implied by the combined dimensions.
    bool updateMetadata(const Metadata& metadata) override {
        NN_RET_CHECK(!metadata.operand.has_value() ||
                     (metadata.operand->type == kOperand.type &&
                      metadata.operand->scale == kOperand.scale &&
                      metadata.operand->zeroPoint == kOperand.zeroPoint &&
                      metadata.operand->extraParams == kOperand.extraParams));

        NN_RET_CHECK(metadata.dimensions.empty() ||
                     TypeManager::get()->isTensorType(kOperand.type));
        auto combined = combineDimensions(metadata.dimensions, kInitialDimensions);
        NN_RET_CHECK(combined.has_value());
        NN_RET_CHECK(metadata.logicalSize == 0 ||
                     metadata.logicalSize ==
                             TypeManager::get()->getSizeOfData(kOperand.type, combined.value()));
        mUpdatedDimensions = std::move(combined.value());
        return true;
    }

    // A size of 0 from getSizeOfData signals that the creation-time dimensions did
    // not fully determine the memory size (unknown dimensions or rank).
    bool createdWithUnknownShape() const override {
        return TypeManager::get()->getSizeOfData(kOperand.type, kInitialDimensions) == 0;
    }

    void setInitialized(bool initialized) override { mInitialized = initialized; }
    bool isInitialized() const override { return mInitialized; }

   private:
    const std::set<CompilationRole> kCompilationRoles;

    // Keep track of the data type, scale, zero point, and extra parameters of the target operand.
    // Other fields will be ignored, including dimensions, lifetime, location, etc.
    const Operand kOperand;

    // The dimensions of the memory when the memory object is created.
    // May have unknown dimensions or rank.
    const std::vector<uint32_t> kInitialDimensions;

    // The updated dimensions after a successful execution or memory copying.
    std::vector<uint32_t> mUpdatedDimensions;

    bool mInitialized = false;
};
| 180 | |
| 181 | } // namespace |
| 182 | |
// Wraps a client-provided memory of a known size; validation is delegated to a
// SizedMemoryValidator over the memory's byte size.
RuntimeMemory::RuntimeMemory(SharedMemory memory) : kMemory(std::move(memory)) {
    CHECK(kMemory != nullptr);
    mValidator = std::make_unique<SizedMemoryValidator>(nn::getSize(kMemory));
}
Xusong Wang | d39f919 | 2019-11-27 15:45:42 -0800 | [diff] [blame] | 187 | |
// Wraps a memory together with a caller-chosen validator (e.g. the Non-BLOB
// AHardwareBuffer validator).
RuntimeMemory::RuntimeMemory(SharedMemory memory, std::unique_ptr<MemoryValidatorBase> validator)
    : kMemory(std::move(memory)), mValidator(std::move(validator)) {
    CHECK(kMemory != nullptr);
}
Xusong Wang | d39f919 | 2019-11-27 15:45:42 -0800 | [diff] [blame] | 192 | |
// Wraps a driver-allocated buffer; note that no validator is installed here —
// callers (MemoryBuilder::allocate) attach a DeviceMemoryValidator afterwards.
RuntimeMemory::RuntimeMemory(SharedBuffer buffer) : kBuffer(std::move(buffer)) {}
Xusong Wang | d39f919 | 2019-11-27 15:45:42 -0800 | [diff] [blame] | 194 | |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 195 | Request::MemoryPool RuntimeMemory::getMemoryPool() const { |
| 196 | if (kBuffer != nullptr) { |
| 197 | return kBuffer->getToken(); |
Xusong Wang | 085d000 | 2020-01-08 16:52:37 -0800 | [diff] [blame] | 198 | } |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 199 | return kMemory; |
Xusong Wang | 085d000 | 2020-01-08 16:52:37 -0800 | [diff] [blame] | 200 | } |
| 201 | |
// Returns a RunTimePoolInfo mapping of kMemory, creating it lazily on first use.
// The result (including a failed mapping, i.e. std::nullopt) is cached under
// mMutex so the mapping is attempted at most once and the method is safe to call
// from multiple threads.
std::optional<RunTimePoolInfo> RuntimeMemory::getRunTimePoolInfo() const {
    std::lock_guard<std::mutex> guard(mMutex);
    if (!mHasCachedRunTimePoolInfo) {
        mCachedRunTimePoolInfo = RunTimePoolInfo::createFromMemory(kMemory);
        mHasCachedRunTimePoolInfo = true;
    }
    return mCachedRunTimePoolInfo;
}
| 210 | |
Michael Butler | 7d1ae27 | 2021-02-17 18:00:31 -0800 | [diff] [blame] | 211 | void RuntimeMemory::hold(const IBurst::OptionalCacheHold& cacheHold) const { |
| 212 | if (cacheHold != nullptr) { |
| 213 | std::lock_guard<std::mutex> guard(mMutex); |
| 214 | mHold.insert(cacheHold); |
| 215 | } |
Michael Butler | 50032c0 | 2019-03-14 17:34:48 -0700 | [diff] [blame] | 216 | } |
| 217 | |
Xusong Wang | 13df203 | 2020-02-06 18:24:01 -0800 | [diff] [blame] | 218 | static int copyHidlMemories(const std::optional<RunTimePoolInfo>& src, |
| 219 | const std::optional<RunTimePoolInfo>& dst) { |
| 220 | if (!src.has_value() || !dst.has_value()) { |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 221 | LOG(ERROR) << "ANeuralNetworksMemory_copy -- unable to map memory"; |
| 222 | return ANEURALNETWORKS_UNMAPPABLE; |
| 223 | } |
Xusong Wang | 13df203 | 2020-02-06 18:24:01 -0800 | [diff] [blame] | 224 | if (src->getSize() != dst->getSize()) { |
| 225 | LOG(ERROR) << "ANeuralNetworksMemory_copy -- incompatible memory size"; |
| 226 | return ANEURALNETWORKS_BAD_DATA; |
| 227 | } |
| 228 | CHECK(src->getBuffer() != nullptr); |
| 229 | CHECK(dst->getBuffer() != nullptr); |
| 230 | std::copy(src->getBuffer(), src->getBuffer() + src->getSize(), dst->getBuffer()); |
| 231 | dst->flush(); |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 232 | return ANEURALNETWORKS_NO_ERROR; |
| 233 | } |
| 234 | |
Michael Butler | b3082a5 | 2021-02-07 00:10:23 -0800 | [diff] [blame] | 235 | int copyIBufferToMemory(const SharedBuffer& src, const SharedMemory& dst) { |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 236 | const auto ret = src->copyTo(dst); |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 237 | if (!ret.has_value()) { |
| 238 | LOG(ERROR) << "ANeuralNetworksMemory_copy failure: " << ret.error().message; |
| 239 | return convertErrorStatusToResultCode(ret.error().code); |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 240 | } |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 241 | return ANEURALNETWORKS_NO_ERROR; |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 242 | } |
| 243 | |
Michael Butler | b3082a5 | 2021-02-07 00:10:23 -0800 | [diff] [blame] | 244 | int copyMemoryToIBuffer(const SharedMemory& src, const SharedBuffer& dst, |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 245 | const std::vector<uint32_t>& dimensions) { |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 246 | const auto ret = dst->copyFrom(src, dimensions); |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 247 | if (!ret.has_value()) { |
| 248 | LOG(ERROR) << "ANeuralNetworksMemory_copy failure: " << ret.error().message; |
| 249 | return convertErrorStatusToResultCode(ret.error().code); |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 250 | } |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 251 | return ANEURALNETWORKS_NO_ERROR; |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 252 | } |
| 253 | |
// Copies between two driver-managed buffers (possibly owned by different drivers)
// by staging the data through a temporary BLOB-mode AHardwareBuffer sized to the
// source's logical size.
static int copyIBuffers(const SharedBuffer& src, const SharedBuffer& dst,
                        const MemoryValidatorBase::Metadata& srcMetadata) {
    const auto [n, memoryAHWB] = MemoryRuntimeAHWB::create(srcMetadata.logicalSize);
    NN_RETURN_IF_ERROR(n);
    const SharedMemory& memory = memoryAHWB->getMemory();
    if (!validate(memory).ok()) return ANEURALNETWORKS_OUT_OF_MEMORY;
    // Stage: src buffer -> temporary AHWB -> dst buffer.
    NN_RETURN_IF_ERROR(copyIBufferToMemory(src, memory));
    NN_RETURN_IF_ERROR(copyMemoryToIBuffer(memory, dst, srcMetadata.dimensions));
    return ANEURALNETWORKS_NO_ERROR;
}
| 264 | |
// Performs the actual memory-to-memory copy for RuntimeMemory::copy, dispatching
// on whether each side is backed by a plain shared memory, a driver-managed
// buffer, or both. Does not update the destination's initialized flag — the
// caller (RuntimeMemory::copy) does that based on the returned status.
static int copyInternal(const RuntimeMemory& src, const RuntimeMemory& dst) {
    // Copying a memory onto itself is trivially a no-op.
    if (&src == &dst) return ANEURALNETWORKS_NO_ERROR;

    if (!src.getValidator().isInitialized()) {
        LOG(ERROR) << "ANeuralNetworksMemory_copy -- uninitialized source memory";
        return ANEURALNETWORKS_BAD_DATA;
    }

    // Propagate the source's size/dimensions/operand metadata to the destination;
    // this also verifies the two memories are compatible.
    const auto srcMetadata = src.getValidator().getMetadata();
    if (!dst.getValidator().updateMetadata(srcMetadata)) {
        LOG(ERROR) << "ANeuralNetworksMemory_copy -- incompatible memories";
        return ANEURALNETWORKS_BAD_DATA;
    }

    bool srcHasMemory = validate(src.getMemory()).ok();
    bool dstHasMemory = validate(dst.getMemory()).ok();
    bool srcHasIBuffer = src.getIBuffer() != nullptr;
    bool dstHasIBuffer = dst.getIBuffer() != nullptr;
    // Choose the copy strategy based on what backs each side. If neither side has
    // a usable backing, fall through to ANEURALNETWORKS_OP_FAILED.
    if (srcHasIBuffer && dstHasIBuffer) {
        return copyIBuffers(src.getIBuffer(), dst.getIBuffer(), srcMetadata);
    } else if (srcHasMemory && dstHasMemory) {
        return copyHidlMemories(src.getRunTimePoolInfo(), dst.getRunTimePoolInfo());
    } else if (srcHasMemory && dstHasIBuffer) {
        return copyMemoryToIBuffer(src.getMemory(), dst.getIBuffer(), srcMetadata.dimensions);
    } else if (srcHasIBuffer && dstHasMemory) {
        return copyIBufferToMemory(src.getIBuffer(), dst.getMemory());
    }
    return ANEURALNETWORKS_OP_FAILED;
}
| 294 | |
Slava Shklyaev | 9f29f43 | 2020-08-13 13:16:03 +0100 | [diff] [blame] | 295 | int RuntimeMemory::copy(const RuntimeMemory& src, const RuntimeMemory& dst) { |
Xusong Wang | 52b860b | 2019-11-27 16:23:36 -0800 | [diff] [blame] | 296 | int n = copyInternal(src, dst); |
| 297 | dst.getValidator().setInitialized(n == ANEURALNETWORKS_NO_ERROR); |
| 298 | return n; |
| 299 | } |
| 300 | |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 301 | bool MemoryBuilder::badState(const char* name) const { |
| 302 | if (mFinished) { |
| 303 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_" << name << " can't modify after finished"; |
| 304 | return true; |
| 305 | } |
| 306 | return false; |
| 307 | } |
| 308 | |
| 309 | int MemoryBuilder::addRole(const CompilationBuilder& compilation, IOType ioType, uint32_t index, |
Xusong Wang | 13bed55 | 2021-03-19 14:10:18 -0700 | [diff] [blame] | 310 | float prob) { |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 311 | const char* tag = ioType == IOType::INPUT ? "addInputRole" : "addOutputRole"; |
| 312 | if (badState(tag)) { |
| 313 | return ANEURALNETWORKS_BAD_STATE; |
| 314 | } |
| 315 | if (mRoles.count({&compilation, ioType, index}) > 0) { |
| 316 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_" << tag |
| 317 | << " -- the same operand is specified twice."; |
| 318 | return ANEURALNETWORKS_BAD_DATA; |
| 319 | } |
| 320 | |
Slava Shklyaev | 9f29f43 | 2020-08-13 13:16:03 +0100 | [diff] [blame] | 321 | std::vector<std::tuple<const RuntimePreparedModel*, IOType, uint32_t>> roles; |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 322 | auto callback = [&roles](const auto* preparedModel, IOType type, uint32_t index) { |
| 323 | roles.emplace_back(preparedModel, type, index); |
| 324 | }; |
| 325 | if (ioType == IOType::INPUT) { |
| 326 | if (compilation.forEachStepRoleOfInput(index, callback) != ANEURALNETWORKS_NO_ERROR) { |
| 327 | return ANEURALNETWORKS_BAD_DATA; |
| 328 | } |
| 329 | } else { |
| 330 | if (compilation.forEachStepRoleOfOutput(index, callback) != ANEURALNETWORKS_NO_ERROR) { |
| 331 | return ANEURALNETWORKS_BAD_DATA; |
| 332 | } |
| 333 | } |
| 334 | |
| 335 | const ModelBuilder* model = compilation.getModel(); |
| 336 | CHECK(model != nullptr); |
| 337 | Operand operand; |
| 338 | if (ioType == IOType::INPUT) { |
| 339 | if (index >= model->inputCount()) { |
| 340 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_addInputRole -- input index out of range."; |
| 341 | return ANEURALNETWORKS_BAD_DATA; |
| 342 | } |
| 343 | operand = model->getInputOperand(index); |
| 344 | } else { |
| 345 | if (index >= model->outputCount()) { |
| 346 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_addOutputRole -- output index out of range."; |
| 347 | return ANEURALNETWORKS_BAD_DATA; |
| 348 | } |
| 349 | operand = model->getOutputOperand(index); |
| 350 | } |
| 351 | if (mOperand.has_value()) { |
| 352 | if (operand.type != mOperand->type || operand.scale != mOperand->scale || |
| 353 | operand.zeroPoint != mOperand->zeroPoint || |
| 354 | operand.extraParams != mOperand->extraParams) { |
| 355 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_" << tag |
| 356 | << " -- incompatible operand metadata."; |
| 357 | return ANEURALNETWORKS_BAD_DATA; |
| 358 | } |
| 359 | } |
| 360 | if (!TypeManager::get()->isTensorType(operand.type) && !mDesc.dimensions.empty()) { |
| 361 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_" << tag << " -- incompatible dimensions."; |
| 362 | return ANEURALNETWORKS_BAD_DATA; |
| 363 | } |
| 364 | auto combined = combineDimensions(mDesc.dimensions, operand.dimensions); |
| 365 | if (!combined.has_value()) { |
| 366 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_" << tag << " -- incompatible dimensions."; |
| 367 | return ANEURALNETWORKS_BAD_DATA; |
| 368 | } |
| 369 | |
Xusong Wang | 13bed55 | 2021-03-19 14:10:18 -0700 | [diff] [blame] | 370 | if (prob > 1.0f || prob <= 0.0f) { |
| 371 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_" << tag << " -- invalid frequency " << prob; |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 372 | return ANEURALNETWORKS_BAD_DATA; |
| 373 | } |
| 374 | |
| 375 | mRoles.emplace(&compilation, ioType, index); |
Stuart Langley | 79f8042 | 2020-06-12 16:07:16 +1000 | [diff] [blame] | 376 | for (const auto& [preparedModel, type, ind] : roles) { |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 377 | uint32_t modelIndex = mDesc.preparedModels.add(preparedModel); |
Xusong Wang | 13bed55 | 2021-03-19 14:10:18 -0700 | [diff] [blame] | 378 | BufferRole role = {.modelIndex = modelIndex, .ioIndex = ind, .probability = prob}; |
Xusong Wang | 062ec50 | 2019-11-27 11:44:03 -0800 | [diff] [blame] | 379 | if (type == IOType::INPUT) { |
| 380 | mDesc.inputRoles.push_back(role); |
| 381 | } else { |
| 382 | mDesc.outputRoles.push_back(role); |
| 383 | } |
| 384 | } |
| 385 | mOperand = std::move(operand); |
| 386 | mDesc.dimensions = std::move(combined.value()); |
| 387 | return ANEURALNETWORKS_NO_ERROR; |
| 388 | } |
| 389 | |
| 390 | int MemoryBuilder::setDimensions(const std::vector<uint32_t>& dimensions) { |
| 391 | if (badState("setDimensions")) return ANEURALNETWORKS_BAD_STATE; |
| 392 | if (mOperand.has_value() && !TypeManager::get()->isTensorType(mOperand->type) && |
| 393 | !dimensions.empty()) { |
| 394 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_setDimensions -- incompatible dimensions for " |
| 395 | "scalars."; |
| 396 | return ANEURALNETWORKS_BAD_DATA; |
| 397 | } |
| 398 | auto combined = combineDimensions(mDesc.dimensions, dimensions); |
| 399 | if (!combined.has_value()) { |
| 400 | LOG(ERROR) << "ANeuralNetworksMemoryDesc_setDimensions -- incompatible dimensions."; |
| 401 | return ANEURALNETWORKS_BAD_DATA; |
| 402 | } |
| 403 | mDesc.dimensions = std::move(combined.value()); |
| 404 | return ANEURALNETWORKS_NO_ERROR; |
| 405 | } |
| 406 | |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 407 | static void logMemoryDescriptorToInfo(const MemoryDescriptor& desc, const Operand& operand) { |
| 408 | LOG(INFO) << "MemoryDescriptor start"; |
Slava Shklyaev | 9f29f43 | 2020-08-13 13:16:03 +0100 | [diff] [blame] | 409 | LOG(INFO) << " Data type: " << operand.type; |
| 410 | LOG(INFO) << " Scale: " << operand.scale; |
| 411 | LOG(INFO) << " Zero point: " << operand.zeroPoint; |
| 412 | LOG(INFO) << " Extra params: " << operand.extraParams; |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 413 | LOG(INFO) << " Dimensions: " << toString(desc.dimensions); |
Slava Shklyaev | c874ca9 | 2020-03-03 11:59:41 +0000 | [diff] [blame] | 414 | LOG(INFO) << " Prepared models [" << desc.preparedModels.size() << "]:"; |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 415 | for (const auto* preparedModel : desc.preparedModels) { |
| 416 | LOG(INFO) << " service = " << preparedModel->getDevice()->getName(); |
| 417 | } |
| 418 | LOG(INFO) << " Input roles [" << desc.inputRoles.size() << "]:"; |
| 419 | for (const auto& usage : desc.inputRoles) { |
Slava Shklyaev | 9f29f43 | 2020-08-13 13:16:03 +0100 | [diff] [blame] | 420 | LOG(INFO) << " " << usage; |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 421 | } |
| 422 | LOG(INFO) << " Output roles [" << desc.outputRoles.size() << "]:"; |
| 423 | for (const auto& usage : desc.outputRoles) { |
Slava Shklyaev | 9f29f43 | 2020-08-13 13:16:03 +0100 | [diff] [blame] | 424 | LOG(INFO) << " " << usage; |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 425 | } |
| 426 | LOG(INFO) << "MemoryDescriptor end"; |
| 427 | } |
| 428 | |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 429 | static std::set<const Device*> getDevices(const MemoryDescriptor& desc) { |
| 430 | std::set<const Device*> devices; |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 431 | for (const auto* preparedModel : desc.preparedModels) { |
| 432 | const auto* device = preparedModel->getDevice(); |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 433 | devices.insert(device); |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 434 | } |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 435 | return devices; |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 436 | } |
| 437 | |
// Finalizes the descriptor: requires at least one role, picks the allocator
// device, and decides whether AHardwareBuffer allocation and ashmem/AHWB
// fallback are permitted. After this succeeds the descriptor is immutable.
int MemoryBuilder::finish() {
    if (badState("finish")) return ANEURALNETWORKS_BAD_STATE;
    if (mRoles.empty()) {
        LOG(ERROR) << "ANeuralNetworksMemoryDesc_finish -- no role has been specified.";
        return ANEURALNETWORKS_BAD_DATA;
    }
    CHECK(mOperand.has_value());
    if (VLOG_IS_ON(MEMORY)) {
        logMemoryDescriptorToInfo(mDesc, mOperand.value());
    }
    // Device allocation is only attempted when all roles resolve to exactly one
    // device; with zero or multiple devices, fall back (mAllocator == nullptr).
    std::set<const Device*> devices = getDevices(mDesc);
    if (devices.empty()) {
        // This can happen with interpreted control flow.
        mAllocator = nullptr;
    } else if (devices.size() == 1) {
        mAllocator = *devices.begin();
        VLOG(MEMORY) << "Using " << mAllocator->getName() << " as allocator.";
    } else {
        LOG(INFO) << "MemoryBuilder::finish -- cannot handle multiple devices.";
        mAllocator = nullptr;
    }
    // BLOB-mode AHWB fallback requires every device to support at least the 1.3 HAL.
    mSupportsAhwb = std::all_of(devices.begin(), devices.end(), [](const auto* device) {
        return device->getFeatureLevel() >= kHalVersionV1_3ToApi.featureLevel;
    });
    // Fallback allocation is disabled if any compilation was created with an
    // explicit device list.
    mShouldFallback = std::none_of(mRoles.begin(), mRoles.end(), [](const auto& role) {
        const auto* cb = std::get<const CompilationBuilder*>(role);
        return cb->createdWithExplicitDeviceList();
    });
    // A size of 0 means the dimensions are not fully specified, so a plain
    // fixed-size fallback allocation is impossible.
    const uint32_t size = TypeManager::get()->getSizeOfData(mOperand->type, mDesc.dimensions);
    mShouldFallback &= (size != 0);
    mFinished = true;
    return ANEURALNETWORKS_NO_ERROR;
}
| 471 | |
// Allocates a memory from a finished descriptor. Tries the single allocator
// device first (if any), then falls back to a BLOB-mode AHardwareBuffer or
// ashmem when permitted by finish(). On success the memory is equipped with a
// DeviceMemoryValidator carrying the descriptor's roles/operand/dimensions.
// Returns {result code, memory (nullptr on failure)}.
std::pair<int, std::unique_ptr<RuntimeMemory>> MemoryBuilder::allocate() const {
    if (!mFinished) {
        LOG(ERROR) << "ANeuralNetworksMemory_createFromDesc -- passed an unfinished descriptor";
        return {ANEURALNETWORKS_BAD_STATE, nullptr};
    }

    int n = ANEURALNETWORKS_OP_FAILED;
    std::unique_ptr<RuntimeMemory> memory;
    CHECK(mOperand.has_value());

    // Try allocate the memory on device.
    if (mAllocator != nullptr) {
        std::tie(n, memory) = mAllocator->allocate(mDesc, mOperand->type);
    }

    // If failed, fallback to ashmem or BLOB mode AHWB.
    if (n != ANEURALNETWORKS_NO_ERROR && mShouldFallback) {
        // mShouldFallback guarantees size != 0 here (checked in finish()).
        const uint32_t size = TypeManager::get()->getSizeOfData(mOperand->type, mDesc.dimensions);
        if (mSupportsAhwb) {
            VLOG(MEMORY) << "MemoryBuilder::allocate -- fallback to BLOB mode AHWB.";
            std::tie(n, memory) = MemoryRuntimeAHWB::create(size);
        } else {
            VLOG(MEMORY) << "MemoryBuilder::allocate -- fallback to ashmem.";
            std::tie(n, memory) = MemoryAshmem::create(size);
        }
    }

    if (n == ANEURALNETWORKS_NO_ERROR) {
        CHECK(memory != nullptr);
        auto validator =
                std::make_unique<DeviceMemoryValidator>(mRoles, mOperand.value(), mDesc.dimensions);
        memory->setValidator(std::move(validator));
    }
    return {n, std::move(memory)};
}
| 507 | |
Michael Butler | 90fddbd | 2019-08-02 15:04:00 -0700 | [diff] [blame] | 508 | std::pair<int, std::unique_ptr<MemoryAshmem>> MemoryAshmem::create(uint32_t size) { |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 509 | auto memory = createSharedMemory(size); |
| 510 | if (!memory.has_value()) { |
| 511 | LOG(ERROR) << "RuntimeMemory::create() failed: " << memory.error().message; |
| 512 | return {convertErrorStatusToResultCode(memory.error().code), nullptr}; |
| 513 | } |
| 514 | auto mapping = map(memory.value()); |
| 515 | if (!mapping.has_value()) { |
| 516 | LOG(ERROR) << "RuntimeMemory::create() map failed: " << mapping.error().message; |
| 517 | return {convertErrorStatusToResultCode(mapping.error().code), nullptr}; |
David Gross | f9a33a8 | 2017-11-22 11:41:55 -0800 | [diff] [blame] | 518 | } |
Michael Butler | 90fddbd | 2019-08-02 15:04:00 -0700 | [diff] [blame] | 519 | return {ANEURALNETWORKS_NO_ERROR, |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 520 | std::make_unique<MemoryAshmem>(std::move(memory).value(), std::move(mapping).value())}; |
Jean-Luc Brouillet | d409e2c | 2017-09-27 23:59:20 -0700 | [diff] [blame] | 521 | } |
| 522 | |
Michael Butler | 90fddbd | 2019-08-02 15:04:00 -0700 | [diff] [blame] | 523 | uint8_t* MemoryAshmem::getPointer() const { |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 524 | return static_cast<uint8_t*>(std::get<void*>(kMapping.pointer)); |
Michael Butler | 90fddbd | 2019-08-02 15:04:00 -0700 | [diff] [blame] | 525 | } |
| 526 | |
Michael Butler | b3082a5 | 2021-02-07 00:10:23 -0800 | [diff] [blame] | 527 | MemoryAshmem::MemoryAshmem(SharedMemory memory, Mapping mapping) |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 528 | : RuntimeMemory(std::move(memory)), kMapping(std::move(mapping)) {} |
Michael Butler | 90fddbd | 2019-08-02 15:04:00 -0700 | [diff] [blame] | 529 | |
| 530 | std::pair<int, std::unique_ptr<MemoryFd>> MemoryFd::create(size_t size, int prot, int fd, |
| 531 | size_t offset) { |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 532 | auto memory = createSharedMemoryFromFd(size, prot, fd, offset); |
| 533 | if (!memory.has_value()) { |
| 534 | LOG(ERROR) << "Failed to create memory from fd: " << memory.error().message; |
| 535 | return {convertErrorStatusToResultCode(memory.error().code), nullptr}; |
Jean-Luc Brouillet | d409e2c | 2017-09-27 23:59:20 -0700 | [diff] [blame] | 536 | } |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 537 | return {ANEURALNETWORKS_NO_ERROR, std::make_unique<MemoryFd>(std::move(memory).value())}; |
Jean-Luc Brouillet | d409e2c | 2017-09-27 23:59:20 -0700 | [diff] [blame] | 538 | } |
| 539 | |
Michael Butler | b3082a5 | 2021-02-07 00:10:23 -0800 | [diff] [blame] | 540 | MemoryFd::MemoryFd(SharedMemory memory) : RuntimeMemory(std::move(memory)) {} |
David Gross | f9a33a8 | 2017-11-22 11:41:55 -0800 | [diff] [blame] | 541 | |
Michael Butler | 90fddbd | 2019-08-02 15:04:00 -0700 | [diff] [blame] | 542 | std::pair<int, std::unique_ptr<MemoryAHWB>> MemoryAHWB::create(const AHardwareBuffer& ahwb) { |
Michael Butler | 09160fd | 2021-02-04 22:05:22 -0800 | [diff] [blame] | 543 | auto memory = createSharedMemoryFromAHWB(const_cast<AHardwareBuffer*>(&ahwb), |
| 544 | /*takeOwnership=*/false); |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 545 | if (!memory.has_value()) { |
| 546 | LOG(ERROR) << "Failed to create memory from AHWB: " << memory.error().message; |
| 547 | return {convertErrorStatusToResultCode(memory.error().code), nullptr}; |
| 548 | } |
| 549 | |
Xusong Wang | d39f919 | 2019-11-27 15:45:42 -0800 | [diff] [blame] | 550 | std::unique_ptr<MemoryValidatorBase> validator; |
Michael Butler | 81550a9 | 2021-03-25 15:28:52 -0700 | [diff] [blame] | 551 | if (isAhwbBlob(memory.value())) { |
| 552 | validator = std::make_unique<SizedMemoryValidator>(nn::getSize(memory.value())); |
Jean-Luc Brouillet | d409e2c | 2017-09-27 23:59:20 -0700 | [diff] [blame] | 553 | } else { |
Xusong Wang | d39f919 | 2019-11-27 15:45:42 -0800 | [diff] [blame] | 554 | validator = std::make_unique<AHardwareBufferNonBlobValidator>(); |
Jean-Luc Brouillet | d409e2c | 2017-09-27 23:59:20 -0700 | [diff] [blame] | 555 | } |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 556 | |
| 557 | auto memoryAHWB = std::make_unique<MemoryAHWB>(std::move(memory).value(), std::move(validator)); |
| 558 | return {ANEURALNETWORKS_NO_ERROR, std::move(memoryAHWB)}; |
| 559 | } |
Michael Butler | 90fddbd | 2019-08-02 15:04:00 -0700 | [diff] [blame] | 560 | |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 561 | std::pair<int, std::unique_ptr<MemoryRuntimeAHWB>> MemoryRuntimeAHWB::create(uint32_t size) { |
| 562 | AHardwareBuffer* ahwb = nullptr; |
| 563 | const auto usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN | AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN; |
| 564 | const AHardwareBuffer_Desc desc = { |
| 565 | .width = size, |
| 566 | .height = 1, |
| 567 | .layers = 1, |
| 568 | .format = AHARDWAREBUFFER_FORMAT_BLOB, |
| 569 | .usage = usage, |
| 570 | .stride = size, |
| 571 | }; |
| 572 | int err = AHardwareBuffer_allocate(&desc, &ahwb); |
| 573 | if (err != 0 || ahwb == nullptr) { |
| 574 | LOG(ERROR) << "Failed to allocate BLOB mode AHWB."; |
| 575 | return {ANEURALNETWORKS_OP_FAILED, nullptr}; |
| 576 | } |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 577 | |
Michael Butler | 09160fd | 2021-02-04 22:05:22 -0800 | [diff] [blame] | 578 | auto memory = createSharedMemoryFromAHWB(ahwb, /*takeOWnership=*/true); |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 579 | if (!memory.has_value()) { |
| 580 | LOG(ERROR) << "Failed to allocate BLOB mode AHWB: " << memory.error().message; |
| 581 | return {convertErrorStatusToResultCode(memory.error().code), nullptr}; |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 582 | } |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 583 | auto mapping = map(memory.value()); |
| 584 | if (!mapping.has_value()) { |
| 585 | LOG(ERROR) << "Failed to map BLOB mode AHWB: " << mapping.error().message; |
| 586 | return {convertErrorStatusToResultCode(mapping.error().code), nullptr}; |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 587 | } |
Michael Butler | 09160fd | 2021-02-04 22:05:22 -0800 | [diff] [blame] | 588 | auto memoryAHWB = std::make_unique<MemoryRuntimeAHWB>(std::move(memory).value(), |
| 589 | std::move(mapping).value()); |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 590 | return {ANEURALNETWORKS_NO_ERROR, std::move(memoryAHWB)}; |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 591 | } |
| 592 | |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 593 | uint8_t* MemoryRuntimeAHWB::getPointer() const { |
| 594 | return static_cast<uint8_t*>(std::get<void*>(kMapping.pointer)); |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 595 | } |
| 596 | |
Michael Butler | 09160fd | 2021-02-04 22:05:22 -0800 | [diff] [blame] | 597 | MemoryRuntimeAHWB::MemoryRuntimeAHWB(SharedMemory memory, Mapping mapping) |
| 598 | : RuntimeMemory(std::move(memory)), kMapping(std::move(mapping)) {} |
Xusong Wang | 1b836a2 | 2020-02-06 13:17:33 -0800 | [diff] [blame] | 599 | |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 600 | std::pair<int, std::unique_ptr<MemoryFromDevice>> MemoryFromDevice::create(SharedBuffer buffer) { |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 601 | if (buffer == nullptr) { |
| 602 | LOG(ERROR) << "nullptr IBuffer for device memory."; |
Xusong Wang | 7ad067b | 2020-04-09 14:52:23 -0700 | [diff] [blame] | 603 | return {ANEURALNETWORKS_OP_FAILED, nullptr}; |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 604 | } |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 605 | return {ANEURALNETWORKS_NO_ERROR, std::make_unique<MemoryFromDevice>(std::move(buffer))}; |
| 606 | } |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 607 | |
Slava Shklyaev | 3698ad4 | 2020-11-06 13:50:31 +0000 | [diff] [blame] | 608 | MemoryFromDevice::MemoryFromDevice(SharedBuffer buffer) : RuntimeMemory(std::move(buffer)) {} |
Xusong Wang | 550e2a5 | 2019-11-27 12:18:19 -0800 | [diff] [blame] | 609 | |
Michael Butler | f20c5b5 | 2019-07-22 18:59:46 -0700 | [diff] [blame] | 610 | } // namespace nn |
| 611 | } // namespace android |