diff options
author | Xusong Wang <xusongw@google.com> | 2021-03-03 16:20:37 -0800 |
---|---|---|
committer | Xusong Wang <xusongw@google.com> | 2021-05-06 13:56:32 -0700 |
commit | 727a7b2104b0962509fedffe720eec508b2ee6de (patch) | |
tree | db56f9f27626747458a78fe1a1c5454fc666794a /neuralnetworks/aidl/utils/test/PreparedModelTest.cpp | |
parent | 57c9114315650206adab4beaa97e07d27e3e4b10 (diff) |
Introduce reusable execution to canonical interface -- HAL.
This CL modifies the canonical interface for reusable executions:
- Add new interface: IExecution with compute and computeFenced methods
- Add new method IPreparedModel::createExecution
In NNAPI runtime, the new interface IExecution is used to
memoize request-specific execution resources (e.g. converted HAL
request). The expected usage is that, IPreparedModel::createExecution
will be invoked in the first computation of a reusable NDK ANNExecution
object, and IExecution::compute* will be invoked repeatedly.
The IPreparedModel::execute* methods are preserved to avoid redundant
object creation and memoization overhead for a single-time
(non-reusable) execution.
For a vendor implementing the canonical interfaces, only the
IPreparedModel::execute* methods will be called because there is
currently no reusable execution at the HAL interface. A DefaultExecution
implementation is provided to reduce the work needed on the vendor side.
Bug: 184073769
Test: NNT_static
Test: neuralnetworks_utils_hal_1_0_test
Test: neuralnetworks_utils_hal_1_1_test
Test: neuralnetworks_utils_hal_1_2_test
Test: neuralnetworks_utils_hal_1_3_test
Test: neuralnetworks_utils_hal_common_test
Test: neuralnetworks_utils_hal_aidl_test
Change-Id: I91790bb5ccf5ae648687fe603f88ffda2c9fd2b2
Diffstat (limited to 'neuralnetworks/aidl/utils/test/PreparedModelTest.cpp')
-rw-r--r-- | neuralnetworks/aidl/utils/test/PreparedModelTest.cpp | 220 |
1 file changed, 220 insertions, 0 deletions
diff --git a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp index ff98a7d947..8bb5c90d1e 100644 --- a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp +++ b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp @@ -21,6 +21,7 @@ #include <aidl/android/hardware/neuralnetworks/IFencedExecutionCallback.h> #include <gmock/gmock.h> #include <gtest/gtest.h> +#include <nnapi/IExecution.h> #include <nnapi/IPreparedModel.h> #include <nnapi/TypeUtils.h> #include <nnapi/Types.h> @@ -253,6 +254,225 @@ TEST(PreparedModelTest, executeFencedDeadObject) { EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT); } +TEST(PreparedModelTest, reusableExecuteSync) { + // setup call + const uint32_t kNumberOfComputations = 2; + const auto mockPreparedModel = MockPreparedModel::create(); + const auto preparedModel = PreparedModel::create(mockPreparedModel).value(); + const auto mockExecutionResult = ExecutionResult{ + .outputSufficientSize = true, + .outputShapes = {}, + .timing = kNoTiming, + }; + EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _)) + .Times(kNumberOfComputations) + .WillRepeatedly( + DoAll(SetArgPointee<4>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk))); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute repeatedly + for (uint32_t i = 0; i < kNumberOfComputations; i++) { + const auto computeResult = createResult.value()->compute({}); + EXPECT_TRUE(computeResult.has_value()) << "Failed with " << computeResult.error().code + << ": " << computeResult.error().message; + } +} + +TEST(PreparedModelTest, reusableExecuteSyncError) { + // setup test + const auto mockPreparedModel = MockPreparedModel::create(); + const auto preparedModel = 
PreparedModel::create(mockPreparedModel).value(); + EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _)) + .Times(1) + .WillOnce(Invoke(makeGeneralFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteSyncTransportFailure) { + // setup test + const auto mockPreparedModel = MockPreparedModel::create(); + const auto preparedModel = PreparedModel::create(mockPreparedModel).value(); + EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteSyncDeadObject) { + // setup test + const auto mockPreparedModel = MockPreparedModel::create(); + const auto preparedModel = PreparedModel::create(mockPreparedModel).value(); + EXPECT_CALL(*mockPreparedModel, executeSynchronously(_, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + 
ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT); +} + +TEST(PreparedModelTest, reusableExecuteFenced) { + // setup call + const uint32_t kNumberOfComputations = 2; + const auto mockPreparedModel = MockPreparedModel::create(); + const auto preparedModel = PreparedModel::create(mockPreparedModel).value(); + const auto mockCallback = MockFencedExecutionCallback::create(); + EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _)) + .Times(kNumberOfComputations) + .WillRepeatedly(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming), + SetArgPointee<2>(ErrorStatus::NONE), Invoke(makeStatusOk))); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(kNumberOfComputations) + .WillRepeatedly(Invoke(makeFencedExecutionResult(mockCallback))); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute repeatedly + for (uint32_t i = 0; i < kNumberOfComputations; i++) { + const auto computeResult = createResult.value()->computeFenced({}, {}, {}); + ASSERT_TRUE(computeResult.has_value()) << "Failed with " << computeResult.error().code + << ": " << computeResult.error().message; + const auto& [syncFence, callback] = computeResult.value(); + EXPECT_EQ(syncFence.syncWait({}), nn::SyncFence::FenceState::SIGNALED); + ASSERT_NE(callback, nullptr); + + // get results from callback + const auto callbackResult = callback(); + ASSERT_TRUE(callbackResult.has_value()) << "Failed with " << 
callbackResult.error().code + << ": " << callbackResult.error().message; + } +} + +TEST(PreparedModelTest, reusableExecuteFencedCallbackError) { + // setup call + const auto mockPreparedModel = MockPreparedModel::create(); + const auto preparedModel = PreparedModel::create(mockPreparedModel).value(); + const auto mockCallback = MockFencedExecutionCallback::create(); + EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _)) + .Times(1) + .WillOnce(Invoke(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming), + SetArgPointee<2>(ErrorStatus::GENERAL_FAILURE), + Invoke(makeStatusOk)))); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(1) + .WillOnce(Invoke(makeFencedExecutionResult(mockCallback))); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->computeFenced({}, {}, {}); + ASSERT_TRUE(computeResult.has_value()) << "Failed with " << computeResult.error().code << ": " + << computeResult.error().message; + const auto& [syncFence, callback] = computeResult.value(); + EXPECT_NE(syncFence.syncWait({}), nn::SyncFence::FenceState::ACTIVE); + ASSERT_NE(callback, nullptr); + + // verify callback failure + const auto callbackResult = callback(); + ASSERT_FALSE(callbackResult.has_value()); + EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteFencedError) { + // setup test + const auto mockPreparedModel = MockPreparedModel::create(); + const auto preparedModel = PreparedModel::create(mockPreparedModel).value(); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeGeneralFailure)); + + // create execution + const auto 
createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->computeFenced({}, {}, {}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteFencedTransportFailure) { + // setup test + const auto mockPreparedModel = MockPreparedModel::create(); + const auto preparedModel = PreparedModel::create(mockPreparedModel).value(); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->computeFenced({}, {}, {}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteFencedDeadObject) { + // setup test + const auto mockPreparedModel = MockPreparedModel::create(); + const auto preparedModel = PreparedModel::create(mockPreparedModel).value(); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto 
computeResult = createResult.value()->computeFenced({}, {}, {}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT); +} + TEST(PreparedModelTest, configureExecutionBurst) { // setup test const auto mockPreparedModel = MockPreparedModel::create(); |