diff options
author | Xusong Wang <xusongw@google.com> | 2021-03-03 16:20:37 -0800 |
---|---|---|
committer | Xusong Wang <xusongw@google.com> | 2021-05-06 13:56:32 -0700 |
commit | 727a7b2104b0962509fedffe720eec508b2ee6de (patch) | |
tree | db56f9f27626747458a78fe1a1c5454fc666794a /neuralnetworks/1.3/utils/test/PreparedModelTest.cpp | |
parent | 57c9114315650206adab4beaa97e07d27e3e4b10 (diff) |
Introduce reusable execution to canonical interface -- HAL.
This CL modifies the canonical interface for reusable executions:
- Add new interface: IExecution with compute and computeFenced methods
- Add new method IPreparedModel::createExecution
In the NNAPI runtime, the new interface IExecution is used to
memoize request-specific execution resources (e.g. converted HAL
request). The expected usage is that, IPreparedModel::createExecution
will be invoked in the first computation of a reusable NDK ANNExecution
object, and IExecution::compute* will be invoked repeatedly.
The IPreparedModel::execute* methods are preserved to avoid redundant
object creation and memoization overhead for a single-time
(non-reusable) execution.
For a vendor implementing the canonical interfaces, only the
IPreparedModel::execute* methods will be called because there is
currently no reusable execution at the HAL interface. A DefaultExecution
implementation is provided to reduce the work needed on the vendor side.
Bug: 184073769
Test: NNT_static
Test: neuralnetworks_utils_hal_1_0_test
Test: neuralnetworks_utils_hal_1_1_test
Test: neuralnetworks_utils_hal_1_2_test
Test: neuralnetworks_utils_hal_1_3_test
Test: neuralnetworks_utils_hal_common_test
Test: neuralnetworks_utils_hal_aidl_test
Change-Id: I91790bb5ccf5ae648687fe603f88ffda2c9fd2b2
Diffstat (limited to 'neuralnetworks/1.3/utils/test/PreparedModelTest.cpp')
-rw-r--r-- | neuralnetworks/1.3/utils/test/PreparedModelTest.cpp | 358 |
1 files changed, 358 insertions, 0 deletions
diff --git a/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp b/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp index 5303c2ad23..6dbbd6bd7e 100644 --- a/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp +++ b/neuralnetworks/1.3/utils/test/PreparedModelTest.cpp @@ -22,6 +22,7 @@ #include <android/hardware/neuralnetworks/1.3/IFencedExecutionCallback.h> #include <gmock/gmock.h> #include <gtest/gtest.h> +#include <nnapi/IExecution.h> #include <nnapi/IPreparedModel.h> #include <nnapi/TypeUtils.h> #include <nnapi/Types.h> @@ -462,6 +463,363 @@ TEST(PreparedModelTest, executeFencedDeadObject) { EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT); } +TEST(PreparedModelTest, reusableExecuteSync) { + // setup call + const uint32_t kNumberOfComputations = 2; + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value(); + EXPECT_CALL(*mockPreparedModel, executeSynchronously_1_3(_, _, _, _, _)) + .Times(kNumberOfComputations) + .WillRepeatedly( + Invoke(makeExecuteSynchronously(V1_3::ErrorStatus::NONE, {}, kNoTiming))); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute repeatedly + for (uint32_t i = 0; i < kNumberOfComputations; i++) { + const auto computeResult = createResult.value()->compute({}); + EXPECT_TRUE(computeResult.has_value()) << "Failed with " << computeResult.error().code + << ": " << computeResult.error().message; + } +} + +TEST(PreparedModelTest, reusableExecuteSyncError) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value(); + 
EXPECT_CALL(*mockPreparedModel, executeSynchronously_1_3(_, _, _, _, _)) + .Times(1) + .WillOnce(Invoke( + makeExecuteSynchronously(V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming))); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteSyncTransportFailure) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value(); + EXPECT_CALL(*mockPreparedModel, executeSynchronously_1_3(_, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteSyncDeadObject) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value(); + EXPECT_CALL(*mockPreparedModel, executeSynchronously_1_3(_, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure)); + + // create execution + const auto createResult = 
preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT); +} + +TEST(PreparedModelTest, reusableExecuteAsync) { + // setup call + const uint32_t kNumberOfComputations = 2; + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/false).value(); + EXPECT_CALL(*mockPreparedModel, execute_1_3(_, _, _, _, _)) + .Times(kNumberOfComputations) + .WillRepeatedly(Invoke(makeExecuteAsynchronously( + V1_3::ErrorStatus::NONE, V1_3::ErrorStatus::NONE, {}, kNoTiming))); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute repeatedly + for (uint32_t i = 0; i < kNumberOfComputations; i++) { + const auto computeResult = createResult.value()->compute({}); + EXPECT_TRUE(computeResult.has_value()) << "Failed with " << computeResult.error().code + << ": " << computeResult.error().message; + } +} + +TEST(PreparedModelTest, reusableExecuteAsyncLaunchError) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/false).value(); + EXPECT_CALL(*mockPreparedModel, execute_1_3(_, _, _, _, _)) + .Times(1) + .WillOnce(Invoke(makeExecuteAsynchronously(V1_3::ErrorStatus::GENERAL_FAILURE, + V1_3::ErrorStatus::GENERAL_FAILURE, {}, + kNoTiming))); + + // create execution + const 
auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteAsyncReturnError) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/false).value(); + EXPECT_CALL(*mockPreparedModel, execute_1_3(_, _, _, _, _)) + .Times(1) + .WillOnce(Invoke(makeExecuteAsynchronously( + V1_3::ErrorStatus::NONE, V1_3::ErrorStatus::GENERAL_FAILURE, {}, kNoTiming))); + + // run test + const auto result = preparedModel->execute({}, {}, {}, {}); + + // verify result + ASSERT_FALSE(result.has_value()); + EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteAsyncTransportFailure) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/false).value(); + EXPECT_CALL(*mockPreparedModel, execute_1_3(_, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + 
+TEST(PreparedModelTest, reusableExecuteAsyncDeadObject) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/false).value(); + EXPECT_CALL(*mockPreparedModel, execute_1_3(_, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT); +} + +TEST(PreparedModelTest, reusableExecuteAsyncCrash) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/false).value(); + const auto ret = [&mockPreparedModel]() -> hardware::Return<V1_3::ErrorStatus> { + mockPreparedModel->simulateCrash(); + return V1_3::ErrorStatus::NONE; + }; + EXPECT_CALL(*mockPreparedModel, execute_1_3(_, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(ret)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->compute({}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT); +} + +TEST(PreparedModelTest, reusableExecuteFenced) { + // setup call + const uint32_t kNumberOfComputations = 2; + const auto mockPreparedModel = 
createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value(); + const auto mockCallback = MockFencedExecutionCallback::create(); + EXPECT_CALL(*mockCallback, getExecutionInfo(_)) + .Times(kNumberOfComputations) + .WillRepeatedly(Invoke(makeExecuteFencedCallbackReturn(V1_3::ErrorStatus::NONE, + kNoTiming, kNoTiming))); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(kNumberOfComputations) + .WillRepeatedly( + Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback))); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute repeatedly + for (uint32_t i = 0; i < kNumberOfComputations; i++) { + const auto computeResult = createResult.value()->computeFenced({}, {}, {}); + ASSERT_TRUE(computeResult.has_value()) << "Failed with " << computeResult.error().code + << ": " << computeResult.error().message; + const auto& [syncFence, callback] = computeResult.value(); + EXPECT_EQ(syncFence.syncWait({}), nn::SyncFence::FenceState::SIGNALED); + ASSERT_NE(callback, nullptr); + + // get results from callback + const auto callbackResult = callback(); + ASSERT_TRUE(callbackResult.has_value()) << "Failed with " << callbackResult.error().code + << ": " << callbackResult.error().message; + } +} + +TEST(PreparedModelTest, reusableExecuteFencedCallbackError) { + // setup call + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value(); + const auto mockCallback = MockFencedExecutionCallback::create(); + EXPECT_CALL(*mockCallback, getExecutionInfo(_)) + .Times(1) + 
.WillOnce(Invoke(makeExecuteFencedCallbackReturn(V1_3::ErrorStatus::GENERAL_FAILURE, + kNoTiming, kNoTiming))); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(1) + .WillOnce(Invoke(makeExecuteFencedReturn(V1_3::ErrorStatus::NONE, {}, mockCallback))); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->computeFenced({}, {}, {}); + ASSERT_TRUE(computeResult.has_value()) << "Failed with " << computeResult.error().code << ": " + << computeResult.error().message; + const auto& [syncFence, callback] = computeResult.value(); + EXPECT_NE(syncFence.syncWait({}), nn::SyncFence::FenceState::ACTIVE); + ASSERT_NE(callback, nullptr); + + // verify callback failure + const auto callbackResult = callback(); + ASSERT_FALSE(callbackResult.has_value()); + EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteFencedError) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value(); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(1) + .WillOnce(Invoke( + makeExecuteFencedReturn(V1_3::ErrorStatus::GENERAL_FAILURE, {}, nullptr))); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->computeFenced({}, {}, {}); + 
ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteFencedTransportFailure) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value(); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->computeFenced({}, {}, {}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::GENERAL_FAILURE); +} + +TEST(PreparedModelTest, reusableExecuteFencedDeadObject) { + // setup test + const auto mockPreparedModel = createMockPreparedModel(); + const auto preparedModel = + PreparedModel::create(mockPreparedModel, /*executeSynchronously=*/true).value(); + EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _, _)) + .Times(1) + .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure)); + + // create execution + const auto createResult = preparedModel->createReusableExecution({}, {}, {}); + ASSERT_TRUE(createResult.has_value()) + << "Failed with " << createResult.error().code << ": " << createResult.error().message; + ASSERT_NE(createResult.value(), nullptr); + + // invoke compute + const auto computeResult = createResult.value()->computeFenced({}, {}, {}); + ASSERT_FALSE(computeResult.has_value()); + EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT); +} TEST(PreparedModelTest, configureExecutionBurst) { // setup test const auto mockPreparedModel = 
MockPreparedModel::create(); |