From afc4d7cfe753669b08562eba8f58cbceefed334f Mon Sep 17 00:00:00 2001
From: Michael Butler
Date: Thu, 10 Dec 2020 15:38:45 -0800
Subject: Create unit tests for NN interface utility code

This CL introduces unit tests to validate the V1_X::utils::Device,
*PreparedModel, and *Buffer adapter classes. It does so by mocking the
underlying HIDL interface in order to simulate a driver returning bad data,
HIDL transport failures, and service crashes.

Note that the purpose of these new tests is to validate the adapter classes
themselves, not the HIDL interfaces they use. For example, because
nn::IPreparedModel does not currently define a method for configuring a burst
execution, V1_[23]::utils::PreparedModel similarly does not use
hardware::neuralnetworks::V1_[23]::IPreparedModel's configureExecutionBurst
method.

This CL also introduces unit tests to validate the utils::Resilient* adapter
classes, and mocks DEAD_OBJECT failures to ensure that the underlying object
can be recovered appropriately.

Bug: 163801800
Test: mma
Test: atest neuralnetworks_utils_hal_common_test
Test: atest neuralnetworks_utils_hal_1_[0-3]_test
Change-Id: I2c79865bf666d3f4bf53061ff5090746403583e9
---
 .../common/test/ResilientPreparedModelTest.cpp | 297 +++++++++++++++++++++
 1 file changed, 297 insertions(+)
 create mode 100644 neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp

diff --git a/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp b/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
new file mode 100644
index 0000000000..6d86e10df2
--- /dev/null
+++ b/neuralnetworks/utils/common/test/ResilientPreparedModelTest.cpp
@@ -0,0 +1,297 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+#include <nnapi/TypeUtils.h>
+#include <nnapi/Types.h>
+#include <nnapi/hal/ResilientPreparedModel.h>
+
+#include <memory>
+#include <tuple>
+#include <utility>
+
+#include "MockPreparedModel.h"
+
+namespace android::hardware::neuralnetworks::utils {
+namespace {
+
+using ::testing::_;
+using ::testing::InvokeWithoutArgs;
+using ::testing::Return;
+
+using SharedMockPreparedModel = std::shared_ptr<const nn::MockPreparedModel>;
+using MockPreparedModelFactory =
+        ::testing::MockFunction<nn::GeneralResult<nn::SharedPreparedModel>()>;
+
+SharedMockPreparedModel createConfiguredMockPreparedModel() {
+    return std::make_shared<const nn::MockPreparedModel>();
+}
+
+std::tuple<std::shared_ptr<const nn::MockPreparedModel>, std::unique_ptr<MockPreparedModelFactory>,
+           std::shared_ptr<const ResilientPreparedModel>>
+setup() {
+    auto mockPreparedModel = std::make_shared<const nn::MockPreparedModel>();
+
+    auto mockPreparedModelFactory = std::make_unique<MockPreparedModelFactory>();
+    EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(Return(mockPreparedModel));
+
+    auto buffer = ResilientPreparedModel::create(mockPreparedModelFactory->AsStdFunction()).value();
+    return std::make_tuple(std::move(mockPreparedModel), std::move(mockPreparedModelFactory),
+                           std::move(buffer));
+}
+
+constexpr auto makeError = [](nn::ErrorStatus status) {
+    return [status](const auto&... /*args*/) { return nn::error(status); };
+};
+const auto kReturnGeneralFailure = makeError(nn::ErrorStatus::GENERAL_FAILURE);
+const auto kReturnDeadObject = makeError(nn::ErrorStatus::DEAD_OBJECT);
+
+const auto kNoExecutionError =
+        nn::ExecutionResult<std::pair<std::vector<nn::OutputShape>, nn::Timing>>{};
+const auto kNoFencedExecutionError =
+        nn::GeneralResult<std::pair<nn::SyncFence, nn::ExecuteFencedInfoCallback>>(
+                std::make_pair(nn::SyncFence::createAsSignaled(), nullptr));
+
+struct FakeResource {};
+
+}  // namespace
+
+TEST(ResilientPreparedModelTest, invalidPreparedModelFactory) {
+    // setup call
+    const auto invalidPreparedModelFactory = ResilientPreparedModel::Factory{};
+
+    // run test
+    const auto result = ResilientPreparedModel::create(invalidPreparedModelFactory);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::INVALID_ARGUMENT);
+}
+
+TEST(ResilientPreparedModelTest, preparedModelFactoryFailure) {
+    // setup call
+    const auto invalidPreparedModelFactory = kReturnGeneralFailure;
+
+    // run test
+    const auto result = ResilientPreparedModel::create(invalidPreparedModelFactory);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientPreparedModelTest, getPreparedModel) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+
+    // run test
+    const auto result = preparedModel->getPreparedModel();
+
+    // verify result
+    EXPECT_TRUE(result == mockPreparedModel);
+}
+
+TEST(ResilientPreparedModelTest, execute) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _))
+            .Times(1)
+            .WillOnce(Return(kNoExecutionError));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientPreparedModelTest, executeError) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientPreparedModelTest, executeDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+    constexpr auto ret = [] { return nn::error(nn::ErrorStatus::GENERAL_FAILURE); };
+    EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(ret);
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientPreparedModelTest, executeDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, execute(_, _, _, _)).Times(1).WillOnce(kReturnDeadObject);
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*recoveredMockPreparedModel, execute(_, _, _, _))
+            .Times(1)
+            .WillOnce(Return(kNoExecutionError));
+    EXPECT_CALL(*mockPreparedModelFactory, Call())
+            .Times(1)
+            .WillOnce(Return(recoveredMockPreparedModel));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientPreparedModelTest, executeFenced) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Return(kNoFencedExecutionError));
+
+    // run test
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientPreparedModelTest, executeFencedError) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST(ResilientPreparedModelTest, executeFencedDeadObjectFailedRecovery) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnDeadObject);
+    EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST(ResilientPreparedModelTest, executeFencedDeadObjectSuccessfulRecovery) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(kReturnDeadObject);
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*recoveredMockPreparedModel, executeFenced(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Return(kNoFencedExecutionError));
+    EXPECT_CALL(*mockPreparedModelFactory, Call())
+            .Times(1)
+            .WillOnce(Return(recoveredMockPreparedModel));
+
+    // run test
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST(ResilientPreparedModelTest, getUnderlyingResource) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    EXPECT_CALL(*mockPreparedModel, getUnderlyingResource())
+            .Times(1)
+            .WillOnce(Return(FakeResource{}));
+
+    // run test
+    const auto resource = preparedModel->getUnderlyingResource();
+
+    // verify resource
+    const FakeResource* maybeFakeResource = std::any_cast<FakeResource>(&resource);
+    EXPECT_NE(maybeFakeResource, nullptr);
+}
+
+TEST(ResilientPreparedModelTest, recover) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*mockPreparedModelFactory, Call())
+            .Times(1)
+            .WillOnce(Return(recoveredMockPreparedModel));
+
+    // run test
+    const auto result = preparedModel->recover(mockPreparedModel.get());
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() == recoveredMockPreparedModel);
+}
+
+TEST(ResilientPreparedModelTest, recoverFailure) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*mockPreparedModelFactory, Call()).Times(1).WillOnce(kReturnGeneralFailure);
+
+    // run test
+    const auto result = preparedModel->recover(mockPreparedModel.get());
+
+    // verify result
+    EXPECT_FALSE(result.has_value());
+}
+
+TEST(ResilientPreparedModelTest, someoneElseRecovered) {
+    // setup call
+    const auto [mockPreparedModel, mockPreparedModelFactory, preparedModel] = setup();
+    const auto recoveredMockPreparedModel = createConfiguredMockPreparedModel();
+    EXPECT_CALL(*mockPreparedModelFactory, Call())
+            .Times(1)
+            .WillOnce(Return(recoveredMockPreparedModel));
+    preparedModel->recover(mockPreparedModel.get());
+
+    // run test
+    const auto result = preparedModel->recover(mockPreparedModel.get());
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_TRUE(result.value() == recoveredMockPreparedModel);
+}
+
+}  // namespace android::hardware::neuralnetworks::utils
--
cgit v1.2.3