Diffstat (limited to 'neuralnetworks/aidl/utils/test')
-rw-r--r--   neuralnetworks/aidl/utils/test/DeviceTest.cpp                  | 191
-rw-r--r--   neuralnetworks/aidl/utils/test/MockBuffer.h                    |   1
-rw-r--r--   neuralnetworks/aidl/utils/test/MockBurst.h                     |   5
-rw-r--r--   neuralnetworks/aidl/utils/test/MockDevice.h                    |   4
-rw-r--r--   neuralnetworks/aidl/utils/test/MockExecution.h                 |   2
-rw-r--r--   neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h  |   1
-rw-r--r--   neuralnetworks/aidl/utils/test/MockPreparedModel.h            |  13
-rw-r--r--   neuralnetworks/aidl/utils/test/PreparedModelTest.cpp          | 287
8 files changed, 461 insertions, 43 deletions
diff --git a/neuralnetworks/aidl/utils/test/DeviceTest.cpp b/neuralnetworks/aidl/utils/test/DeviceTest.cpp
index fb13af8d9f..73727b3974 100644
--- a/neuralnetworks/aidl/utils/test/DeviceTest.cpp
+++ b/neuralnetworks/aidl/utils/test/DeviceTest.cpp
@@ -61,7 +61,6 @@ constexpr PerformanceInfo kNoPerformanceInfo = {.execTime = std::numeric_limits<
                                                 .powerUsage = std::numeric_limits<float>::max()};
 constexpr NumberOfCacheFiles kNumberOfCacheFiles = {.numModelCache = nn::kMaxNumberOfCacheFiles - 1,
                                                     .numDataCache = nn::kMaxNumberOfCacheFiles};
-
 constexpr auto makeStatusOk = [] { return ndk::ScopedAStatus::ok(); };
 
 std::shared_ptr<MockDevice> createMockDevice() {
@@ -124,6 +123,18 @@ auto makePreparedModelReturn(ErrorStatus launchStatus, ErrorStatus returnStatus,
     };
 }
 
+const std::vector<nn::TokenValuePair> kHints = {nn::TokenValuePair{.token = 0, .value = {1}}};
+const std::vector<nn::ExtensionNameAndPrefix> kExtensionNameToPrefix = {
+        nn::ExtensionNameAndPrefix{.name = "com.android.nn_test", .prefix = 1}};
+auto makePreparedModelWithConfigReturn(ErrorStatus launchStatus, ErrorStatus returnStatus,
+                                       const std::shared_ptr<MockPreparedModel>& preparedModel) {
+    return [launchStatus, returnStatus, preparedModel](
+                   const Model& /*model*/, const PrepareModelConfig& /*config*/,
+                   const std::shared_ptr<IPreparedModelCallback>& cb) -> ndk::ScopedAStatus {
+        return makePreparedModelReturnImpl(launchStatus, returnStatus, preparedModel, cb);
+    };
+}
+
 auto makePreparedModelFromCacheReturn(ErrorStatus launchStatus, ErrorStatus returnStatus,
                                       const std::shared_ptr<MockPreparedModel>& preparedModel) {
     return [launchStatus, returnStatus, preparedModel](
@@ -560,6 +571,8 @@ TEST_P(DeviceTest, getSupportedOperationsDeadObject) {
 }
 
 TEST_P(DeviceTest, prepareModel) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -571,7 +584,7 @@ TEST_P(DeviceTest, prepareModel) {
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -580,6 +593,8 @@ TEST_P(DeviceTest, prepareModel) {
 }
 
 TEST_P(DeviceTest, prepareModelLaunchError) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -590,7 +605,7 @@ TEST_P(DeviceTest, prepareModelLaunchError) {
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -598,6 +613,8 @@ TEST_P(DeviceTest, prepareModelLaunchError) {
 }
 
 TEST_P(DeviceTest, prepareModelReturnError) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -608,7 +625,7 @@ TEST_P(DeviceTest, prepareModelReturnError) {
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -616,6 +633,8 @@ TEST_P(DeviceTest, prepareModelReturnError) {
 }
 
 TEST_P(DeviceTest, prepareModelNullptrError) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -626,7 +645,7 @@ TEST_P(DeviceTest, prepareModelNullptrError) {
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -634,6 +653,8 @@ TEST_P(DeviceTest, prepareModelNullptrError) {
 }
 
 TEST_P(DeviceTest, prepareModelTransportFailure) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -643,7 +664,7 @@ TEST_P(DeviceTest, prepareModelTransportFailure) {
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -651,6 +672,8 @@ TEST_P(DeviceTest, prepareModelTransportFailure) {
 }
 
 TEST_P(DeviceTest, prepareModelDeadObject) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup call
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -660,7 +683,7 @@ TEST_P(DeviceTest, prepareModelDeadObject) {
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -668,6 +691,8 @@ TEST_P(DeviceTest, prepareModelDeadObject) {
 }
 
 TEST_P(DeviceTest, prepareModelAsyncCrash) {
+    if (kVersion.level > nn::Version::Level::FEATURE_LEVEL_7) return;
+
     // setup test
     const auto mockDevice = createMockDevice();
     const auto device = Device::create(kName, mockDevice, kVersion).value();
@@ -681,7 +706,157 @@ TEST_P(DeviceTest, prepareModelAsyncCrash) {
 
     // run test
     const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
-                                             nn::Priority::DEFAULT, {}, {}, {}, {});
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, {}, {});
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfig) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    const auto mockPreparedModel = MockPreparedModel::create();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makePreparedModelWithConfigReturn(ErrorStatus::NONE, ErrorStatus::NONE,
+                                                               mockPreparedModel)));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    EXPECT_NE(result.value(), nullptr);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigLaunchError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makePreparedModelWithConfigReturn(
+                    ErrorStatus::GENERAL_FAILURE, ErrorStatus::GENERAL_FAILURE, nullptr)));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigReturnError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makePreparedModelWithConfigReturn(
+                    ErrorStatus::NONE, ErrorStatus::GENERAL_FAILURE, nullptr)));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigNullptrError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makePreparedModelWithConfigReturn(ErrorStatus::NONE, ErrorStatus::NONE,
+                                                               nullptr)));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigTransportFailure) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigDeadObject) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(DeviceTest, prepareModelWithConfigAsyncCrash) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockDevice = createMockDevice();
+    const auto device = Device::create(kName, mockDevice, kVersion).value();
+    const auto ret = [&device]() {
+        DeathMonitor::serviceDied(device->getDeathMonitor());
+        return ndk::ScopedAStatus::ok();
+    };
+    EXPECT_CALL(*mockDevice, prepareModelWithConfig(_, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(ret));
+
+    // run test
+    const auto result = device->prepareModel(kSimpleModel, nn::ExecutionPreference::DEFAULT,
+                                             nn::Priority::DEFAULT, {}, {}, {}, {}, kHints,
+                                             kExtensionNameToPrefix);
 
     // verify result
     ASSERT_FALSE(result.has_value());
diff --git a/neuralnetworks/aidl/utils/test/MockBuffer.h b/neuralnetworks/aidl/utils/test/MockBuffer.h
index f77fa86953..7a05a0f33f 100644
--- a/neuralnetworks/aidl/utils/test/MockBuffer.h
+++ b/neuralnetworks/aidl/utils/test/MockBuffer.h
@@ -21,7 +21,6 @@
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <hidl/Status.h>
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
diff --git a/neuralnetworks/aidl/utils/test/MockBurst.h b/neuralnetworks/aidl/utils/test/MockBurst.h
index 5083bbdc86..609bd305b3 100644
--- a/neuralnetworks/aidl/utils/test/MockBurst.h
+++ b/neuralnetworks/aidl/utils/test/MockBurst.h
@@ -21,7 +21,6 @@
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <hidl/Status.h>
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
@@ -32,6 +31,10 @@ class MockBurst final : public BnBurst {
                  bool measureTiming, int64_t deadline, int64_t loopTimeoutDuration,
                  ExecutionResult* executionResult),
                 (override));
+    MOCK_METHOD(ndk::ScopedAStatus, executeSynchronouslyWithConfig,
+                (const Request& request, const std::vector<int64_t>& memoryIdentifierTokens,
+                 const ExecutionConfig& config, int64_t deadline, ExecutionResult* executionResult),
+                (override));
     MOCK_METHOD(ndk::ScopedAStatus, releaseMemoryResource, (int64_t memoryIdentifierToken),
                 (override));
 };
diff --git a/neuralnetworks/aidl/utils/test/MockDevice.h b/neuralnetworks/aidl/utils/test/MockDevice.h
index 3a28d55580..47b83460a1 100644
--- a/neuralnetworks/aidl/utils/test/MockDevice.h
+++ b/neuralnetworks/aidl/utils/test/MockDevice.h
@@ -50,6 +50,10 @@ class MockDevice final : public BnDevice {
                  const std::vector<uint8_t>& token,
                  const std::shared_ptr<IPreparedModelCallback>& callback),
                 (override));
+    MOCK_METHOD(ndk::ScopedAStatus, prepareModelWithConfig,
+                (const Model& model, const PrepareModelConfig& config,
+                 const std::shared_ptr<IPreparedModelCallback>& callback),
+                (override));
     MOCK_METHOD(ndk::ScopedAStatus, prepareModelFromCache,
                 (int64_t deadline, const std::vector<ndk::ScopedFileDescriptor>& modelCache,
                  const std::vector<ndk::ScopedFileDescriptor>& dataCache,
diff --git a/neuralnetworks/aidl/utils/test/MockExecution.h b/neuralnetworks/aidl/utils/test/MockExecution.h
index 216f569abc..782e54f874 100644
--- a/neuralnetworks/aidl/utils/test/MockExecution.h
+++ b/neuralnetworks/aidl/utils/test/MockExecution.h
@@ -21,8 +21,6 @@
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <hidl/HidlSupport.h>
-#include <hidl/Status.h>
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
diff --git a/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h b/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h
index 06f9ea2e41..29449bb88b 100644
--- a/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h
+++ b/neuralnetworks/aidl/utils/test/MockFencedExecutionCallback.h
@@ -22,7 +22,6 @@
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <hidl/Status.h>
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
diff --git a/neuralnetworks/aidl/utils/test/MockPreparedModel.h b/neuralnetworks/aidl/utils/test/MockPreparedModel.h
index 0ed9af9929..a5b3b66802 100644
--- a/neuralnetworks/aidl/utils/test/MockPreparedModel.h
+++ b/neuralnetworks/aidl/utils/test/MockPreparedModel.h
@@ -22,8 +22,6 @@
 #include <android/binder_interface_utils.h>
 #include <gmock/gmock.h>
 #include <gtest/gtest.h>
-#include <hidl/HidlSupport.h>
-#include <hidl/Status.h>
 
 namespace aidl::android::hardware::neuralnetworks::utils {
 
@@ -40,10 +38,19 @@ class MockPreparedModel final : public BnPreparedModel {
                  bool measureTiming, int64_t deadline, int64_t loopTimeoutDuration,
                  int64_t duration, FencedExecutionResult* fencedExecutionResult),
                 (override));
+    MOCK_METHOD(ndk::ScopedAStatus, executeSynchronouslyWithConfig,
+                (const Request& request, const ExecutionConfig& config, int64_t deadline,
+                 ExecutionResult* executionResult),
+                (override));
+    MOCK_METHOD(ndk::ScopedAStatus, executeFencedWithConfig,
+                (const Request& request, const std::vector<ndk::ScopedFileDescriptor>& waitFor,
+                 const ExecutionConfig& config, int64_t deadline, int64_t duration,
+                 FencedExecutionResult* fencedExecutionResult),
+                (override));
     MOCK_METHOD(ndk::ScopedAStatus, configureExecutionBurst, (std::shared_ptr<IBurst> * burst),
                 (override));
     MOCK_METHOD(ndk::ScopedAStatus, createReusableExecution,
-                (const Request& request, bool measureTiming, int64_t loopTimeoutDuration,
+                (const Request& request, const ExecutionConfig& config,
                  std::shared_ptr<IExecution>* execution),
                 (override));
 };
diff --git a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
index 8cfb7c123a..bf6136dabb 100644
--- a/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
+++ b/neuralnetworks/aidl/utils/test/PreparedModelTest.cpp
@@ -70,6 +70,21 @@ auto makeFencedExecutionResult(const std::shared_ptr<MockFencedExecutionCallback
 
 class PreparedModelTest : public VersionedAidlUtilsTestBase {};
 
+const std::vector<nn::TokenValuePair> kHints = {nn::TokenValuePair{.token = 0, .value = {1}}};
+const std::vector<nn::ExtensionNameAndPrefix> kExtensionNameToPrefix = {
+        nn::ExtensionNameAndPrefix{.name = "com.android.nn_test", .prefix = 1}};
+auto makeFencedExecutionWithConfigResult(
+        const std::shared_ptr<MockFencedExecutionCallback>& callback) {
+    return [callback](const Request& /*request*/,
+                      const std::vector<ndk::ScopedFileDescriptor>& /*waitFor*/,
+                      const ExecutionConfig& /*config*/, int64_t /*deadline*/, int64_t /*duration*/,
+                      FencedExecutionResult* fencedExecutionResult) {
+        *fencedExecutionResult = FencedExecutionResult{.callback = callback,
+                                                       .syncFence = ndk::ScopedFileDescriptor(-1)};
+        return ndk::ScopedAStatus::ok();
+    };
+}
+
 }  // namespace
 
 TEST_P(PreparedModelTest, invalidPreparedModel) {
@@ -82,6 +97,8 @@ TEST_P(PreparedModelTest, invalidPreparedModel) {
 }
 
 TEST_P(PreparedModelTest, executeSync) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -96,7 +113,7 @@ TEST_P(PreparedModelTest, executeSync) {
                     DoAll(SetArgPointee<4>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     EXPECT_TRUE(result.has_value())
@@ -104,6 +121,8 @@ TEST_P(PreparedModelTest, executeSync) {
 }
 
 TEST_P(PreparedModelTest, executeSyncError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -112,7 +131,7 @@ TEST_P(PreparedModelTest, executeSyncError) {
             .WillOnce(Invoke(makeGeneralFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -120,6 +139,8 @@ TEST_P(PreparedModelTest, executeSyncError) {
 }
 
 TEST_P(PreparedModelTest, executeSyncTransportFailure) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -128,7 +149,7 @@ TEST_P(PreparedModelTest, executeSyncTransportFailure) {
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -136,6 +157,8 @@ TEST_P(PreparedModelTest, executeSyncTransportFailure) {
 }
 
 TEST_P(PreparedModelTest, executeSyncDeadObject) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -144,7 +167,7 @@ TEST_P(PreparedModelTest, executeSyncDeadObject) {
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->execute({}, {}, {}, {});
+    const auto result = preparedModel->execute({}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -152,6 +175,8 @@ TEST_P(PreparedModelTest, executeSyncDeadObject) {
 }
 
 TEST_P(PreparedModelTest, executeFenced) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -165,7 +190,7 @@ TEST_P(PreparedModelTest, executeFenced) {
             .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -181,6 +206,8 @@ TEST_P(PreparedModelTest, executeFenced) {
 }
 
 TEST_P(PreparedModelTest, executeFencedCallbackError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup call
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -195,7 +222,7 @@ TEST_P(PreparedModelTest, executeFencedCallbackError) {
             .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -211,6 +238,8 @@ TEST_P(PreparedModelTest, executeFencedCallbackError) {
 }
 
 TEST_P(PreparedModelTest, executeFencedError) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -219,7 +248,7 @@ TEST_P(PreparedModelTest, executeFencedError) {
             .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -227,6 +256,8 @@ TEST_P(PreparedModelTest, executeFencedError) {
 }
 
 TEST_P(PreparedModelTest, executeFencedTransportFailure) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -235,7 +266,7 @@ TEST_P(PreparedModelTest, executeFencedTransportFailure) {
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -243,6 +274,8 @@ TEST_P(PreparedModelTest, executeFencedTransportFailure) {
 }
 
 TEST_P(PreparedModelTest, executeFencedDeadObject) {
+    if (kVersion.level >= nn::Version::Level::FEATURE_LEVEL_8) return;
+
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
@@ -251,7 +284,7 @@ TEST_P(PreparedModelTest, executeFencedDeadObject) {
             .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // run test
-    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {});
+    const auto result = preparedModel->executeFenced({}, {}, {}, {}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -276,7 +309,7 @@ TEST_P(PreparedModelTest, reusableExecuteSync) {
                     DoAll(SetArgPointee<4>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -300,7 +333,7 @@ TEST_P(PreparedModelTest, reusableExecuteSyncError) {
             .WillOnce(Invoke(makeGeneralFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -322,7 +355,7 @@ TEST_P(PreparedModelTest, reusableExecuteSyncTransportFailure) {
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -344,7 +377,7 @@ TEST_P(PreparedModelTest, reusableExecuteSyncDeadObject) {
            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -372,7 +405,7 @@ TEST_P(PreparedModelTest, reusableExecuteFenced) {
            .WillRepeatedly(Invoke(makeFencedExecutionResult(mockCallback)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -410,7 +443,7 @@ TEST_P(PreparedModelTest, reusableExecuteFencedCallbackError) {
            .WillOnce(Invoke(makeFencedExecutionResult(mockCallback)));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -440,7 +473,7 @@ TEST_P(PreparedModelTest, reusableExecuteFencedError) {
           .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -462,7 +495,7 @@ TEST_P(PreparedModelTest, reusableExecuteFencedTransportFailure) {
           .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -484,7 +517,7 @@ TEST_P(PreparedModelTest, reusableExecuteFencedDeadObject) {
           .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
 
     // create execution
-    const auto createResult = preparedModel->createReusableExecution({}, {}, {});
+    const auto createResult = preparedModel->createReusableExecution({}, {}, {}, {}, {});
     ASSERT_TRUE(createResult.has_value())
             << "Failed with " << createResult.error().code << ": " << createResult.error().message;
     ASSERT_NE(createResult.value(), nullptr);
@@ -495,6 +528,206 @@ TEST_P(PreparedModelTest, reusableExecuteFencedDeadObject) {
     EXPECT_EQ(computeResult.error().code, nn::ErrorStatus::DEAD_OBJECT);
 }
 
+TEST_P(PreparedModelTest, executeSyncWithConfig) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    const auto mockExecutionResult = ExecutionResult{
+            .outputSufficientSize = true,
+            .outputShapes = {},
+            .timing = kNoTiming,
+    };
+    EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+            .Times(1)
+            .WillOnce(
+                    DoAll(SetArgPointee<3>(mockExecutionResult), InvokeWithoutArgs(makeStatusOk)));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    EXPECT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+}
+
+TEST_P(PreparedModelTest, executeSyncWithConfigError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeGeneralFailure));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeSyncWithConfigTransportFailure) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeSyncWithConfigDeadObject) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeSynchronouslyWithConfig(_, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result = preparedModel->execute({}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfig) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    const auto mockCallback = MockFencedExecutionCallback::create();
+    EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+            .Times(1)
+            .WillOnce(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+                            SetArgPointee<2>(ErrorStatus::NONE), Invoke(makeStatusOk)));
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeFencedExecutionWithConfigResult(mockCallback)));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    const auto& [syncFence, callback] = result.value();
+    EXPECT_EQ(syncFence.syncWait({}), nn::SyncFence::FenceState::SIGNALED);
+    ASSERT_NE(callback, nullptr);
+
+    // get results from callback
+    const auto callbackResult = callback();
+    ASSERT_TRUE(callbackResult.has_value()) << "Failed with " << callbackResult.error().code << ": "
+                                            << callbackResult.error().message;
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigCallbackError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup call
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    const auto mockCallback = MockFencedExecutionCallback::create();
+    EXPECT_CALL(*mockCallback, getExecutionInfo(_, _, _))
+            .Times(1)
+            .WillOnce(Invoke(DoAll(SetArgPointee<0>(kNoTiming), SetArgPointee<1>(kNoTiming),
+                                   SetArgPointee<2>(ErrorStatus::GENERAL_FAILURE),
+                                   Invoke(makeStatusOk))));
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(Invoke(makeFencedExecutionWithConfigResult(mockCallback)));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_TRUE(result.has_value())
+            << "Failed with " << result.error().code << ": " << result.error().message;
+    const auto& [syncFence, callback] = result.value();
+    EXPECT_NE(syncFence.syncWait({}), nn::SyncFence::FenceState::ACTIVE);
+    ASSERT_NE(callback, nullptr);
+
+    // verify callback failure
+    const auto callbackResult = callback();
+    ASSERT_FALSE(callbackResult.has_value());
+    EXPECT_EQ(callbackResult.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigError) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigTransportFailure) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::GENERAL_FAILURE);
+}
+
+TEST_P(PreparedModelTest, executeFencedWithConfigDeadObject) {
+    if (kVersion.level < nn::Version::Level::FEATURE_LEVEL_8) return;
+
+    // setup test
+    const auto mockPreparedModel = MockPreparedModel::create();
+    const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
+    EXPECT_CALL(*mockPreparedModel, executeFencedWithConfig(_, _, _, _, _, _))
+            .Times(1)
+            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
+
+    // run test
+    const auto result =
+            preparedModel->executeFenced({}, {}, {}, {}, {}, {}, kHints, kExtensionNameToPrefix);
+
+    // verify result
+    ASSERT_FALSE(result.has_value());
+    EXPECT_EQ(result.error().code, nn::ErrorStatus::DEAD_OBJECT);
+}
+
 TEST_P(PreparedModelTest, configureExecutionBurst) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
@@ -567,13 +800,13 @@ TEST_P(PreparedModelTest, createReusableExecution) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
     const auto mockExecution = ndk::SharedRefBase::make<MockExecution>();
-    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
             .Times(1)
-            .WillOnce(DoAll(SetArgPointee<3>(mockExecution), Invoke(makeStatusOk)));
+            .WillOnce(DoAll(SetArgPointee<2>(mockExecution), Invoke(makeStatusOk)));
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
-    const auto result = preparedModel->createReusableExecution({}, {}, {});
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
 
     // verify result
     ASSERT_TRUE(result.has_value())
@@ -586,13 +819,13 @@ TEST_P(PreparedModelTest, createReusableExecutionError) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralFailure));
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
-    const auto result = preparedModel->createReusableExecution({}, {}, {});
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -604,13 +837,13 @@ TEST_P(PreparedModelTest, createReusableExecutionTransportFailure) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
             .Times(1)
             .WillOnce(InvokeWithoutArgs(makeGeneralTransportFailure));
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
-    const auto result = preparedModel->createReusableExecution({}, {}, {});
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());
@@ -622,13 +855,13 @@ TEST_P(PreparedModelTest, createReusableExecutionDeadObject) {
     // setup test
     const auto mockPreparedModel = MockPreparedModel::create();
-    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _, _))
+    EXPECT_CALL(*mockPreparedModel, createReusableExecution(_, _, _))
            .Times(1)
            .WillOnce(InvokeWithoutArgs(makeDeadObjectFailure));
     const auto preparedModel = PreparedModel::create(mockPreparedModel, kVersion).value();
 
     // run test
-    const auto result = preparedModel->createReusableExecution({}, {}, {});
+    const auto result = preparedModel->createReusableExecution({}, {}, {}, {}, {});
 
     // verify result
     ASSERT_FALSE(result.has_value());