diff options
-rw-r--r-- | camera/provider/2.4/vts/OWNERS | 2 | ||||
-rw-r--r-- | camera/provider/2.4/vts/functional/Android.bp | 6 | ||||
-rw-r--r-- | camera/provider/aidl/vts/Android.bp | 75 | ||||
-rw-r--r-- | camera/provider/aidl/vts/AndroidTest.xml | 33 | ||||
-rw-r--r-- | camera/provider/aidl/vts/OWNERS | 6 | ||||
-rw-r--r-- | camera/provider/aidl/vts/VtsAidlHalCameraProvider_TargetTest.cpp | 3010 | ||||
-rw-r--r-- | camera/provider/aidl/vts/camera_aidl_test.cpp | 2924 | ||||
-rw-r--r-- | camera/provider/aidl/vts/camera_aidl_test.h | 528 | ||||
-rw-r--r-- | camera/provider/aidl/vts/device_cb.cpp | 544 | ||||
-rw-r--r-- | camera/provider/aidl/vts/device_cb.h | 82 | ||||
-rw-r--r-- | camera/provider/aidl/vts/empty_device_cb.cpp | 43 | ||||
-rw-r--r-- | camera/provider/aidl/vts/empty_device_cb.h | 38 | ||||
-rw-r--r-- | camera/provider/aidl/vts/simple_device_cb.cpp | 36 | ||||
-rw-r--r-- | camera/provider/aidl/vts/simple_device_cb.h | 41 | ||||
-rw-r--r-- | camera/provider/aidl/vts/torch_provider_cb.cpp | 40 | ||||
-rw-r--r-- | camera/provider/aidl/vts/torch_provider_cb.h | 44 |
16 files changed, 7451 insertions, 1 deletions
diff --git a/camera/provider/2.4/vts/OWNERS b/camera/provider/2.4/vts/OWNERS index b8f6b048d2..eb4f0e4b6c 100644 --- a/camera/provider/2.4/vts/OWNERS +++ b/camera/provider/2.4/vts/OWNERS @@ -1,3 +1,5 @@ +# Bug component: 41727 + # Camera team include platform/frameworks/av:/camera/OWNERS diff --git a/camera/provider/2.4/vts/functional/Android.bp b/camera/provider/2.4/vts/functional/Android.bp index 0e622655df..17fbdfe9b8 100644 --- a/camera/provider/2.4/vts/functional/Android.bp +++ b/camera/provider/2.4/vts/functional/Android.bp @@ -65,5 +65,9 @@ cc_test { "libhidlmemory", "libgralloctypes", ], - test_suites: ["general-tests", "vts"], + test_suites: [ + "general-tests", + "vts", + ], + } diff --git a/camera/provider/aidl/vts/Android.bp b/camera/provider/aidl/vts/Android.bp new file mode 100644 index 0000000000..727ef03e72 --- /dev/null +++ b/camera/provider/aidl/vts/Android.bp @@ -0,0 +1,75 @@ +// +// Copyright (C) 2022 The Android Open Source Project +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// + +package { + // See: http://go/android-license-faq + // A large-scale-change added 'default_applicable_licenses' to import + // all of the 'license_kinds' from "hardware_interfaces_license" + // to get the below license kinds: + // SPDX-license-identifier-Apache-2.0 + default_applicable_licenses: ["hardware_interfaces_license"], +} + +cc_test { + name: "VtsAidlHalCameraProvider_TargetTest", + defaults: [ + "VtsHalTargetTestDefaults", + "use_libaidlvintf_gtest_helper_static", + ], + srcs: [ + "camera_aidl_test.cpp", + "device_cb.cpp", + "empty_device_cb.cpp", + "simple_device_cb.cpp", + "torch_provider_cb.cpp", + "VtsAidlHalCameraProvider_TargetTest.cpp", + ], + + // TODO(b/64437680): Assume these are always available on the device. + shared_libs: [ + "libbinder_ndk", + "libcamera_metadata", + "libcutils", + "libfmq", + "libgui", + "libui", + "libbase", + "android.hardware.common-V2-ndk", + "android.hardware.common.fmq-V1-ndk", + "android.hardware.graphics.mapper@2.0", + "android.hardware.graphics.mapper@3.0", + "android.hardware.graphics.mapper@4.0", + ], + + // Statically link to libs not guaranteed to be present on the device. 
+ static_libs: [ + "android.hardware.camera.common@1.0-helper", + "android.hardware.camera.common-V1-ndk", + "android.hardware.camera.device-V1-ndk", + "android.hardware.camera.metadata-V1-ndk", + "android.hardware.camera.provider-V1-ndk", + "android.hardware.graphics.common-V3-ndk", + "android.hidl.allocator@1.0", + "libgrallocusage", + "libhidlmemory", + "libgralloctypes", + "libaidlcommonsupport", + ], + test_suites: [ + "general-tests", + "vts", + ], +} diff --git a/camera/provider/aidl/vts/AndroidTest.xml b/camera/provider/aidl/vts/AndroidTest.xml new file mode 100644 index 0000000000..226121da74 --- /dev/null +++ b/camera/provider/aidl/vts/AndroidTest.xml @@ -0,0 +1,33 @@ +<?xml version="1.0" encoding="utf-8"?> +<!-- Copyright (C) 2022 The Android Open Source Project + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+--> +<configuration description="Runs VtsAidlHalCameraProvider_TargetTest."> + <option name="test-suite-tag" value="apct" /> + <option name="test-suite-tag" value="apct-native" /> + + <target_preparer class="com.android.tradefed.targetprep.RootTargetPreparer"> + </target_preparer> + + <target_preparer class="com.android.tradefed.targetprep.PushFilePreparer"> + <option name="cleanup" value="true" /> + <option name="push" value="VtsAidlHalCameraProvider_TargetTest->/data/local/tmp/VtsAidlHalCameraProvider_TargetTest" /> + </target_preparer> + + <test class="com.android.tradefed.testtype.GTest" > + <option name="native-test-device-path" value="/data/local/tmp" /> + <option name="module-name" value="VtsAidlHalCameraProvider_TargetTest" /> + <option name="native-test-timeout" value="1800000"/> <!-- 30 min --> + </test> +</configuration>
\ No newline at end of file diff --git a/camera/provider/aidl/vts/OWNERS b/camera/provider/aidl/vts/OWNERS new file mode 100644 index 0000000000..27d370bcbe --- /dev/null +++ b/camera/provider/aidl/vts/OWNERS @@ -0,0 +1,6 @@ +# Camera team +include platform/frameworks/av:/camera/OWNERS + +# VTS team +yim@google.com +zhuoyao@google.com
\ No newline at end of file diff --git a/camera/provider/aidl/vts/VtsAidlHalCameraProvider_TargetTest.cpp b/camera/provider/aidl/vts/VtsAidlHalCameraProvider_TargetTest.cpp new file mode 100644 index 0000000000..3da89e2237 --- /dev/null +++ b/camera/provider/aidl/vts/VtsAidlHalCameraProvider_TargetTest.cpp @@ -0,0 +1,3010 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include <aidl/Vintf.h> +#include <aidl/android/hardware/camera/common/VendorTagSection.h> +#include <aidl/android/hardware/camera/device/ICameraDevice.h> +#include <aidlcommonsupport/NativeHandle.h> +#include <camera_aidl_test.h> +#include <cutils/properties.h> +#include <device_cb.h> +#include <empty_device_cb.h> +#include <grallocusage/GrallocUsageConversion.h> +#include <gtest/gtest.h> +#include <hardware/gralloc.h> +#include <hardware/gralloc1.h> +#include <hidl/GtestPrinter.h> +#include <hidl/HidlSupport.h> +#include <torch_provider_cb.h> +#include <list> + +using ::aidl::android::hardware::camera::common::CameraDeviceStatus; +using ::aidl::android::hardware::camera::common::CameraResourceCost; +using ::aidl::android::hardware::camera::common::TorchModeStatus; +using ::aidl::android::hardware::camera::common::VendorTagSection; +using ::aidl::android::hardware::camera::device::ICameraDevice; +using ::aidl::android::hardware::camera::metadata::SensorPixelMode; +using 
::aidl::android::hardware::camera::provider::CameraIdAndStreamCombination; +using ::aidl::android::hardware::camera::provider::ICameraProviderCallbackDefault; + +using ::ndk::ScopedAStatus; + +namespace { +const int32_t kBurstFrameCount = 10; +const uint32_t kMaxStillWidth = 2048; +const uint32_t kMaxStillHeight = 1536; + +const int64_t kEmptyFlushTimeoutMSec = 200; + +const static std::vector<int32_t> kMandatoryUseCases = { + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL}; +} // namespace + +TEST_P(CameraAidlTest, getCameraIdList) { + std::vector<std::string> idList; + ScopedAStatus ret = mProvider->getCameraIdList(&idList); + ASSERT_TRUE(ret.isOk()); + + for (size_t i = 0; i < idList.size(); i++) { + ALOGI("Camera Id[%zu] is %s", i, idList[i].c_str()); + } +} + +// Test if ICameraProvider::getVendorTags returns Status::OK +TEST_P(CameraAidlTest, getVendorTags) { + std::vector<VendorTagSection> vendorTags; + ScopedAStatus ret = mProvider->getVendorTags(&vendorTags); + + ASSERT_TRUE(ret.isOk()); + for (size_t i = 0; i < vendorTags.size(); i++) { + ALOGI("Vendor tag section %zu name %s", i, vendorTags[i].sectionName.c_str()); + for (auto& tag : vendorTags[i].tags) { + ALOGI("Vendor tag id %u name %s type %d", tag.tagId, tag.tagName.c_str(), + (int)tag.tagType); + } + } +} + +// Test if ICameraProvider::setCallback returns Status::OK +TEST_P(CameraAidlTest, setCallback) { + struct ProviderCb : public ICameraProviderCallbackDefault { + ScopedAStatus cameraDeviceStatusChange(const std::string& cameraDeviceName, + CameraDeviceStatus newStatus) override { + ALOGI("camera device status callback name %s, status %d", cameraDeviceName.c_str(), + (int)newStatus); + return 
ScopedAStatus::ok(); + } + ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName, + TorchModeStatus newStatus) override { + ALOGI("Torch mode status callback name %s, status %d", cameraDeviceName.c_str(), + (int)newStatus); + return ScopedAStatus::ok(); + } + ScopedAStatus physicalCameraDeviceStatusChange(const std::string& cameraDeviceName, + const std::string& physicalCameraDeviceName, + CameraDeviceStatus newStatus) override { + ALOGI("physical camera device status callback name %s, physical camera name %s," + " status %d", + cameraDeviceName.c_str(), physicalCameraDeviceName.c_str(), (int)newStatus); + return ScopedAStatus::ok(); + } + }; + + std::shared_ptr<ProviderCb> cb = ProviderCb::make<ProviderCb>(); + ScopedAStatus ret = mProvider->setCallback(cb); + ASSERT_TRUE(ret.isOk()); + ret = mProvider->setCallback(nullptr); + ASSERT_TRUE(ret.isOk()); +} + +// Test if ICameraProvider::getCameraDeviceInterface returns Status::OK and non-null device +TEST_P(CameraAidlTest, getCameraDeviceInterface) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + std::shared_ptr<ICameraDevice> cameraDevice; + ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &cameraDevice); + ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(cameraDevice, nullptr); + } +} + +// Verify that the device resource cost can be retrieved and the values are +// correct. 
+TEST_P(CameraAidlTest, getResourceCost) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& deviceName : cameraDeviceNames) { + std::shared_ptr<ICameraDevice> cameraDevice; + ScopedAStatus ret = mProvider->getCameraDeviceInterface(deviceName, &cameraDevice); + ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(cameraDevice, nullptr); + + CameraResourceCost resourceCost; + ret = cameraDevice->getResourceCost(&resourceCost); + ALOGI("getResourceCost returns: %d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + + ALOGI(" Resource cost is %d", resourceCost.resourceCost); + ASSERT_LE(resourceCost.resourceCost, 100u); + + for (const auto& name : resourceCost.conflictingDevices) { + ALOGI(" Conflicting device: %s", name.c_str()); + } + } +} + +TEST_P(CameraAidlTest, systemCameraTest) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::map<std::string, std::vector<SystemCameraKind>> hiddenPhysicalIdToLogicalMap; + for (const auto& name : cameraDeviceNames) { + std::shared_ptr<ICameraDevice> device; + ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str()); + ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + CameraMetadata cameraCharacteristics; + ret = device->getCameraCharacteristics(&cameraCharacteristics); + ASSERT_TRUE(ret.isOk()); + + const camera_metadata_t* staticMeta = + reinterpret_cast<const camera_metadata_t*>(cameraCharacteristics.metadata.data()); + Status rc = isLogicalMultiCamera(staticMeta); + if (rc == Status::OPERATION_NOT_SUPPORTED) { + return; + } + + ASSERT_EQ(rc, Status::OK); + std::unordered_set<std::string> physicalIds; + ASSERT_EQ(getPhysicalCameraIds(staticMeta, &physicalIds), Status::OK); + SystemCameraKind 
systemCameraKind = SystemCameraKind::PUBLIC; + Status retStatus = getSystemCameraKind(staticMeta, &systemCameraKind); + ASSERT_EQ(retStatus, Status::OK); + + for (auto physicalId : physicalIds) { + bool isPublicId = false; + for (auto& deviceName : cameraDeviceNames) { + std::string publicVersion, publicId; + ASSERT_TRUE(matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId)); + if (physicalId == publicId) { + isPublicId = true; + break; + } + } + // For hidden physical cameras, collect their associated logical cameras + // and store the system camera kind. + if (!isPublicId) { + auto it = hiddenPhysicalIdToLogicalMap.find(physicalId); + if (it == hiddenPhysicalIdToLogicalMap.end()) { + hiddenPhysicalIdToLogicalMap.insert(std::make_pair( + physicalId, std::vector<SystemCameraKind>(systemCameraKind))); + } else { + it->second.push_back(systemCameraKind); + } + } + } + } + + // Check that the system camera kind of the logical cameras associated with + // each hidden physical camera is the same. + for (const auto& it : hiddenPhysicalIdToLogicalMap) { + SystemCameraKind neededSystemCameraKind = it.second.front(); + for (auto foundSystemCamera : it.second) { + ASSERT_EQ(neededSystemCameraKind, foundSystemCamera); + } + } +} + +// Verify that the static camera characteristics can be retrieved +// successfully. 
+TEST_P(CameraAidlTest, getCameraCharacteristics) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + std::shared_ptr<ICameraDevice> device; + ALOGI("getCameraCharacteristics: Testing camera device %s", name.c_str()); + ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device); + ALOGI("getCameraDeviceInterface returns: %d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + CameraMetadata chars; + ret = device->getCameraCharacteristics(&chars); + ASSERT_TRUE(ret.isOk()); + verifyCameraCharacteristics(chars); + verifyMonochromeCharacteristics(chars); + verifyRecommendedConfigs(chars); + verifyLogicalOrUltraHighResCameraMetadata(name, device, chars, cameraDeviceNames); + + ASSERT_TRUE(ret.isOk()); + + // getPhysicalCameraCharacteristics will fail for publicly + // advertised camera IDs. + std::string version, cameraId; + ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &cameraId)); + CameraMetadata devChars; + ret = device->getPhysicalCameraCharacteristics(cameraId, &devChars); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError()); + ASSERT_EQ(0, devChars.metadata.size()); + } +} + +// Verify that the torch strength level can be set and retrieved successfully. 
+TEST_P(CameraAidlTest, turnOnTorchWithStrengthLevel) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this); + ndk::ScopedAStatus ret = mProvider->setCallback(cb); + ASSERT_TRUE(ret.isOk()); + + for (const auto& name : cameraDeviceNames) { + int32_t defaultLevel; + std::shared_ptr<ICameraDevice> device; + ALOGI("%s: Testing camera device %s", __FUNCTION__, name.c_str()); + + ret = mProvider->getCameraDeviceInterface(name, &device); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + CameraMetadata chars; + ret = device->getCameraCharacteristics(&chars); + ASSERT_TRUE(ret.isOk()); + + const camera_metadata_t* staticMeta = + reinterpret_cast<const camera_metadata_t*>(chars.metadata.data()); + bool torchStrengthControlSupported = isTorchStrengthControlSupported(staticMeta); + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, + ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL, &entry); + if (torchStrengthControlSupported) { + ASSERT_EQ(rc, 0); + ASSERT_GT(entry.count, 0); + defaultLevel = *entry.data.i32; + ALOGI("Default level is:%d", defaultLevel); + } + + mTorchStatus = TorchModeStatus::NOT_AVAILABLE; + ret = device->turnOnTorchWithStrengthLevel(2); + ALOGI("turnOnTorchWithStrengthLevel returns status: %d", ret.getServiceSpecificError()); + // OPERATION_NOT_SUPPORTED check + if (!torchStrengthControlSupported) { + ALOGI("Torch strength control not supported."); + ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED), + ret.getServiceSpecificError()); + } else { + { + ASSERT_TRUE(ret.isOk()); + std::unique_lock<std::mutex> l(mTorchLock); + while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kTorchTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout)); + } + ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, 
mTorchStatus); + mTorchStatus = TorchModeStatus::NOT_AVAILABLE; + } + ALOGI("getTorchStrengthLevel: Testing"); + int32_t strengthLevel; + ret = device->getTorchStrengthLevel(&strengthLevel); + ASSERT_TRUE(ret.isOk()); + ALOGI("Torch strength level is : %d", strengthLevel); + ASSERT_EQ(strengthLevel, 2); + + // Turn OFF the torch and verify torch strength level is reset to default level. + ALOGI("Testing torch strength level reset after turning the torch OFF."); + ret = device->setTorchMode(false); + ASSERT_TRUE(ret.isOk()); + { + std::unique_lock<std::mutex> l(mTorchLock); + while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kTorchTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout)); + } + ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus); + } + + ret = device->getTorchStrengthLevel(&strengthLevel); + ASSERT_TRUE(ret.isOk()); + ALOGI("Torch strength level after turning OFF torch is : %d", strengthLevel); + ASSERT_EQ(strengthLevel, defaultLevel); + } + } +} + +// In case it is supported verify that torch can be enabled. +// Check for corresponding torch callbacks as well. 
+TEST_P(CameraAidlTest, setTorchMode) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + std::shared_ptr<TorchProviderCb> cb = ndk::SharedRefBase::make<TorchProviderCb>(this); + ndk::ScopedAStatus ret = mProvider->setCallback(cb); + ALOGI("setCallback returns status: %d", ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(cb, nullptr); + + for (const auto& name : cameraDeviceNames) { + std::shared_ptr<ICameraDevice> device; + ALOGI("setTorchMode: Testing camera device %s", name.c_str()); + ret = mProvider->getCameraDeviceInterface(name, &device); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + CameraMetadata metadata; + ret = device->getCameraCharacteristics(&metadata); + ALOGI("getCameraCharacteristics returns status:%d", ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + camera_metadata_t* staticMeta = + reinterpret_cast<camera_metadata_t*>(metadata.metadata.data()); + bool torchSupported = isTorchSupported(staticMeta); + + mTorchStatus = TorchModeStatus::NOT_AVAILABLE; + ret = device->setTorchMode(true); + ALOGI("setTorchMode returns status: %d", ret.getServiceSpecificError()); + if (!torchSupported) { + ASSERT_EQ(static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED), + ret.getServiceSpecificError()); + } else { + ASSERT_TRUE(ret.isOk()); + { + std::unique_lock<std::mutex> l(mTorchLock); + while (TorchModeStatus::NOT_AVAILABLE == mTorchStatus) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kTorchTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout)); + } + ASSERT_EQ(TorchModeStatus::AVAILABLE_ON, mTorchStatus); + mTorchStatus = TorchModeStatus::NOT_AVAILABLE; + } + + ret = device->setTorchMode(false); + ASSERT_TRUE(ret.isOk()); + { + std::unique_lock<std::mutex> l(mTorchLock); + while 
(TorchModeStatus::NOT_AVAILABLE == mTorchStatus) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kTorchTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mTorchCond.wait_until(l, timeout)); + } + ASSERT_EQ(TorchModeStatus::AVAILABLE_OFF, mTorchStatus); + } + } + } + + ret = mProvider->setCallback(nullptr); + ASSERT_TRUE(ret.isOk()); +} + +// Check dump functionality. +TEST_P(CameraAidlTest, dump) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + std::shared_ptr<ICameraDevice> device; + ALOGI("dump: Testing camera device %s", name.c_str()); + + ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + int raw_handle = open(kDumpOutput, O_RDWR); + ASSERT_GE(raw_handle, 0); + + auto retStatus = device->dump(raw_handle, nullptr, 0); + ASSERT_EQ(retStatus, ::android::OK); + close(raw_handle); + } +} + +// Open, dump, then close +TEST_P(CameraAidlTest, openClose) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + std::shared_ptr<ICameraDevice> device; + ALOGI("openClose: Testing camera device %s", name.c_str()); + ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>(); + + ret = device->open(cb, &mSession); + ASSERT_TRUE(ret.isOk()); + ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_NE(mSession, nullptr); + int raw_handle = open(kDumpOutput, O_RDWR); + 
ASSERT_GE(raw_handle, 0); + + auto retStatus = device->dump(raw_handle, nullptr, 0); + ASSERT_EQ(retStatus, ::android::OK); + close(raw_handle); + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + // TODO: test all session API calls return INTERNAL_ERROR after close + // TODO: keep a wp copy here and verify session cannot be promoted out of this scope + } +} + +// Check whether all common default request settings can be successfully +// constructed. +TEST_P(CameraAidlTest, constructDefaultRequestSettings) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + std::shared_ptr<ICameraDevice> device; + ALOGI("constructDefaultRequestSettings: Testing camera device %s", name.c_str()); + ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(name, &device); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>(); + ret = device->open(cb, &mSession); + ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(mSession, nullptr); + + for (int32_t t = (int32_t)RequestTemplate::PREVIEW; t <= (int32_t)RequestTemplate::MANUAL; + t++) { + RequestTemplate reqTemplate = (RequestTemplate)t; + CameraMetadata rawMetadata; + ret = mSession->constructDefaultRequestSettings(reqTemplate, &rawMetadata); + ALOGI("constructDefaultRequestSettings returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + if (reqTemplate == RequestTemplate::ZERO_SHUTTER_LAG || + reqTemplate == RequestTemplate::MANUAL) { + // optional templates + ASSERT_TRUE(ret.isOk() || static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == + ret.getServiceSpecificError()); + } else { + 
ASSERT_TRUE(ret.isOk()); + } + + if (ret.isOk()) { + const camera_metadata_t* metadata = (camera_metadata_t*)rawMetadata.metadata.data(); + size_t expectedSize = rawMetadata.metadata.size(); + int result = validate_camera_metadata_structure(metadata, &expectedSize); + ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED)); + verifyRequestTemplate(metadata, reqTemplate); + } else { + ASSERT_EQ(0u, rawMetadata.metadata.size()); + } + } + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that all supported stream formats and sizes can be configured +// successfully. +TEST_P(CameraAidlTest, configureStreamsAvailableOutputs) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputStreams; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + std::shared_ptr<ICameraDevice> device; + + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, &device /*out*/); + + camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + outputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + int32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + int32_t streamConfigCounter = 0; + for (auto& it : outputStreams) { + Stream stream; + Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format)); + stream.id = streamId; + stream.streamType = StreamType::OUTPUT; + stream.width = it.width; + stream.height = it.height; + stream.format = static_cast<PixelFormat>(it.format); + stream.dataSpace = dataspace; + stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER); + stream.rotation = StreamRotation::ROTATION_0; + + 
std::vector<Stream> streams = {stream}; + StreamConfiguration config; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + + bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK); + verifyStreamCombination(device, config, /*expectedStatus*/ true, expectStreamCombQuery); + + config.streamConfigCounter = streamConfigCounter++; + std::vector<HalStream> halConfigs; + ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(halConfigs.size(), 1); + ASSERT_EQ(halConfigs[0].id, streamId); + + streamId++; + } + + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that mandatory concurrent streams and outputs are supported. +TEST_P(CameraAidlTest, configureConcurrentStreamsAvailableOutputs) { + struct CameraTestInfo { + CameraMetadata staticMeta; + std::shared_ptr<ICameraDeviceSession> session; + std::shared_ptr<ICameraDevice> cameraDevice; + StreamConfiguration config; + }; + + std::map<std::string, std::string> idToNameMap = getCameraDeviceIdToNameMap(mProvider); + std::vector<ConcurrentCameraIdCombination> concurrentDeviceCombinations = + getConcurrentDeviceCombinations(mProvider); + std::vector<AvailableStream> outputStreams; + for (const auto& cameraDeviceIds : concurrentDeviceCombinations) { + std::vector<CameraIdAndStreamCombination> cameraIdsAndStreamCombinations; + std::vector<CameraTestInfo> cameraTestInfos; + size_t i = 0; + for (const auto& id : cameraDeviceIds.combination) { + CameraTestInfo cti; + auto it = idToNameMap.find(id); + ASSERT_TRUE(idToNameMap.end() != it); + std::string name = it->second; + + openEmptyDeviceSession(name, mProvider, &cti.session /*out*/, &cti.staticMeta /*out*/, + &cti.cameraDevice /*out*/); + + outputStreams.clear(); + camera_metadata_t* staticMeta = + reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data()); + 
ASSERT_EQ(Status::OK, getMandatoryConcurrentStreams(staticMeta, &outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + int32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + std::vector<Stream> streams(outputStreams.size()); + size_t j = 0; + for (const auto& s : outputStreams) { + Stream stream; + Dataspace dataspace = getDataspace(static_cast<PixelFormat>(s.format)); + stream.id = streamId++; + stream.streamType = StreamType::OUTPUT; + stream.width = s.width; + stream.height = s.height; + stream.format = static_cast<PixelFormat>(s.format); + stream.usage = static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER); + stream.dataSpace = dataspace; + stream.rotation = StreamRotation::ROTATION_0; + stream.sensorPixelModesUsed = {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}; + streams[j] = stream; + j++; + } + + // Add the created stream configs to cameraIdsAndStreamCombinations + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &cti.config, + jpegBufferSize); + + cti.config.streamConfigCounter = outputStreams.size(); + CameraIdAndStreamCombination cameraIdAndStreamCombination; + cameraIdAndStreamCombination.cameraId = id; + cameraIdAndStreamCombination.streamConfiguration = cti.config; + cameraIdsAndStreamCombinations.push_back(cameraIdAndStreamCombination); + i++; + cameraTestInfos.push_back(cti); + } + // Now verify that concurrent streams are supported + bool combinationSupported; + ndk::ScopedAStatus ret = mProvider->isConcurrentStreamCombinationSupported( + cameraIdsAndStreamCombinations, &combinationSupported); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(combinationSupported, true); + + // Test the stream can actually be configured + for (auto& cti : cameraTestInfos) { + if (cti.session != nullptr) { + camera_metadata_t* staticMeta = + 
reinterpret_cast<camera_metadata_t*>(cti.staticMeta.metadata.data()); + bool expectStreamCombQuery = (isLogicalMultiCamera(staticMeta) == Status::OK); + verifyStreamCombination(cti.cameraDevice, cti.config, /*expectedStatus*/ true, + expectStreamCombQuery); + } + + if (cti.session != nullptr) { + std::vector<HalStream> streamConfigs; + ret = cti.session->configureStreams(cti.config, &streamConfigs); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(cti.config.streams.size(), streamConfigs.size()); + } + } + + for (auto& cti : cameraTestInfos) { + ret = cti.session->close(); + ASSERT_TRUE(ret.isOk()); + } + } +} + +// Check for correct handling of invalid/incorrect configuration parameters. +TEST_P(CameraAidlTest, configureStreamsInvalidOutputs) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputStreams; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + std::shared_ptr<ICameraDevice> cameraDevice; + + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, + &cameraDevice /*out*/); + camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + outputStreams.clear(); + + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + int32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + Stream stream = {streamId++, + StreamType::OUTPUT, + static_cast<uint32_t>(0), + static_cast<uint32_t>(0), + static_cast<PixelFormat>(outputStreams[0].format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + int32_t streamConfigCounter = 0; + std::vector<Stream> streams = {stream}; + 
StreamConfiguration config; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + + verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ false, + /*expectStreamCombQuery*/ false); + + config.streamConfigCounter = streamConfigCounter++; + std::vector<HalStream> halConfigs; + ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs); + ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == + ret.getServiceSpecificError() || + static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError()); + + stream = {streamId++, + StreamType::OUTPUT, + /*width*/ INT32_MAX, + /*height*/ INT32_MAX, + static_cast<PixelFormat>(outputStreams[0].format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + + config.streamConfigCounter = streamConfigCounter++; + halConfigs.clear(); + ret = mSession->configureStreams(config, &halConfigs); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError()); + + for (auto& it : outputStreams) { + stream = {streamId++, + StreamType::OUTPUT, + it.width, + it.height, + static_cast<PixelFormat>(UINT32_MAX), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + config.streamConfigCounter = streamConfigCounter++; + halConfigs.clear(); + ret = 
mSession->configureStreams(config, &halConfigs); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), + ret.getServiceSpecificError()); + + stream = {streamId++, + StreamType::OUTPUT, + it.width, + it.height, + static_cast<PixelFormat>(it.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + static_cast<StreamRotation>(UINT32_MAX), + std::string(), + jpegBufferSize, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + + config.streamConfigCounter = streamConfigCounter++; + halConfigs.clear(); + ret = mSession->configureStreams(config, &halConfigs); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), + ret.getServiceSpecificError()); + } + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Check whether all supported ZSL output stream combinations can be +// configured successfully. 
+TEST_P(CameraAidlTest, configureStreamsZSLInputOutputs) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> inputStreams; + std::vector<AvailableZSLInputOutput> inputOutputMap; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + std::shared_ptr<ICameraDevice> cameraDevice; + + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, + &cameraDevice /*out*/); + camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + + Status rc = isZSLModeAvailable(staticMeta); + if (Status::OPERATION_NOT_SUPPORTED == rc) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + ASSERT_EQ(Status::OK, rc); + + inputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMeta, inputStreams)); + ASSERT_NE(0u, inputStreams.size()); + + inputOutputMap.clear(); + ASSERT_EQ(Status::OK, getZSLInputOutputMap(staticMeta, inputOutputMap)); + ASSERT_NE(0u, inputOutputMap.size()); + + bool supportMonoY8 = false; + if (Status::OK == isMonochromeCamera(staticMeta)) { + for (auto& it : inputStreams) { + if (it.format == static_cast<uint32_t>(PixelFormat::Y8)) { + supportMonoY8 = true; + break; + } + } + } + + int32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + bool hasPrivToY8 = false, hasY8ToY8 = false, hasY8ToBlob = false; + uint32_t streamConfigCounter = 0; + for (auto& inputIter : inputOutputMap) { + AvailableStream input; + ASSERT_EQ(Status::OK, findLargestSize(inputStreams, inputIter.inputFormat, input)); + ASSERT_NE(0u, inputStreams.size()); + + if (inputIter.inputFormat == + static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED) && + inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) { + hasPrivToY8 = true; + } else if (inputIter.inputFormat == 
static_cast<uint32_t>(PixelFormat::Y8)) { + if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::BLOB)) { + hasY8ToBlob = true; + } else if (inputIter.outputFormat == static_cast<uint32_t>(PixelFormat::Y8)) { + hasY8ToY8 = true; + } + } + AvailableStream outputThreshold = {INT32_MAX, INT32_MAX, inputIter.outputFormat}; + std::vector<AvailableStream> outputStreams; + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputStreams, &outputThreshold)); + for (auto& outputIter : outputStreams) { + Dataspace outputDataSpace = + getDataspace(static_cast<PixelFormat>(outputIter.format)); + Stream zslStream = { + streamId++, + StreamType::OUTPUT, + input.width, + input.height, + static_cast<PixelFormat>(input.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC_USAGE_HW_CAMERA_ZSL), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + Stream inputStream = { + streamId++, + StreamType::INPUT, + input.width, + input.height, + static_cast<PixelFormat>(input.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>(0), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + Stream outputStream = { + streamId++, + StreamType::OUTPUT, + outputIter.width, + outputIter.height, + static_cast<PixelFormat>(outputIter.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + outputDataSpace, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + + std::vector<Stream> streams = {inputStream, zslStream, outputStream}; + + StreamConfiguration config; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + + 
verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true, + /*expectStreamCombQuery*/ false); + + config.streamConfigCounter = streamConfigCounter++; + std::vector<HalStream> halConfigs; + ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(3u, halConfigs.size()); + } + } + + if (supportMonoY8) { + if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) { + ASSERT_TRUE(hasPrivToY8); + } + if (Status::OK == isZSLModeAvailable(staticMeta, YUV_REPROCESS)) { + ASSERT_TRUE(hasY8ToY8); + ASSERT_TRUE(hasY8ToBlob); + } + } + + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Check whether session parameters are supported. If Hal support for them +// exist, then try to configure a preview stream using them. +TEST_P(CameraAidlTest, configureStreamsWithSessionParameters) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + + std::shared_ptr<ICameraDevice> unusedCameraDevice; + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, + &unusedCameraDevice /*out*/); + camera_metadata_t* staticMetaBuffer = + reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + + std::unordered_set<int32_t> availableSessionKeys; + auto rc = getSupportedKeys(staticMetaBuffer, ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, + &availableSessionKeys); + ASSERT_TRUE(Status::OK == rc); + if (availableSessionKeys.empty()) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings; + 
android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams, + modifiedSessionParams; + constructFilteredSettings(mSession, availableSessionKeys, RequestTemplate::PREVIEW, + &previewRequestSettings, &sessionParams); + if (sessionParams.isEmpty()) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + outputPreviewStreams.clear(); + + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams, + &previewThreshold)); + ASSERT_NE(0u, outputPreviewStreams.size()); + + Stream previewStream = {0, + StreamType::OUTPUT, + outputPreviewStreams[0].width, + outputPreviewStreams[0].height, + static_cast<PixelFormat>(outputPreviewStreams[0].format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + /*bufferSize*/ 0, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + + std::vector<Stream> streams = {previewStream}; + StreamConfiguration config; + + config.streams = streams; + config.operationMode = StreamConfigurationMode::NORMAL_MODE; + modifiedSessionParams = sessionParams; + auto sessionParamsBuffer = sessionParams.release(); + std::vector<uint8_t> rawSessionParam = + std::vector(reinterpret_cast<uint8_t*>(sessionParamsBuffer), + reinterpret_cast<uint8_t*>(sessionParamsBuffer) + + get_camera_metadata_size(sessionParamsBuffer)); + + config.sessionParams.metadata = rawSessionParam; + config.streamConfigCounter = 0; + config.streams = {previewStream}; + config.streamConfigCounter = 0; + config.multiResolutionInputImage = false; + + bool newSessionParamsAvailable = false; + for (const auto& it : availableSessionKeys) { + if (modifiedSessionParams.exists(it)) { + modifiedSessionParams.erase(it); + newSessionParamsAvailable = true; + break; + } + } + if (newSessionParamsAvailable) { + auto modifiedSessionParamsBuffer = 
modifiedSessionParams.release(); + verifySessionReconfigurationQuery(mSession, sessionParamsBuffer, + modifiedSessionParamsBuffer); + modifiedSessionParams.acquire(modifiedSessionParamsBuffer); + } + + std::vector<HalStream> halConfigs; + ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(1u, halConfigs.size()); + + sessionParams.acquire(sessionParamsBuffer); + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that all supported preview + still capture stream combinations +// can be configured successfully. +TEST_P(CameraAidlTest, configureStreamsPreviewStillOutputs) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputBlobStreams; + std::vector<AvailableStream> outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + AvailableStream blobThreshold = {INT32_MAX, INT32_MAX, static_cast<int32_t>(PixelFormat::BLOB)}; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + + std::shared_ptr<ICameraDevice> cameraDevice; + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, + &cameraDevice /*out*/); + + camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + + // Check if camera support depth only + if (isDepthOnly(staticMeta)) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + outputBlobStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold)); + ASSERT_NE(0u, outputBlobStreams.size()); + + outputPreviewStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold)); + ASSERT_NE(0u, outputPreviewStreams.size()); + + int32_t jpegBufferSize = 0; 
+ ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + uint32_t streamConfigCounter = 0; + + for (auto& blobIter : outputBlobStreams) { + for (auto& previewIter : outputPreviewStreams) { + Stream previewStream = { + streamId++, + StreamType::OUTPUT, + previewIter.width, + previewIter.height, + static_cast<PixelFormat>(previewIter.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + /*bufferSize*/ 0, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + Stream blobStream = { + streamId++, + StreamType::OUTPUT, + blobIter.width, + blobIter.height, + static_cast<PixelFormat>(blobIter.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_CPU_READ), + Dataspace::JFIF, + StreamRotation::ROTATION_0, + std::string(), + /*bufferSize*/ 0, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + std::vector<Stream> streams = {previewStream, blobStream}; + StreamConfiguration config; + + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + config.streamConfigCounter = streamConfigCounter++; + verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true, + /*expectStreamCombQuery*/ false); + + std::vector<HalStream> halConfigs; + ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(2u, halConfigs.size()); + } + } + + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// In case constrained mode is supported, test whether it can be +// configured. Additionally check for common invalid inputs when +// using this mode. 
+TEST_P(CameraAidlTest, configureStreamsConstrainedOutputs) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + std::shared_ptr<ICameraDevice> cameraDevice; + + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, + &cameraDevice /*out*/); + camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + + Status rc = isConstrainedModeAvailable(staticMeta); + if (Status::OPERATION_NOT_SUPPORTED == rc) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + ASSERT_EQ(Status::OK, rc); + + AvailableStream hfrStream; + rc = pickConstrainedModeSize(staticMeta, hfrStream); + ASSERT_EQ(Status::OK, rc); + + int32_t streamId = 0; + uint32_t streamConfigCounter = 0; + Stream stream = {streamId, + StreamType::OUTPUT, + hfrStream.width, + hfrStream.height, + static_cast<PixelFormat>(hfrStream.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + /*bufferSize*/ 0, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + std::vector<Stream> streams = {stream}; + StreamConfiguration config; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config); + + verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true, + /*expectStreamCombQuery*/ false); + + config.streamConfigCounter = streamConfigCounter++; + std::vector<HalStream> halConfigs; + ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(1u, halConfigs.size()); + ASSERT_EQ(halConfigs[0].id, streamId); + + stream = {streamId++, + StreamType::OUTPUT, + static_cast<uint32_t>(0), + static_cast<uint32_t>(0), + 
static_cast<PixelFormat>(hfrStream.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + /*bufferSize*/ 0, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config); + + config.streamConfigCounter = streamConfigCounter++; + std::vector<HalStream> halConfig; + ret = mSession->configureStreams(config, &halConfig); + ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == + ret.getServiceSpecificError() || + static_cast<int32_t>(Status::INTERNAL_ERROR) == ret.getServiceSpecificError()); + + stream = {streamId++, + StreamType::OUTPUT, + INT32_MAX, + INT32_MAX, + static_cast<PixelFormat>(hfrStream.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + /*bufferSize*/ 0, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config); + + config.streamConfigCounter = streamConfigCounter++; + halConfigs.clear(); + ret = mSession->configureStreams(config, &halConfigs); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError()); + + stream = {streamId++, + StreamType::OUTPUT, + hfrStream.width, + hfrStream.height, + static_cast<PixelFormat>(UINT32_MAX), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + /*bufferSize*/ 0, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + streams[0] = stream; + createStreamConfiguration(streams, 
StreamConfigurationMode::CONSTRAINED_HIGH_SPEED_MODE, + &config); + + config.streamConfigCounter = streamConfigCounter++; + halConfigs.clear(); + ret = mSession->configureStreams(config, &halConfigs); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError()); + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that all supported video + snapshot stream combinations can +// be configured successfully. +TEST_P(CameraAidlTest, configureStreamsVideoStillOutputs) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputBlobStreams; + std::vector<AvailableStream> outputVideoStreams; + AvailableStream videoThreshold = {kMaxVideoWidth, kMaxVideoHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + AvailableStream blobThreshold = {kMaxVideoWidth, kMaxVideoHeight, + static_cast<int32_t>(PixelFormat::BLOB)}; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + std::shared_ptr<ICameraDevice> cameraDevice; + + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, + &cameraDevice /*out*/); + + camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + + // Check if camera support depth only + if (isDepthOnly(staticMeta)) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + outputBlobStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputBlobStreams, &blobThreshold)); + ASSERT_NE(0u, outputBlobStreams.size()); + + outputVideoStreams.clear(); + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputVideoStreams, &videoThreshold)); + ASSERT_NE(0u, outputVideoStreams.size()); + + int32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + 
uint32_t streamConfigCounter = 0; + for (auto& blobIter : outputBlobStreams) { + for (auto& videoIter : outputVideoStreams) { + Stream videoStream = { + streamId++, + StreamType::OUTPUT, + videoIter.width, + videoIter.height, + static_cast<PixelFormat>(videoIter.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_VIDEO_ENCODER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + Stream blobStream = { + streamId++, + StreamType::OUTPUT, + blobIter.width, + blobIter.height, + static_cast<PixelFormat>(blobIter.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_CPU_READ), + Dataspace::JFIF, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + std::vector<Stream> streams = {videoStream, blobStream}; + StreamConfiguration config; + + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + verifyStreamCombination(cameraDevice, config, /*expectedStatus*/ true, + /*expectStreamCombQuery*/ false); + + config.streamConfigCounter = streamConfigCounter++; + std::vector<HalStream> halConfigs; + ndk::ScopedAStatus ret = mSession->configureStreams(config, &halConfigs); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(2u, halConfigs.size()); + } + } + + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Generate and verify a camera capture request +TEST_P(CameraAidlTest, processCaptureRequestPreview) { + // TODO(b/220897574): Failing with BUFFER_ERROR + processCaptureRequestInternal(GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, RequestTemplate::PREVIEW, + false /*secureOnlyCameras*/); +} + +// Generate and verify a secure camera capture request +TEST_P(CameraAidlTest, processSecureCaptureRequest) 
{ + processCaptureRequestInternal(GRALLOC1_PRODUCER_USAGE_PROTECTED, RequestTemplate::STILL_CAPTURE, + true /*secureOnlyCameras*/); +} + +TEST_P(CameraAidlTest, processCaptureRequestPreviewStabilization) { + std::unordered_map<std::string, nsecs_t> cameraDeviceToTimeLag; + processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ false, + cameraDeviceToTimeLag); + processPreviewStabilizationCaptureRequestInternal(/*previewStabilizationOn*/ true, + cameraDeviceToTimeLag); +} + +// Generate and verify a multi-camera capture request +TEST_P(CameraAidlTest, processMultiCaptureRequestPreview) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::YCBCR_420_888)}; + int64_t bufferId = 1; + uint32_t frameNumber = 1; + std::vector<uint8_t> settings; + std::vector<uint8_t> emptySettings; + std::string invalidPhysicalId = "-1"; + + for (const auto& name : cameraDeviceNames) { + std::string version, deviceId; + ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId)); + CameraMetadata metadata; + + std::shared_ptr<ICameraDevice> unusedDevice; + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &metadata /*out*/, + &unusedDevice /*out*/); + + camera_metadata_t* staticMeta = + reinterpret_cast<camera_metadata_t*>(metadata.metadata.data()); + Status rc = isLogicalMultiCamera(staticMeta); + if (Status::OPERATION_NOT_SUPPORTED == rc) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + std::unordered_set<std::string> physicalIds; + rc = getPhysicalCameraIds(staticMeta, &physicalIds); + ASSERT_TRUE(Status::OK == rc); + ASSERT_TRUE(physicalIds.size() > 1); + + std::unordered_set<int32_t> physicalRequestKeyIDs; + rc = getSupportedKeys(staticMeta, ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS, + &physicalRequestKeyIDs); + 
ASSERT_TRUE(Status::OK == rc); + if (physicalRequestKeyIDs.empty()) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + // The logical camera doesn't support any individual physical requests. + continue; + } + + android::hardware::camera::common::V1_0::helper::CameraMetadata defaultPreviewSettings; + android::hardware::camera::common::V1_0::helper::CameraMetadata filteredSettings; + constructFilteredSettings(mSession, physicalRequestKeyIDs, RequestTemplate::PREVIEW, + &defaultPreviewSettings, &filteredSettings); + if (filteredSettings.isEmpty()) { + // No physical device settings in default request. + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + const camera_metadata_t* settingsBuffer = defaultPreviewSettings.getAndLock(); + uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer; + settings.assign(rawSettingsBuffer, + rawSettingsBuffer + get_camera_metadata_size(settingsBuffer)); + CameraMetadata settingsMetadata = {settings}; + overrideRotateAndCrop(&settingsMetadata); + + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + + // Leave only 2 physical devices in the id set. 
+ auto it = physicalIds.begin(); + std::string physicalDeviceId = *it; + it++; + physicalIds.erase(++it, physicalIds.end()); + ASSERT_EQ(physicalIds.size(), 2u); + + std::vector<HalStream> halStreams; + bool supportsPartialResults = false; + bool useHalBufManager = false; + int32_t partialResultCount = 0; + Stream previewStream; + std::shared_ptr<DeviceCb> cb; + + configurePreviewStreams(name, mProvider, &previewThreshold, physicalIds, &mSession, + &previewStream, &halStreams /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, + &useHalBufManager /*out*/, &cb /*out*/, 0 /*streamConfigCounter*/); + + ::aidl::android::hardware::common::fmq::MQDescriptor< + int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite> + descriptor; + auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor); + ASSERT_TRUE(resultQueueRet.isOk()); + std::shared_ptr<ResultMetadataQueue> resultQueue = + std::make_shared<ResultMetadataQueue>(descriptor); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
+ } + + std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>( + static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults, + partialResultCount, physicalIds, resultQueue); + + std::vector<CaptureRequest> requests(1); + CaptureRequest& request = requests[0]; + request.frameNumber = frameNumber; + request.fmqSettingsSize = 0; + request.settings.metadata = settings; + + std::vector<StreamBuffer>& outputBuffers = request.outputBuffers; + + std::vector<buffer_handle_t> graphicBuffers; + graphicBuffers.reserve(halStreams.size()); + outputBuffers.resize(halStreams.size()); + size_t k = 0; + for (const auto& halStream : halStreams) { + buffer_handle_t buffer_handle; + if (useHalBufManager) { + outputBuffers[k] = {halStream.id, /*bufferId*/ 0, NativeHandle(), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage( + static_cast<uint64_t>(halStream.producerUsage), + static_cast<uint64_t>(halStream.consumerUsage)), + halStream.overrideFormat, &buffer_handle); + graphicBuffers.push_back(buffer_handle); + outputBuffers[k] = { + halStream.id, bufferId, ::android::makeToAidl(buffer_handle), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + bufferId++; + } + k++; + } + + std::vector<PhysicalCameraSetting> camSettings(1); + const camera_metadata_t* filteredSettingsBuffer = filteredSettings.getAndLock(); + uint8_t* rawFilteredSettingsBuffer = (uint8_t*)filteredSettingsBuffer; + camSettings[0].settings = {std::vector( + rawFilteredSettingsBuffer, + rawFilteredSettingsBuffer + get_camera_metadata_size(filteredSettingsBuffer))}; + overrideRotateAndCrop(&camSettings[0].settings); + camSettings[0].fmqSettingsSize = 0; + camSettings[0].physicalCameraId = physicalDeviceId; + + request.inputBuffer = { + -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()}; + request.physicalCameraSettings = camSettings; + + { + 
std::unique_lock<std::mutex> l(mLock); + mInflightMap.clear(); + mInflightMap[frameNumber] = inflightReq; + } + + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + ndk::ScopedAStatus returnStatus = + mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock<std::mutex> l(mLock); + while (!inflightReq->errorCodeValid && + ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq->errorCodeValid); + ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u); + + request.frameNumber++; + // Empty settings should be supported after the first call + // for repeating requests. + request.settings.metadata.clear(); + request.physicalCameraSettings[0].settings.metadata.clear(); + // The buffer has been registered to HAL by bufferId, so per + // API contract we should send a null handle for this buffer + request.outputBuffers[0].buffer = NativeHandle(); + mInflightMap.clear(); + inflightReq = std::make_shared<InFlightRequest>( + static_cast<ssize_t>(physicalIds.size()), false, supportsPartialResults, + partialResultCount, physicalIds, resultQueue); + mInflightMap[request.frameNumber] = inflightReq; + } + + returnStatus = + mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock<std::mutex> l(mLock); + while (!inflightReq->errorCodeValid && + ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, 
timeout)); + } + + ASSERT_FALSE(inflightReq->errorCodeValid); + ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u); + } + + // Invalid physical camera id should fail process requests + frameNumber++; + camSettings[0].physicalCameraId = invalidPhysicalId; + camSettings[0].settings.metadata = settings; + + request.physicalCameraSettings = camSettings; // Invalid camera settings + returnStatus = + mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), + returnStatus.getServiceSpecificError()); + + defaultPreviewSettings.unlock(settingsBuffer); + filteredSettings.unlock(filteredSettingsBuffer); + + if (useHalBufManager) { + std::vector<int32_t> streamIds(halStreams.size()); + for (size_t i = 0; i < streamIds.size(); i++) { + streamIds[i] = halStreams[i].id; + } + verifyBuffersReturned(mSession, streamIds, cb); + } + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Generate and verify an ultra high resolution capture request +TEST_P(CameraAidlTest, processUltraHighResolutionRequest) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + int64_t bufferId = 1; + int32_t frameNumber = 1; + CameraMetadata settings; + + for (const auto& name : cameraDeviceNames) { + std::string version, deviceId; + ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId)); + CameraMetadata meta; + + std::shared_ptr<ICameraDevice> unusedDevice; + openEmptyDeviceSession(name, mProvider, &mSession, &meta, &unusedDevice); + camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + if (!isUltraHighResolution(staticMeta)) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + CameraMetadata req; + android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings; + ndk::ScopedAStatus ret = + 
mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req); + ASSERT_TRUE(ret.isOk()); + + const camera_metadata_t* metadata = + reinterpret_cast<const camera_metadata_t*>(req.metadata.data()); + size_t expectedSize = req.metadata.size(); + int result = validate_camera_metadata_structure(metadata, &expectedSize); + ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED)); + + size_t entryCount = get_camera_metadata_entry_count(metadata); + ASSERT_GT(entryCount, 0u); + defaultSettings = metadata; + uint8_t sensorPixelMode = + static_cast<uint8_t>(ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION); + ASSERT_EQ(::android::OK, + defaultSettings.update(ANDROID_SENSOR_PIXEL_MODE, &sensorPixelMode, 1)); + + const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock(); + uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer; + settings.metadata = std::vector( + rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer)); + overrideRotateAndCrop(&settings); + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + + std::vector<HalStream> halStreams; + bool supportsPartialResults = false; + bool useHalBufManager = false; + int32_t partialResultCount = 0; + Stream previewStream; + std::shared_ptr<DeviceCb> cb; + + std::list<PixelFormat> pixelFormats = {PixelFormat::YCBCR_420_888, PixelFormat::RAW16}; + for (PixelFormat format : pixelFormats) { + configureStreams(name, mProvider, format, &mSession, &previewStream, &halStreams, + &supportsPartialResults, &partialResultCount, &useHalBufManager, &cb, + 0, /*maxResolution*/ true); + ASSERT_NE(mSession, nullptr); + + ::aidl::android::hardware::common::fmq::MQDescriptor< + int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite> + descriptor; + auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor); + ASSERT_TRUE(resultQueueRet.isOk()); + + std::shared_ptr<ResultMetadataQueue> resultQueue = + 
std::make_shared<ResultMetadataQueue>(descriptor); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. + } + + std::vector<buffer_handle_t> graphicBuffers; + graphicBuffers.reserve(halStreams.size()); + std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>( + static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults, + partialResultCount, std::unordered_set<std::string>(), resultQueue); + + std::vector<CaptureRequest> requests(1); + CaptureRequest& request = requests[0]; + std::vector<StreamBuffer>& outputBuffers = request.outputBuffers; + outputBuffers.resize(halStreams.size()); + + size_t k = 0; + for (const auto& halStream : halStreams) { + buffer_handle_t buffer_handle; + if (useHalBufManager) { + outputBuffers[k] = {halStream.id, 0, + NativeHandle(), BufferStatus::OK, + NativeHandle(), NativeHandle()}; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage( + static_cast<uint64_t>(halStream.producerUsage), + static_cast<uint64_t>(halStream.consumerUsage)), + halStream.overrideFormat, &buffer_handle); + graphicBuffers.push_back(buffer_handle); + outputBuffers[k] = { + halStream.id, bufferId, ::android::makeToAidl(buffer_handle), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + bufferId++; + } + k++; + } + + request.inputBuffer = { + -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()}; + request.frameNumber = frameNumber; + request.fmqSettingsSize = 0; + request.settings = settings; + request.inputWidth = 0; + request.inputHeight = 0; + + { + std::unique_lock<std::mutex> l(mLock); + mInflightMap.clear(); + mInflightMap[frameNumber] = inflightReq; + } + + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + ndk::ScopedAStatus returnStatus = + 
mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock<std::mutex> l(mLock); + while (!inflightReq->errorCodeValid && + ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq->errorCodeValid); + ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u); + } + if (useHalBufManager) { + std::vector<int32_t> streamIds(halStreams.size()); + for (size_t i = 0; i < streamIds.size(); i++) { + streamIds[i] = halStreams[i].id; + } + verifyBuffersReturned(mSession, streamIds, cb); + } + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } + } +} + +// Generate and verify 10-bit dynamic range request +TEST_P(CameraAidlTest, process10BitDynamicRangeRequest) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + int64_t bufferId = 1; + int32_t frameNumber = 1; + CameraMetadata settings; + + for (const auto& name : cameraDeviceNames) { + std::string version, deviceId; + ASSERT_TRUE(matchDeviceName(name, mProviderType, &version, &deviceId)); + CameraMetadata meta; + std::shared_ptr<ICameraDevice> device; + openEmptyDeviceSession(name, mProvider, &mSession, &meta, &device); + camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + if (!is10BitDynamicRangeCapable(staticMeta)) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + std::vector< + aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap> + profileList; + get10BitDynamicRangeProfiles(staticMeta, &profileList); + ASSERT_FALSE(profileList.empty()); + + CameraMetadata req; + 
android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings; + ndk::ScopedAStatus ret = + mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, &req); + ASSERT_TRUE(ret.isOk()); + + const camera_metadata_t* metadata = + reinterpret_cast<const camera_metadata_t*>(req.metadata.data()); + size_t expectedSize = req.metadata.size(); + int result = validate_camera_metadata_structure(metadata, &expectedSize); + ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED)); + + size_t entryCount = get_camera_metadata_entry_count(metadata); + ASSERT_GT(entryCount, 0u); + defaultSettings = metadata; + + const camera_metadata_t* settingsBuffer = defaultSettings.getAndLock(); + uint8_t* rawSettingsBuffer = (uint8_t*)settingsBuffer; + settings.metadata = std::vector( + rawSettingsBuffer, rawSettingsBuffer + get_camera_metadata_size(settingsBuffer)); + overrideRotateAndCrop(&settings); + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + + std::vector<HalStream> halStreams; + bool supportsPartialResults = false; + bool useHalBufManager = false; + int32_t partialResultCount = 0; + Stream previewStream; + std::shared_ptr<DeviceCb> cb; + for (const auto& profile : profileList) { + configureStreams(name, mProvider, PixelFormat::IMPLEMENTATION_DEFINED, &mSession, + &previewStream, &halStreams, &supportsPartialResults, + &partialResultCount, &useHalBufManager, &cb, 0, + /*maxResolution*/ false, profile); + ASSERT_NE(mSession, nullptr); + + ::aidl::android::hardware::common::fmq::MQDescriptor< + int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite> + descriptor; + auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor); + ASSERT_TRUE(resultQueueRet.isOk()); + + std::shared_ptr<ResultMetadataQueue> resultQueue = + std::make_shared<ResultMetadataQueue>(descriptor); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result 
metadata fmq, not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. + } + + std::vector<buffer_handle_t> graphicBuffers; + graphicBuffers.reserve(halStreams.size()); + + std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>( + static_cast<ssize_t>(halStreams.size()), false, supportsPartialResults, + partialResultCount, std::unordered_set<std::string>(), resultQueue); + + std::vector<CaptureRequest> requests(1); + CaptureRequest& request = requests[0]; + std::vector<StreamBuffer>& outputBuffers = request.outputBuffers; + outputBuffers.resize(halStreams.size()); + + size_t k = 0; + for (const auto& halStream : halStreams) { + buffer_handle_t buffer_handle; + if (useHalBufManager) { + outputBuffers[k] = {halStream.id, 0, + NativeHandle(), BufferStatus::OK, + NativeHandle(), NativeHandle()}; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage( + static_cast<uint64_t>(halStream.producerUsage), + static_cast<uint64_t>(halStream.consumerUsage)), + halStream.overrideFormat, &buffer_handle); + + graphicBuffers.push_back(buffer_handle); + outputBuffers[k] = { + halStream.id, bufferId, android::makeToAidl(buffer_handle), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + bufferId++; + } + k++; + } + + request.inputBuffer = { + -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()}; + request.frameNumber = frameNumber; + request.fmqSettingsSize = 0; + request.settings = settings; + request.inputWidth = 0; + request.inputHeight = 0; + + { + std::unique_lock<std::mutex> l(mLock); + mInflightMap.clear(); + mInflightMap[frameNumber] = inflightReq; + } + + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + ndk::ScopedAStatus returnStatus = + mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(numRequestProcessed, 1u); + + { + 
std::unique_lock<std::mutex> l(mLock); + while (!inflightReq->errorCodeValid && + ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq->errorCodeValid); + ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u); + verify10BitMetadata(mHandleImporter, *inflightReq, profile); + } + if (useHalBufManager) { + std::vector<int32_t> streamIds(halStreams.size()); + for (size_t i = 0; i < streamIds.size(); i++) { + streamIds[i] = halStreams[i].id; + } + mSession->signalStreamFlush(streamIds, /*streamConfigCounter*/ 0); + cb->waitForBuffersReturned(); + } + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } + } +} + +// Generate and verify a burst containing alternating sensor sensitivity values +TEST_P(CameraAidlTest, processCaptureRequestBurstISO) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + int64_t bufferId = 1; + int32_t frameNumber = 1; + float isoTol = .03f; + CameraMetadata settings; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + settings.metadata.clear(); + std::shared_ptr<ICameraDevice> unusedDevice; + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, + &unusedDevice /*out*/); + camera_metadata_t* staticMetaBuffer = + clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(meta.metadata.data())); + ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta( + staticMetaBuffer); + + camera_metadata_entry_t hwLevel = staticMeta.find(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL); + ASSERT_TRUE(0 < hwLevel.count); + if (ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED == 
hwLevel.data.u8[0] || + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL == hwLevel.data.u8[0]) { + // Limited/External devices can skip this test + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + camera_metadata_entry_t isoRange = staticMeta.find(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE); + ASSERT_EQ(isoRange.count, 2u); + + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + + bool supportsPartialResults = false; + bool useHalBufManager = false; + int32_t partialResultCount = 0; + Stream previewStream; + std::vector<HalStream> halStreams; + std::shared_ptr<DeviceCb> cb; + configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/, + &previewStream /*out*/, &halStreams /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, + &useHalBufManager /*out*/, &cb /*out*/); + + ::aidl::android::hardware::common::fmq::MQDescriptor< + int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite> + descriptor; + auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor); + std::shared_ptr<ResultMetadataQueue> resultQueue = + std::make_shared<ResultMetadataQueue>(descriptor); + ASSERT_TRUE(resultQueueRet.isOk()); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
+ } + + ret = mSession->constructDefaultRequestSettings(RequestTemplate::PREVIEW, &settings); + ASSERT_TRUE(ret.isOk()); + + ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta; + std::vector<CaptureRequest> requests(kBurstFrameCount); + std::vector<buffer_handle_t> buffers(kBurstFrameCount); + std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount); + std::vector<int32_t> isoValues(kBurstFrameCount); + std::vector<CameraMetadata> requestSettings(kBurstFrameCount); + + for (int32_t i = 0; i < kBurstFrameCount; i++) { + std::unique_lock<std::mutex> l(mLock); + CaptureRequest& request = requests[i]; + std::vector<StreamBuffer>& outputBuffers = request.outputBuffers; + outputBuffers.resize(1); + StreamBuffer& outputBuffer = outputBuffers[0]; + + isoValues[i] = ((i % 2) == 0) ? isoRange.data.i32[0] : isoRange.data.i32[1]; + if (useHalBufManager) { + outputBuffer = {halStreams[0].id, 0, + NativeHandle(), BufferStatus::OK, + NativeHandle(), NativeHandle()}; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage( + static_cast<uint64_t>(halStreams[0].producerUsage), + static_cast<uint64_t>(halStreams[0].consumerUsage)), + halStreams[0].overrideFormat, &buffers[i]); + outputBuffer = {halStreams[0].id, bufferId + i, ::android::makeToAidl(buffers[i]), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + } + + requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data())); + + // Disable all 3A routines + uint8_t mode = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF); + ASSERT_EQ(::android::OK, requestMeta.update(ANDROID_CONTROL_MODE, &mode, 1)); + ASSERT_EQ(::android::OK, + requestMeta.update(ANDROID_SENSOR_SENSITIVITY, &isoValues[i], 1)); + camera_metadata_t* metaBuffer = requestMeta.release(); + uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer); + requestSettings[i].metadata = std::vector( + rawMetaBuffer, rawMetaBuffer + 
get_camera_metadata_size(metaBuffer)); + overrideRotateAndCrop(&(requestSettings[i])); + + request.frameNumber = frameNumber + i; + request.fmqSettingsSize = 0; + request.settings = requestSettings[i]; + request.inputBuffer = { + -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()}; + + inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults, + partialResultCount, resultQueue); + mInflightMap[frameNumber + i] = inflightReqs[i]; + } + + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + + ndk::ScopedAStatus returnStatus = + mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(numRequestProcessed, kBurstFrameCount); + + for (size_t i = 0; i < kBurstFrameCount; i++) { + std::unique_lock<std::mutex> l(mLock); + while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) || + (!inflightReqs[i]->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReqs[i]->errorCodeValid); + ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u); + ASSERT_EQ(previewStream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId); + ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty()); + ASSERT_TRUE(inflightReqs[i]->collectedResult.exists(ANDROID_SENSOR_SENSITIVITY)); + camera_metadata_entry_t isoResult = + inflightReqs[i]->collectedResult.find(ANDROID_SENSOR_SENSITIVITY); + ASSERT_TRUE(std::abs(isoResult.data.i32[0] - isoValues[i]) <= + std::round(isoValues[i] * isoTol)); + } + + if (useHalBufManager) { + verifyBuffersReturned(mSession, previewStream.id, cb); + } + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Test whether an incorrect capture request with missing settings will +// be 
reported correctly. +TEST_P(CameraAidlTest, processCaptureRequestInvalidSinglePreview) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + int64_t bufferId = 1; + int32_t frameNumber = 1; + CameraMetadata settings; + + for (const auto& name : cameraDeviceNames) { + Stream previewStream; + std::vector<HalStream> halStreams; + std::shared_ptr<DeviceCb> cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + int32_t partialResultCount = 0; + configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/, + &previewStream /*out*/, &halStreams /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, + &useHalBufManager /*out*/, &cb /*out*/); + ASSERT_NE(mSession, nullptr); + ASSERT_FALSE(halStreams.empty()); + + buffer_handle_t buffer_handle = nullptr; + + if (useHalBufManager) { + bufferId = 0; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage( + static_cast<uint64_t>(halStreams[0].producerUsage), + static_cast<uint64_t>(halStreams[0].consumerUsage)), + halStreams[0].overrideFormat, &buffer_handle); + } + + std::vector<CaptureRequest> requests(1); + CaptureRequest& request = requests[0]; + std::vector<StreamBuffer>& outputBuffers = request.outputBuffers; + outputBuffers.resize(1); + StreamBuffer& outputBuffer = outputBuffers[0]; + + outputBuffer = { + halStreams[0].id, + bufferId, + buffer_handle == nullptr ? 
NativeHandle() : ::android::makeToAidl(buffer_handle), + BufferStatus::OK, + NativeHandle(), + NativeHandle()}; + + request.inputBuffer = { + -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()}; + request.frameNumber = frameNumber; + request.fmqSettingsSize = 0; + request.settings = settings; + + // Settings were not correctly initialized, we should fail here + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + ndk::ScopedAStatus ret = + mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError()); + ASSERT_EQ(numRequestProcessed, 0u); + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify camera offline session behavior +TEST_P(CameraAidlTest, switchToOffline) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + AvailableStream threshold = {kMaxStillWidth, kMaxStillHeight, + static_cast<int32_t>(PixelFormat::BLOB)}; + int64_t bufferId = 1; + int32_t frameNumber = 1; + CameraMetadata settings; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + { + std::shared_ptr<ICameraDevice> unusedDevice; + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, + &unusedDevice); + camera_metadata_t* staticMetaBuffer = clone_camera_metadata( + reinterpret_cast<camera_metadata_t*>(meta.metadata.data())); + ::android::hardware::camera::common::V1_0::helper::CameraMetadata staticMeta( + staticMetaBuffer); + + if (isOfflineSessionSupported(staticMetaBuffer) != Status::OK) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } + + bool supportsPartialResults = false; + int32_t partialResultCount = 0; + Stream stream; + std::vector<HalStream> halStreams; + 
std::shared_ptr<DeviceCb> cb; + int32_t jpegBufferSize; + bool useHalBufManager; + configureOfflineStillStream(name, mProvider, &threshold, &mSession /*out*/, &stream /*out*/, + &halStreams /*out*/, &supportsPartialResults /*out*/, + &partialResultCount /*out*/, &cb /*out*/, + &jpegBufferSize /*out*/, &useHalBufManager /*out*/); + + auto ret = mSession->constructDefaultRequestSettings(RequestTemplate::STILL_CAPTURE, + &settings); + ASSERT_TRUE(ret.isOk()); + + ::aidl::android::hardware::common::fmq::MQDescriptor< + int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite> + descriptor; + + ndk::ScopedAStatus resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor); + ASSERT_TRUE(resultQueueRet.isOk()); + std::shared_ptr<ResultMetadataQueue> resultQueue = + std::make_shared<ResultMetadataQueue>(descriptor); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
+ } + + ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta; + + std::vector<buffer_handle_t> buffers(kBurstFrameCount); + std::vector<std::shared_ptr<InFlightRequest>> inflightReqs(kBurstFrameCount); + std::vector<CameraMetadata> requestSettings(kBurstFrameCount); + + std::vector<CaptureRequest> requests(kBurstFrameCount); + + HalStream halStream = halStreams[0]; + for (uint32_t i = 0; i < kBurstFrameCount; i++) { + CaptureRequest& request = requests[i]; + std::vector<StreamBuffer>& outputBuffers = request.outputBuffers; + outputBuffers.resize(1); + StreamBuffer& outputBuffer = outputBuffers[0]; + + std::unique_lock<std::mutex> l(mLock); + if (useHalBufManager) { + outputBuffer = {halStream.id, 0, NativeHandle(), BufferStatus::OK, NativeHandle(), + NativeHandle()}; + } else { + // jpeg buffer (w,h) = (blobLen, 1) + allocateGraphicBuffer(jpegBufferSize, /*height*/ 1, + android_convertGralloc1To0Usage( + static_cast<uint64_t>(halStream.producerUsage), + static_cast<uint64_t>(halStream.consumerUsage)), + halStream.overrideFormat, &buffers[i]); + outputBuffer = {halStream.id, bufferId + i, ::android::makeToAidl(buffers[i]), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + } + + requestMeta.clear(); + requestMeta.append(reinterpret_cast<camera_metadata_t*>(settings.metadata.data())); + + camera_metadata_t* metaBuffer = requestMeta.release(); + uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer); + requestSettings[i].metadata = std::vector( + rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer)); + overrideRotateAndCrop(&requestSettings[i]); + + request.frameNumber = frameNumber + i; + request.fmqSettingsSize = 0; + request.settings = requestSettings[i]; + request.inputBuffer = {/*streamId*/ -1, + /*bufferId*/ 0, NativeHandle(), + BufferStatus::ERROR, NativeHandle(), + NativeHandle()}; + + inflightReqs[i] = std::make_shared<InFlightRequest>(1, false, supportsPartialResults, + partialResultCount, resultQueue); + 
mInflightMap[frameNumber + i] = inflightReqs[i]; + } + + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + + ndk::ScopedAStatus returnStatus = + mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_TRUE(returnStatus.isOk()); + ASSERT_EQ(numRequestProcessed, kBurstFrameCount); + + std::vector<int32_t> offlineStreamIds = {halStream.id}; + CameraOfflineSessionInfo offlineSessionInfo; + std::shared_ptr<ICameraOfflineSession> offlineSession; + returnStatus = + mSession->switchToOffline(offlineStreamIds, &offlineSessionInfo, &offlineSession); + + if (!halStreams[0].supportOffline) { + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), + returnStatus.getServiceSpecificError()); + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + ASSERT_TRUE(returnStatus.isOk()); + // Hal might be unable to find any requests qualified for offline mode. + if (offlineSession == nullptr) { + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + ASSERT_EQ(offlineSessionInfo.offlineStreams.size(), 1u); + ASSERT_EQ(offlineSessionInfo.offlineStreams[0].id, halStream.id); + ASSERT_NE(offlineSessionInfo.offlineRequests.size(), 0u); + + // close device session to make sure offline session does not rely on it + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + + ::aidl::android::hardware::common::fmq::MQDescriptor< + int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite> + offlineResultDescriptor; + + auto offlineResultQueueRet = + offlineSession->getCaptureResultMetadataQueue(&offlineResultDescriptor); + std::shared_ptr<ResultMetadataQueue> offlineResultQueue = + std::make_shared<ResultMetadataQueue>(descriptor); + if (!offlineResultQueue->isValid() || offlineResultQueue->availableToWrite() <= 0) { + ALOGE("%s: offline session returns empty result metadata fmq, not use it", __func__); + offlineResultQueue = 
nullptr; + // Don't use the queue onwards. + } + ASSERT_TRUE(offlineResultQueueRet.isOk()); + + updateInflightResultQueue(offlineResultQueue); + + ret = offlineSession->setCallback(cb); + ASSERT_TRUE(ret.isOk()); + + for (size_t i = 0; i < kBurstFrameCount; i++) { + std::unique_lock<std::mutex> l(mLock); + while (!inflightReqs[i]->errorCodeValid && ((0 < inflightReqs[i]->numBuffersLeft) || + (!inflightReqs[i]->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReqs[i]->errorCodeValid); + ASSERT_NE(inflightReqs[i]->resultOutputBuffers.size(), 0u); + ASSERT_EQ(stream.id, inflightReqs[i]->resultOutputBuffers[0].buffer.streamId); + ASSERT_FALSE(inflightReqs[i]->collectedResult.isEmpty()); + } + + ret = offlineSession->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Check whether an invalid capture request with missing output buffers +// will be reported correctly. 
+TEST_P(CameraAidlTest, processCaptureRequestInvalidBuffer) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputBlobStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + int32_t frameNumber = 1; + CameraMetadata settings; + + for (const auto& name : cameraDeviceNames) { + Stream previewStream; + std::vector<HalStream> halStreams; + std::shared_ptr<DeviceCb> cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + int32_t partialResultCount = 0; + configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/, + &previewStream /*out*/, &halStreams /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, + &useHalBufManager /*out*/, &cb /*out*/); + + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings); + ASSERT_TRUE(ret.isOk()); + overrideRotateAndCrop(&settings); + + std::vector<CaptureRequest> requests(1); + CaptureRequest& request = requests[0]; + std::vector<StreamBuffer>& outputBuffers = request.outputBuffers; + outputBuffers.resize(1); + // Empty output buffer + outputBuffers[0] = { + -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()}; + + request.inputBuffer = { + -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()}; + request.frameNumber = frameNumber; + request.fmqSettingsSize = 0; + request.settings = settings; + + // Output buffers are missing, we should fail here + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), ret.getServiceSpecificError()); + ASSERT_EQ(numRequestProcessed, 0u); + + ret = mSession->close(); + mSession = nullptr; 
+ ASSERT_TRUE(ret.isOk()); + } +} + +// Generate, trigger and flush a preview request +TEST_P(CameraAidlTest, flushPreviewRequest) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + int64_t bufferId = 1; + int32_t frameNumber = 1; + CameraMetadata settings; + + for (const auto& name : cameraDeviceNames) { + Stream previewStream; + std::vector<HalStream> halStreams; + std::shared_ptr<DeviceCb> cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + int32_t partialResultCount = 0; + + configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/, + &previewStream /*out*/, &halStreams /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, + &useHalBufManager /*out*/, &cb /*out*/); + + ASSERT_NE(mSession, nullptr); + ASSERT_NE(cb, nullptr); + ASSERT_FALSE(halStreams.empty()); + + ::aidl::android::hardware::common::fmq::MQDescriptor< + int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite> + descriptor; + + auto resultQueueRet = mSession->getCaptureResultMetadataQueue(&descriptor); + std::shared_ptr<ResultMetadataQueue> resultQueue = + std::make_shared<ResultMetadataQueue>(descriptor); + ASSERT_TRUE(resultQueueRet.isOk()); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq, not use it", __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
+ } + + std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>( + 1, false, supportsPartialResults, partialResultCount, resultQueue); + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + + ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &settings); + ASSERT_TRUE(ret.isOk()); + overrideRotateAndCrop(&settings); + + buffer_handle_t buffer_handle; + std::vector<CaptureRequest> requests(1); + CaptureRequest& request = requests[0]; + std::vector<StreamBuffer>& outputBuffers = request.outputBuffers; + outputBuffers.resize(1); + StreamBuffer& outputBuffer = outputBuffers[0]; + if (useHalBufManager) { + bufferId = 0; + outputBuffer = {halStreams[0].id, bufferId, NativeHandle(), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + } else { + allocateGraphicBuffer(previewStream.width, previewStream.height, + android_convertGralloc1To0Usage( + static_cast<uint64_t>(halStreams[0].producerUsage), + static_cast<uint64_t>(halStreams[0].consumerUsage)), + halStreams[0].overrideFormat, &buffer_handle); + outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + } + + request.frameNumber = frameNumber; + request.fmqSettingsSize = 0; + request.settings = settings; + request.inputBuffer = { + -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()}; + + { + std::unique_lock<std::mutex> l(mLock); + mInflightMap.clear(); + mInflightMap[frameNumber] = inflightReq; + } + + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + ret = mSession->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(numRequestProcessed, 1u); + + // Flush before waiting for request to complete. 
+ ndk::ScopedAStatus returnStatus = mSession->flush(); + ASSERT_TRUE(returnStatus.isOk()); + + { + std::unique_lock<std::mutex> l(mLock); + while (!inflightReq->errorCodeValid && + ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + if (!inflightReq->errorCodeValid) { + ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u); + ASSERT_EQ(previewStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId); + } else { + switch (inflightReq->errorCode) { + case ErrorCode::ERROR_REQUEST: + case ErrorCode::ERROR_RESULT: + case ErrorCode::ERROR_BUFFER: + // Expected + break; + case ErrorCode::ERROR_DEVICE: + default: + FAIL() << "Unexpected error:" + << static_cast<uint32_t>(inflightReq->errorCode); + } + } + } + + if (useHalBufManager) { + verifyBuffersReturned(mSession, previewStream.id, cb); + } + + ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that camera flushes correctly without any pending requests. 
+TEST_P(CameraAidlTest, flushEmpty) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + + for (const auto& name : cameraDeviceNames) { + Stream previewStream; + std::vector<HalStream> halStreams; + std::shared_ptr<DeviceCb> cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + + int32_t partialResultCount = 0; + configurePreviewStream(name, mProvider, &previewThreshold, &mSession /*out*/, + &previewStream /*out*/, &halStreams /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, + &useHalBufManager /*out*/, &cb /*out*/); + + ndk::ScopedAStatus returnStatus = mSession->flush(); + ASSERT_TRUE(returnStatus.isOk()); + + { + std::unique_lock<std::mutex> l(mLock); + auto timeout = std::chrono::system_clock::now() + + std::chrono::milliseconds(kEmptyFlushTimeoutMSec); + ASSERT_EQ(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +// Test camera provider notify method +TEST_P(CameraAidlTest, providerDeviceStateNotification) { + notifyDeviceState(ICameraProvider::DEVICE_STATE_BACK_COVERED); + notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL); +} + +// Verify that all supported stream formats and sizes can be configured +// successfully for injection camera. 
+TEST_P(CameraAidlTest, configureInjectionStreamsAvailableOutputs) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputStreams; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata metadata; + + std::shared_ptr<ICameraInjectionSession> injectionSession; + std::shared_ptr<ICameraDevice> unusedDevice; + openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/, + &unusedDevice /*out*/); + if (injectionSession == nullptr) { + continue; + } + + camera_metadata_t* staticMetaBuffer = + reinterpret_cast<camera_metadata_t*>(metadata.metadata.data()); + CameraMetadata chars; + chars.metadata = metadata.metadata; + + outputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + int32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + int32_t streamConfigCounter = 0; + for (auto& it : outputStreams) { + Dataspace dataspace = getDataspace(static_cast<PixelFormat>(it.format)); + Stream stream = {streamId, + StreamType::OUTPUT, + it.width, + it.height, + static_cast<PixelFormat>(it.format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + dataspace, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + 0, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + + std::vector<Stream> streams = {stream}; + StreamConfiguration config; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + + config.streamConfigCounter = streamConfigCounter++; + ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars); + ASSERT_TRUE(s.isOk()); + streamId++; + } + + std::shared_ptr<ICameraDeviceSession> session; + ndk::ScopedAStatus ret = 
injectionSession->getCameraDeviceSession(&session); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(session, nullptr); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Check for correct handling of invalid/incorrect configuration parameters for injection camera. +TEST_P(CameraAidlTest, configureInjectionStreamsInvalidOutputs) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputStreams; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata metadata; + std::shared_ptr<ICameraInjectionSession> injectionSession; + std::shared_ptr<ICameraDevice> unusedDevice; + openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/, + &unusedDevice); + if (injectionSession == nullptr) { + continue; + } + + camera_metadata_t* staticMetaBuffer = + reinterpret_cast<camera_metadata_t*>(metadata.metadata.data()); + std::shared_ptr<ICameraDeviceSession> session; + ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(session, nullptr); + + CameraMetadata chars; + chars.metadata = metadata.metadata; + + outputStreams.clear(); + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputStreams)); + ASSERT_NE(0u, outputStreams.size()); + + int32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMetaBuffer, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + int32_t streamId = 0; + Stream stream = {streamId++, + StreamType::OUTPUT, + 0, + 0, + static_cast<PixelFormat>(outputStreams[0].format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + 0, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + + int32_t streamConfigCounter = 0; + std::vector<Stream> streams = {stream}; + StreamConfiguration config; + 
createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + + config.streamConfigCounter = streamConfigCounter++; + ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars); + ASSERT_TRUE( + (static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == s.getServiceSpecificError()) || + (static_cast<int32_t>(Status::INTERNAL_ERROR) == s.getServiceSpecificError())); + + stream = {streamId++, + StreamType::OUTPUT, + INT32_MAX, + INT32_MAX, + static_cast<PixelFormat>(outputStreams[0].format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + 0, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + config.streamConfigCounter = streamConfigCounter++; + s = injectionSession->configureInjectionStreams(config, chars); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError()); + + for (auto& it : outputStreams) { + stream = {streamId++, + StreamType::OUTPUT, + it.width, + it.height, + static_cast<PixelFormat>(INT32_MAX), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + jpegBufferSize, + 0, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + config.streamConfigCounter = streamConfigCounter++; + s = injectionSession->configureInjectionStreams(config, chars); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError()); + + stream = {streamId++, + StreamType::OUTPUT, + it.width, + it.height, + static_cast<PixelFormat>(it.format), 
+ static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + static_cast<StreamRotation>(INT32_MAX), + std::string(), + jpegBufferSize, + 0, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + streams[0] = stream; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + config.streamConfigCounter = streamConfigCounter++; + s = injectionSession->configureInjectionStreams(config, chars); + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), s.getServiceSpecificError()); + } + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Check whether session parameters are supported for injection camera. If Hal support for them +// exist, then try to configure a preview stream using them. +TEST_P(CameraAidlTest, configureInjectionStreamsWithSessionParameters) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + std::vector<AvailableStream> outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + + for (const auto& name : cameraDeviceNames) { + CameraMetadata metadata; + std::shared_ptr<ICameraInjectionSession> injectionSession; + std::shared_ptr<ICameraDevice> unusedDevice; + openEmptyInjectionSession(name, mProvider, &injectionSession /*out*/, &metadata /*out*/, + &unusedDevice /*out*/); + if (injectionSession == nullptr) { + continue; + } + + std::shared_ptr<ICameraDeviceSession> session; + ndk::ScopedAStatus ret = injectionSession->getCameraDeviceSession(&session); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(session, nullptr); + + camera_metadata_t* staticMetaBuffer = + reinterpret_cast<camera_metadata_t*>(metadata.metadata.data()); + CameraMetadata chars; + chars.metadata = metadata.metadata; + + std::unordered_set<int32_t> availableSessionKeys; + Status rc = getSupportedKeys(staticMetaBuffer, 
ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, + &availableSessionKeys); + ASSERT_EQ(Status::OK, rc); + if (availableSessionKeys.empty()) { + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + android::hardware::camera::common::V1_0::helper::CameraMetadata previewRequestSettings; + android::hardware::camera::common::V1_0::helper::CameraMetadata sessionParams, + modifiedSessionParams; + constructFilteredSettings(session, availableSessionKeys, RequestTemplate::PREVIEW, + &previewRequestSettings, &sessionParams); + if (sessionParams.isEmpty()) { + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + continue; + } + + outputPreviewStreams.clear(); + + ASSERT_EQ(Status::OK, getAvailableOutputStreams(staticMetaBuffer, outputPreviewStreams, + &previewThreshold)); + ASSERT_NE(0u, outputPreviewStreams.size()); + + Stream previewStream = { + 0, + StreamType::OUTPUT, + outputPreviewStreams[0].width, + outputPreviewStreams[0].height, + static_cast<PixelFormat>(outputPreviewStreams[0].format), + static_cast<::aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + 0, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + std::vector<Stream> streams = {previewStream}; + StreamConfiguration config; + config.streams = streams; + config.operationMode = StreamConfigurationMode::NORMAL_MODE; + + modifiedSessionParams = sessionParams; + camera_metadata_t* sessionParamsBuffer = sessionParams.release(); + uint8_t* rawSessionParamsBuffer = reinterpret_cast<uint8_t*>(sessionParamsBuffer); + config.sessionParams.metadata = + std::vector(rawSessionParamsBuffer, + rawSessionParamsBuffer + get_camera_metadata_size(sessionParamsBuffer)); + + config.streamConfigCounter = 0; + config.streamConfigCounter = 0; + config.multiResolutionInputImage = false; + + ndk::ScopedAStatus s = injectionSession->configureInjectionStreams(config, chars); + ASSERT_TRUE(s.isOk()); + 
+ sessionParams.acquire(sessionParamsBuffer); + free_camera_metadata(staticMetaBuffer); + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +// Verify that valid stream use cases can be configured successfully, and invalid use cases +// fail stream configuration. +TEST_P(CameraAidlTest, configureStreamsUseCases) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + + for (const auto& name : cameraDeviceNames) { + CameraMetadata meta; + std::shared_ptr<ICameraDevice> cameraDevice; + + openEmptyDeviceSession(name, mProvider, &mSession /*out*/, &meta /*out*/, + &cameraDevice /*out*/); + + camera_metadata_t* staticMeta = reinterpret_cast<camera_metadata_t*>(meta.metadata.data()); + // Check if camera support depth only + if (isDepthOnly(staticMeta)) { + ndk::ScopedAStatus ret = mSession->close(); + mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + continue; + } + + std::vector<AvailableStream> outputPreviewStreams; + AvailableStream previewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::YCBCR_420_888)}; + ASSERT_EQ(Status::OK, + getAvailableOutputStreams(staticMeta, outputPreviewStreams, &previewThreshold)); + ASSERT_NE(0u, outputPreviewStreams.size()); + + // Combine valid and invalid stream use cases + std::vector<int32_t> useCases(kMandatoryUseCases); + useCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL + 1); + + std::vector<int32_t> supportedUseCases; + camera_metadata_ro_entry entry; + auto retcode = find_camera_metadata_ro_entry( + staticMeta, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, &entry); + if ((0 == retcode) && (entry.count > 0)) { + supportedUseCases.insert(supportedUseCases.end(), entry.data.i32, + entry.data.i32 + entry.count); + } else { + supportedUseCases.push_back(ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT); + } + + std::vector<Stream> streams(1); + streams[0] = { + 0, + StreamType::OUTPUT, + outputPreviewStreams[0].width, + 
outputPreviewStreams[0].height, + static_cast<PixelFormat>(outputPreviewStreams[0].format), + static_cast<::aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_CPU_READ), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + std::string(), + 0, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}, + aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap:: + ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD}; + + int32_t streamConfigCounter = 0; + CameraMetadata req; + StreamConfiguration config; + RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE; + ndk::ScopedAStatus ret = mSession->constructDefaultRequestSettings(reqTemplate, &req); + ASSERT_TRUE(ret.isOk()); + config.sessionParams = req; + + for (int32_t useCase : useCases) { + bool useCaseSupported = std::find(supportedUseCases.begin(), supportedUseCases.end(), + useCase) != supportedUseCases.end(); + + streams[0].useCase = static_cast< + aidl::android::hardware::camera::metadata::ScalerAvailableStreamUseCases>( + useCase); + config.streams = streams; + config.operationMode = StreamConfigurationMode::NORMAL_MODE; + config.streamConfigCounter = streamConfigCounter; + config.multiResolutionInputImage = false; + + bool combSupported; + ret = cameraDevice->isStreamCombinationSupported(config, &combSupported); + ASSERT_TRUE((ret.isOk()) || (static_cast<int32_t>(Status::OPERATION_NOT_SUPPORTED) == + ret.getServiceSpecificError())); + if (ret.isOk()) { + ASSERT_EQ(combSupported, useCaseSupported); + } + ASSERT_TRUE(ret.isOk()); + + std::vector<HalStream> halStreams; + ret = mSession->configureStreams(config, &halStreams); + ALOGI("configureStreams returns status: %d", ret.getServiceSpecificError()); + if (useCaseSupported) { + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(1u, halStreams.size()); + } else { + ASSERT_EQ(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT), + ret.getServiceSpecificError()); + } + } + ret = mSession->close(); + 
mSession = nullptr; + ASSERT_TRUE(ret.isOk()); + } +} + +GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(CameraAidlTest); +INSTANTIATE_TEST_SUITE_P( + PerInstance, CameraAidlTest, + testing::ValuesIn(android::getAidlHalInstanceNames(ICameraProvider::descriptor)), + android::hardware::PrintInstanceNameToString);
\ No newline at end of file diff --git a/camera/provider/aidl/vts/camera_aidl_test.cpp b/camera/provider/aidl/vts/camera_aidl_test.cpp new file mode 100644 index 0000000000..d03b09763b --- /dev/null +++ b/camera/provider/aidl/vts/camera_aidl_test.cpp @@ -0,0 +1,2924 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "camera_aidl_test.h" + +#include <CameraParameters.h> +#include <HandleImporter.h> +#include <aidl/android/hardware/camera/device/ICameraDevice.h> +#include <aidl/android/hardware/camera/metadata/CameraMetadataTag.h> +#include <aidl/android/hardware/camera/metadata/SensorInfoColorFilterArrangement.h> +#include <aidl/android/hardware/camera/metadata/SensorPixelMode.h> +#include <aidl/android/hardware/camera/provider/BnCameraProviderCallback.h> +#include <aidlcommonsupport/NativeHandle.h> +#include <android/binder_manager.h> +#include <android/binder_process.h> +#include <device_cb.h> +#include <empty_device_cb.h> +#include <grallocusage/GrallocUsageConversion.h> +#include <hardware/gralloc1.h> +#include <simple_device_cb.h> +#include <ui/GraphicBufferAllocator.h> +#include <regex> +#include <typeinfo> + +using ::aidl::android::hardware::camera::common::CameraDeviceStatus; +using ::aidl::android::hardware::camera::common::TorchModeStatus; +using ::aidl::android::hardware::camera::device::CameraMetadata; +using ::aidl::android::hardware::camera::device::ICameraDevice; 
+using ::aidl::android::hardware::camera::device::ICameraDeviceSessionDefault; +using ::aidl::android::hardware::camera::metadata::CameraMetadataTag; +using ::aidl::android::hardware::camera::metadata::SensorInfoColorFilterArrangement; +using ::aidl::android::hardware::camera::metadata::SensorPixelMode; +using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback; +using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination; +using ::aidl::android::hardware::camera::provider::ICameraProvider; +using ::aidl::android::hardware::camera::provider::ICameraProviderCallback; +using ::aidl::android::hardware::common::NativeHandle; +using ::android::hardware::camera::common::V1_0::helper::Size; +using ::ndk::ScopedAStatus; +using ::ndk::SpAIBinder; + +namespace { +bool matchDeviceName(const std::string& deviceName, const std::string& providerType, + std::string* deviceVersion, std::string* cameraId) { + // expected format: device@<major>.<minor>/<type>/<id> + std::stringstream pattern; + pattern << "device@[0-9]+\\.[0-9]+/" << providerType << "/(.+)"; + std::regex e(pattern.str()); + + std::smatch sm; + if (std::regex_match(deviceName, sm, e)) { + if (deviceVersion != nullptr) { + *deviceVersion = sm[1]; + } + if (cameraId != nullptr) { + *cameraId = sm[2]; + } + return true; + } + return false; +} + +bool parseProviderName(const std::string& serviceDescriptor, std::string* type /*out*/, + uint32_t* id /*out*/) { + if (!type || !id) { + ADD_FAILURE(); + return false; + } + + // expected format: <service_name>/<type>/<id> + std::string::size_type slashIdx1 = serviceDescriptor.find('/'); + if (slashIdx1 == std::string::npos || slashIdx1 == serviceDescriptor.size() - 1) { + ADD_FAILURE() << "Provider name does not have / separator between name, type, and id"; + return false; + } + + std::string::size_type slashIdx2 = serviceDescriptor.find('/', slashIdx1 + 1); + if (slashIdx2 == std::string::npos || slashIdx2 == serviceDescriptor.size() - 
1) { + ADD_FAILURE() << "Provider name does not have / separator between type and id"; + return false; + } + + std::string typeVal = serviceDescriptor.substr(slashIdx1 + 1, slashIdx2 - slashIdx1 - 1); + + char* endPtr; + errno = 0; + long idVal = strtol(serviceDescriptor.c_str() + slashIdx2 + 1, &endPtr, 10); + if (errno != 0) { + ADD_FAILURE() << "cannot parse provider id as an integer:" << serviceDescriptor.c_str() + << strerror(errno) << errno; + return false; + } + if (endPtr != serviceDescriptor.c_str() + serviceDescriptor.size()) { + ADD_FAILURE() << "provider id has unexpected length " << serviceDescriptor.c_str(); + return false; + } + if (idVal < 0) { + ADD_FAILURE() << "id is negative: " << serviceDescriptor.c_str() << idVal; + return false; + } + + *type = typeVal; + *id = static_cast<uint32_t>(idVal); + + return true; +} + +const std::vector<int32_t> kMandatoryUseCases = { + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL, + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL}; +} // namespace + +void CameraAidlTest::SetUp() { + std::string serviceDescriptor = GetParam(); + ALOGI("get service with name: %s", serviceDescriptor.c_str()); + + bool success = ABinderProcess_setThreadPoolMaxThreadCount(5); + ALOGI("ABinderProcess_setThreadPoolMaxThreadCount returns %s", success ? 
"true" : "false"); + ASSERT_TRUE(success); + ABinderProcess_startThreadPool(); + + SpAIBinder cameraProviderBinder = + SpAIBinder(AServiceManager_getService(serviceDescriptor.c_str())); + ASSERT_NE(cameraProviderBinder.get(), nullptr); + + std::shared_ptr<ICameraProvider> cameraProvider = + ICameraProvider::fromBinder(cameraProviderBinder); + ASSERT_NE(cameraProvider.get(), nullptr); + mProvider = cameraProvider; + uint32_t id; + ASSERT_TRUE(parseProviderName(serviceDescriptor, &mProviderType, &id)); + + notifyDeviceState(ICameraProvider::DEVICE_STATE_NORMAL); +} + +void CameraAidlTest::TearDown() { + if (mSession != nullptr) { + ndk::ScopedAStatus ret = mSession->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +std::vector<std::string> CameraAidlTest::getCameraDeviceNames( + std::shared_ptr<ICameraProvider>& provider, bool addSecureOnly) { + std::vector<std::string> cameraDeviceNames; + + ScopedAStatus ret = provider->getCameraIdList(&cameraDeviceNames); + if (!ret.isOk()) { + ADD_FAILURE() << "Could not get camera id list"; + } + + // External camera devices are reported through cameraDeviceStatusChange + struct ProviderCb : public BnCameraProviderCallback { + ScopedAStatus cameraDeviceStatusChange(const std::string& devName, + CameraDeviceStatus newStatus) override { + ALOGI("camera device status callback name %s, status %d", devName.c_str(), + (int)newStatus); + if (newStatus == CameraDeviceStatus::PRESENT) { + externalCameraDeviceNames.push_back(devName); + } + return ScopedAStatus::ok(); + } + + ScopedAStatus torchModeStatusChange(const std::string&, TorchModeStatus) override { + return ScopedAStatus::ok(); + } + + ScopedAStatus physicalCameraDeviceStatusChange( + const std::string&, const std::string&, + ::aidl::android::hardware::camera::common::CameraDeviceStatus) override { + return ndk::ScopedAStatus(); + } + + std::vector<std::string> externalCameraDeviceNames; + }; + std::shared_ptr<ProviderCb> cb = ndk::SharedRefBase::make<ProviderCb>(); + auto status = 
mProvider->setCallback(cb); + + for (const auto& devName : cb->externalCameraDeviceNames) { + if (cameraDeviceNames.end() == + std::find(cameraDeviceNames.begin(), cameraDeviceNames.end(), devName)) { + cameraDeviceNames.push_back(devName); + } + } + + std::vector<std::string> retList; + for (auto& cameraDeviceName : cameraDeviceNames) { + bool isSecureOnlyCamera = isSecureOnly(mProvider, cameraDeviceName); + if (addSecureOnly) { + if (isSecureOnlyCamera) { + retList.emplace_back(cameraDeviceName); + } + } else if (!isSecureOnlyCamera) { + retList.emplace_back(cameraDeviceName); + } + } + return retList; +} + +bool CameraAidlTest::isSecureOnly(const std::shared_ptr<ICameraProvider>& provider, + const std::string& name) { + std::shared_ptr<ICameraDevice> cameraDevice = nullptr; + ScopedAStatus retInterface = provider->getCameraDeviceInterface(name, &cameraDevice); + if (!retInterface.isOk()) { + ADD_FAILURE() << "Failed to get camera device interface for " << name; + } + + CameraMetadata cameraCharacteristics; + ScopedAStatus retChars = cameraDevice->getCameraCharacteristics(&cameraCharacteristics); + if (!retChars.isOk()) { + ADD_FAILURE() << "Failed to get camera characteristics for device " << name; + } + + camera_metadata_t* chars = + reinterpret_cast<camera_metadata_t*>(cameraCharacteristics.metadata.data()); + + SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC; + Status retCameraKind = getSystemCameraKind(chars, &systemCameraKind); + if (retCameraKind != Status::OK) { + ADD_FAILURE() << "Failed to get camera kind for " << name; + } + + return systemCameraKind == SystemCameraKind::HIDDEN_SECURE_CAMERA; +} + +std::map<std::string, std::string> CameraAidlTest::getCameraDeviceIdToNameMap( + std::shared_ptr<ICameraProvider> provider) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(provider); + + std::map<std::string, std::string> idToNameMap; + for (auto& name : cameraDeviceNames) { + std::string version, cameraId; + if 
(!matchDeviceName(name, mProviderType, &version, &cameraId)) { + ADD_FAILURE(); + } + idToNameMap.insert(std::make_pair(std::string(cameraId), name)); + } + return idToNameMap; +} + +void CameraAidlTest::verifyMonochromeCameraResult( + const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& metadata) { + camera_metadata_ro_entry entry; + + // Check tags that are not applicable for monochrome camera + ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_GREEN_SPLIT)); + ASSERT_FALSE(metadata.exists(ANDROID_SENSOR_NEUTRAL_COLOR_POINT)); + ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_MODE)); + ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)); + ASSERT_FALSE(metadata.exists(ANDROID_COLOR_CORRECTION_GAINS)); + + // Check dynamicBlackLevel + entry = metadata.find(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL); + if (entry.count > 0) { + ASSERT_EQ(entry.count, 4); + for (size_t i = 1; i < entry.count; i++) { + ASSERT_FLOAT_EQ(entry.data.f[i], entry.data.f[0]); + } + } + + // Check noiseProfile + entry = metadata.find(ANDROID_SENSOR_NOISE_PROFILE); + if (entry.count > 0) { + ASSERT_EQ(entry.count, 2); + } + + // Check lensShadingMap + entry = metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP); + if (entry.count > 0) { + ASSERT_EQ(entry.count % 4, 0); + for (size_t i = 0; i < entry.count / 4; i++) { + ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 1], entry.data.f[i * 4]); + ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 2], entry.data.f[i * 4]); + ASSERT_FLOAT_EQ(entry.data.f[i * 4 + 3], entry.data.f[i * 4]); + } + } + + // Check tonemapCurve + camera_metadata_ro_entry curveRed = metadata.find(ANDROID_TONEMAP_CURVE_RED); + camera_metadata_ro_entry curveGreen = metadata.find(ANDROID_TONEMAP_CURVE_GREEN); + camera_metadata_ro_entry curveBlue = metadata.find(ANDROID_TONEMAP_CURVE_BLUE); + if (curveRed.count > 0 && curveGreen.count > 0 && curveBlue.count > 0) { + ASSERT_EQ(curveRed.count, curveGreen.count); + ASSERT_EQ(curveRed.count, curveBlue.count); + for (size_t i = 0; 
i < curveRed.count; i++) { + ASSERT_FLOAT_EQ(curveGreen.data.f[i], curveRed.data.f[i]); + ASSERT_FLOAT_EQ(curveBlue.data.f[i], curveRed.data.f[i]); + } + } +} + +void CameraAidlTest::verifyStreamUseCaseCharacteristics(const camera_metadata_t* metadata) { + camera_metadata_ro_entry entry; + // Check capabilities + int retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry); + bool hasStreamUseCaseCap = false; + if ((0 == retcode) && (entry.count > 0)) { + if (std::find(entry.data.u8, entry.data.u8 + entry.count, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE) != + entry.data.u8 + entry.count) { + hasStreamUseCaseCap = true; + } + } + + bool supportMandatoryUseCases = false; + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES, + &entry); + if ((0 == retcode) && (entry.count > 0)) { + supportMandatoryUseCases = true; + for (size_t i = 0; i < kMandatoryUseCases.size(); i++) { + if (std::find(entry.data.i32, entry.data.i32 + entry.count, kMandatoryUseCases[i]) == + entry.data.i32 + entry.count) { + supportMandatoryUseCases = false; + break; + } + } + bool supportDefaultUseCase = false; + for (size_t i = 0; i < entry.count; i++) { + if (entry.data.i32[i] == ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) { + supportDefaultUseCase = true; + } + ASSERT_TRUE(entry.data.i32[i] <= ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL || + entry.data.i32[i] >= + ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START); + } + ASSERT_TRUE(supportDefaultUseCase); + } + + ASSERT_EQ(hasStreamUseCaseCap, supportMandatoryUseCases); +} + +Status CameraAidlTest::isMonochromeCamera(const camera_metadata_t* staticMeta) { + Status ret = Status::OPERATION_NOT_SUPPORTED; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &entry); + if (0 != rc) { + 
return Status::ILLEGAL_ARGUMENT; + } + + for (size_t i = 0; i < entry.count; i++) { + if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME == entry.data.u8[i]) { + ret = Status::OK; + break; + } + } + + return ret; +} + +Status CameraAidlTest::isLogicalMultiCamera(const camera_metadata_t* staticMeta) { + Status ret = Status::OPERATION_NOT_SUPPORTED; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + for (size_t i = 0; i < entry.count; i++) { + if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA == entry.data.u8[i]) { + ret = Status::OK; + break; + } + } + + return ret; +} + +void CameraAidlTest::verifyLogicalCameraResult(const camera_metadata_t* staticMetadata, + const std::vector<uint8_t>& resultMetadata) { + camera_metadata_t* metadata = (camera_metadata_t*)resultMetadata.data(); + + std::unordered_set<std::string> physicalIds; + Status rc = getPhysicalCameraIds(staticMetadata, &physicalIds); + ASSERT_TRUE(Status::OK == rc); + ASSERT_TRUE(physicalIds.size() > 1); + + camera_metadata_ro_entry entry; + // Check mainPhysicalId + find_camera_metadata_ro_entry(metadata, ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID, + &entry); + if (entry.count > 0) { + std::string mainPhysicalId(reinterpret_cast<const char*>(entry.data.u8)); + ASSERT_NE(physicalIds.find(mainPhysicalId), physicalIds.end()); + } else { + ADD_FAILURE() << "Get LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID failed!"; + } +} + +Status CameraAidlTest::getPhysicalCameraIds(const camera_metadata_t* staticMeta, + std::unordered_set<std::string>* physicalIds) { + if ((nullptr == staticMeta) || (nullptr == physicalIds)) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, 
ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS, + &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + const uint8_t* ids = entry.data.u8; + size_t start = 0; + for (size_t i = 0; i < entry.count; i++) { + if (ids[i] == '\0') { + if (start != i) { + std::string currentId(reinterpret_cast<const char*>(ids + start)); + physicalIds->emplace(currentId); + } + start = i + 1; + } + } + + return Status::OK; +} + +Status CameraAidlTest::getSystemCameraKind(const camera_metadata_t* staticMeta, + SystemCameraKind* systemCameraKind) { + if (nullptr == staticMeta || nullptr == systemCameraKind) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry{}; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + if (entry.count == 1 && + entry.data.u8[0] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA) { + *systemCameraKind = SystemCameraKind::HIDDEN_SECURE_CAMERA; + return Status::OK; + } + + // Go through the capabilities and check if it has + // ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA + for (size_t i = 0; i < entry.count; ++i) { + uint8_t capability = entry.data.u8[i]; + if (capability == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA) { + *systemCameraKind = SystemCameraKind::SYSTEM_ONLY_CAMERA; + return Status::OK; + } + } + *systemCameraKind = SystemCameraKind::PUBLIC; + return Status::OK; +} + +void CameraAidlTest::notifyDeviceState(int64_t state) { + if (mProvider == nullptr) { + return; + } + mProvider->notifyDeviceStateChange(state); +} + +void CameraAidlTest::allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage, + PixelFormat format, buffer_handle_t* buffer_handle) { + ASSERT_NE(buffer_handle, nullptr); + + uint32_t stride; + + android::status_t err = android::GraphicBufferAllocator::get().allocateRawHandle( + width, height, static_cast<int32_t>(format), 1u /*layerCount*/, usage, 
buffer_handle, + &stride, "VtsHalCameraProviderV2"); + ASSERT_EQ(err, android::NO_ERROR); +} + +bool CameraAidlTest::matchDeviceName(const std::string& deviceName, const std::string& providerType, + std::string* deviceVersion, std::string* cameraId) { + // "device@<version>/legacy/<id>" + std::string pattern; + pattern.append("device@([0-9]+\\.[0-9]+)/"); + pattern.append(providerType); + pattern.append("/(.+)"); + + std::regex e(pattern); + std::smatch sm; + if (std::regex_match(deviceName, sm, e)) { + if (deviceVersion != nullptr) { + *deviceVersion = sm[1]; + } + if (cameraId != nullptr) { + *cameraId = sm[2]; + } + return true; + } + return false; +} + +void CameraAidlTest::verifyCameraCharacteristics(const CameraMetadata& chars) { + const camera_metadata_t* metadata = + reinterpret_cast<const camera_metadata_t*>(chars.metadata.data()); + + size_t expectedSize = chars.metadata.size(); + int result = validate_camera_metadata_structure(metadata, &expectedSize); + ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED)); + size_t entryCount = get_camera_metadata_entry_count(metadata); + // TODO: we can do better than 0 here. Need to check how many required + // characteristics keys we've defined. 
+ ASSERT_GT(entryCount, 0u); + + camera_metadata_ro_entry entry; + int retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, &entry); + if ((0 == retcode) && (entry.count > 0)) { + uint8_t hardwareLevel = entry.data.u8[0]; + ASSERT_TRUE(hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED || + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL || + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3 || + hardwareLevel == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL); + } else { + ADD_FAILURE() << "Get camera hardware level failed!"; + } + + entry.count = 0; + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION, &entry); + if ((0 == retcode) || (entry.count > 0)) { + ADD_FAILURE() << "ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION " + << " per API contract should never be set by Hal!"; + } + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS, &entry); + if ((0 == retcode) || (entry.count > 0)) { + ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS" + << " per API contract should never be set by Hal!"; + } + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS, &entry); + if ((0 == retcode) || (entry.count > 0)) { + ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS" + << " per API contract should never be set by Hal!"; + } + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS, &entry); + if ((0 == retcode) || (entry.count > 0)) { + ADD_FAILURE() << "ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS" + << " per API contract should never be set by Hal!"; + } + + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, &entry); + if (0 == retcode || entry.count > 0) { + 
ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS " + << " per API contract should never be set by Hal!"; + } + + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS, &entry); + if (0 == retcode || entry.count > 0) { + ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS " + << " per API contract should never be set by Hal!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS, + &entry); + if (0 == retcode || entry.count > 0) { + ADD_FAILURE() << "ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS " + << " per API contract should never be set by Hal!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_HEIC_INFO_SUPPORTED, &entry); + if (0 == retcode && entry.count > 0) { + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT, &entry); + if (0 == retcode && entry.count > 0) { + uint8_t maxJpegAppSegmentsCount = entry.data.u8[0]; + ASSERT_TRUE(maxJpegAppSegmentsCount >= 1 && maxJpegAppSegmentsCount <= 16); + } else { + ADD_FAILURE() << "Get Heic maxJpegAppSegmentsCount failed!"; + } + } + + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_LENS_POSE_REFERENCE, &entry); + if (0 == retcode && entry.count > 0) { + uint8_t poseReference = entry.data.u8[0]; + ASSERT_TRUE(poseReference <= ANDROID_LENS_POSE_REFERENCE_UNDEFINED && + poseReference >= ANDROID_LENS_POSE_REFERENCE_PRIMARY_CAMERA); + } + + retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_INFO_DEVICE_STATE_ORIENTATIONS, &entry); + if (0 == retcode && entry.count > 0) { + ASSERT_TRUE((entry.count % 2) == 0); + uint64_t maxPublicState = ((uint64_t)ICameraProvider::DEVICE_STATE_FOLDED) << 1; + uint64_t vendorStateStart = 1UL << 31; // Reserved for vendor specific states + uint64_t stateMask = (1 << vendorStateStart) - 1; + stateMask &= ~((1 << maxPublicState) - 1); + for (int i = 0; i < entry.count; i += 2) { + 
ASSERT_TRUE((entry.data.i64[i] & stateMask) == 0); + ASSERT_TRUE((entry.data.i64[i + 1] % 90) == 0); + } + } + + verifyExtendedSceneModeCharacteristics(metadata); + verifyZoomCharacteristics(metadata); + verifyStreamUseCaseCharacteristics(metadata); +} + +void CameraAidlTest::verifyExtendedSceneModeCharacteristics(const camera_metadata_t* metadata) { + camera_metadata_ro_entry entry; + int retcode = 0; + + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_AVAILABLE_MODES, &entry); + if ((0 == retcode) && (entry.count > 0)) { + for (auto i = 0; i < entry.count; i++) { + ASSERT_TRUE(entry.data.u8[i] >= ANDROID_CONTROL_MODE_OFF && + entry.data.u8[i] <= ANDROID_CONTROL_MODE_USE_EXTENDED_SCENE_MODE); + } + } else { + ADD_FAILURE() << "Get camera controlAvailableModes failed!"; + } + + // Check key availability in capabilities, request and result. + + retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry); + bool hasExtendedSceneModeRequestKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasExtendedSceneModeRequestKey = + std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count; + } else { + ADD_FAILURE() << "Get camera availableRequestKeys failed!"; + } + + retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry); + bool hasExtendedSceneModeResultKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasExtendedSceneModeResultKey = + std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_EXTENDED_SCENE_MODE) != entry.data.i32 + entry.count; + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry); + bool hasExtendedSceneModeMaxSizesKey = false; + bool hasExtendedSceneModeZoomRatioRangesKey = false; + if ((0 == retcode) && (entry.count > 
0)) { + hasExtendedSceneModeMaxSizesKey = + std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES) != + entry.data.i32 + entry.count; + hasExtendedSceneModeZoomRatioRangesKey = + std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES) != + entry.data.i32 + entry.count; + } else { + ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!"; + } + + camera_metadata_ro_entry maxSizesEntry; + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES, &maxSizesEntry); + bool hasExtendedSceneModeMaxSizes = (0 == retcode && maxSizesEntry.count > 0); + + camera_metadata_ro_entry zoomRatioRangesEntry; + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES, + &zoomRatioRangesEntry); + bool hasExtendedSceneModeZoomRatioRanges = (0 == retcode && zoomRatioRangesEntry.count > 0); + + // Extended scene mode keys must all be available, or all be unavailable. + bool noExtendedSceneMode = + !hasExtendedSceneModeRequestKey && !hasExtendedSceneModeResultKey && + !hasExtendedSceneModeMaxSizesKey && !hasExtendedSceneModeZoomRatioRangesKey && + !hasExtendedSceneModeMaxSizes && !hasExtendedSceneModeZoomRatioRanges; + if (noExtendedSceneMode) { + return; + } + bool hasExtendedSceneMode = hasExtendedSceneModeRequestKey && hasExtendedSceneModeResultKey && + hasExtendedSceneModeMaxSizesKey && + hasExtendedSceneModeZoomRatioRangesKey && + hasExtendedSceneModeMaxSizes && hasExtendedSceneModeZoomRatioRanges; + ASSERT_TRUE(hasExtendedSceneMode); + + // Must have DISABLED, and must have one of BOKEH_STILL_CAPTURE, BOKEH_CONTINUOUS, or a VENDOR + // mode. 
+ ASSERT_TRUE((maxSizesEntry.count == 6 && zoomRatioRangesEntry.count == 2) || + (maxSizesEntry.count == 9 && zoomRatioRangesEntry.count == 4)); + bool hasDisabledMode = false; + bool hasBokehStillCaptureMode = false; + bool hasBokehContinuousMode = false; + bool hasVendorMode = false; + std::vector<AvailableStream> outputStreams; + ASSERT_EQ(Status::OK, getAvailableOutputStreams(metadata, outputStreams)); + for (int i = 0, j = 0; i < maxSizesEntry.count && j < zoomRatioRangesEntry.count; i += 3) { + int32_t mode = maxSizesEntry.data.i32[i]; + int32_t maxWidth = maxSizesEntry.data.i32[i + 1]; + int32_t maxHeight = maxSizesEntry.data.i32[i + 2]; + switch (mode) { + case ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED: + hasDisabledMode = true; + ASSERT_TRUE(maxWidth == 0 && maxHeight == 0); + break; + case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE: + hasBokehStillCaptureMode = true; + j += 2; + break; + case ANDROID_CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS: + hasBokehContinuousMode = true; + j += 2; + break; + default: + if (mode < ANDROID_CONTROL_EXTENDED_SCENE_MODE_VENDOR_START) { + ADD_FAILURE() << "Invalid extended scene mode advertised: " << mode; + } else { + hasVendorMode = true; + j += 2; + } + break; + } + + if (mode != ANDROID_CONTROL_EXTENDED_SCENE_MODE_DISABLED) { + // Make sure size is supported. 
+ bool sizeSupported = false; + for (const auto& stream : outputStreams) { + if ((stream.format == static_cast<int32_t>(PixelFormat::YCBCR_420_888) || + stream.format == static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) && + stream.width == maxWidth && stream.height == maxHeight) { + sizeSupported = true; + break; + } + } + ASSERT_TRUE(sizeSupported); + + // Make sure zoom range is valid + float minZoomRatio = zoomRatioRangesEntry.data.f[0]; + float maxZoomRatio = zoomRatioRangesEntry.data.f[1]; + ASSERT_GT(minZoomRatio, 0.0f); + ASSERT_LE(minZoomRatio, maxZoomRatio); + } + } + ASSERT_TRUE(hasDisabledMode); + ASSERT_TRUE(hasBokehStillCaptureMode || hasBokehContinuousMode || hasVendorMode); +} + +Status CameraAidlTest::getAvailableOutputStreams(const camera_metadata_t* staticMeta, + std::vector<AvailableStream>& outputStreams, + const AvailableStream* threshold, + bool maxResolution) { + AvailableStream depthPreviewThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::Y16)}; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + int scalerTag = maxResolution + ? ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION + : ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS; + int depthTag = maxResolution + ? 
ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION + : ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS; + + camera_metadata_ro_entry scalerEntry; + camera_metadata_ro_entry depthEntry; + int foundScaler = find_camera_metadata_ro_entry(staticMeta, scalerTag, &scalerEntry); + int foundDepth = find_camera_metadata_ro_entry(staticMeta, depthTag, &depthEntry); + if ((0 != foundScaler || (0 != (scalerEntry.count % 4))) && + (0 != foundDepth || (0 != (depthEntry.count % 4)))) { + return Status::ILLEGAL_ARGUMENT; + } + + if (foundScaler == 0 && (0 == (scalerEntry.count % 4))) { + fillOutputStreams(&scalerEntry, outputStreams, threshold, + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT); + } + + if (foundDepth == 0 && (0 == (depthEntry.count % 4))) { + fillOutputStreams(&depthEntry, outputStreams, &depthPreviewThreshold, + ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_OUTPUT); + } + + return Status::OK; +} + +void CameraAidlTest::fillOutputStreams(camera_metadata_ro_entry_t* entry, + std::vector<AvailableStream>& outputStreams, + const AvailableStream* threshold, + const int32_t availableConfigOutputTag) { + for (size_t i = 0; i < entry->count; i += 4) { + if (availableConfigOutputTag == entry->data.i32[i + 3]) { + if (nullptr == threshold) { + AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2], + entry->data.i32[i]}; + outputStreams.push_back(s); + } else { + if ((threshold->format == entry->data.i32[i]) && + (threshold->width >= entry->data.i32[i + 1]) && + (threshold->height >= entry->data.i32[i + 2])) { + AvailableStream s = {entry->data.i32[i + 1], entry->data.i32[i + 2], + threshold->format}; + outputStreams.push_back(s); + } + } + } + } +} + +void CameraAidlTest::verifyZoomCharacteristics(const camera_metadata_t* metadata) { + camera_metadata_ro_entry entry; + int retcode = 0; + + // Check key availability in capabilities, request and result. 
+ retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, + &entry); + float maxDigitalZoom = 1.0; + if ((0 == retcode) && (entry.count == 1)) { + maxDigitalZoom = entry.data.f[0]; + } else { + ADD_FAILURE() << "Get camera scalerAvailableMaxDigitalZoom failed!"; + } + + retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry); + bool hasZoomRequestKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasZoomRequestKey = std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count; + } else { + ADD_FAILURE() << "Get camera availableRequestKeys failed!"; + } + + retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry); + bool hasZoomResultKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasZoomResultKey = std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_ZOOM_RATIO) != entry.data.i32 + entry.count; + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry); + bool hasZoomCharacteristicsKey = false; + if ((0 == retcode) && (entry.count > 0)) { + hasZoomCharacteristicsKey = + std::find(entry.data.i32, entry.data.i32 + entry.count, + ANDROID_CONTROL_ZOOM_RATIO_RANGE) != entry.data.i32 + entry.count; + } else { + ADD_FAILURE() << "Get camera availableCharacteristicsKeys failed!"; + } + + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry); + bool hasZoomRatioRange = (0 == retcode && entry.count == 2); + + // Zoom keys must all be available, or all be unavailable. 
+ bool noZoomRatio = !hasZoomRequestKey && !hasZoomResultKey && !hasZoomCharacteristicsKey && + !hasZoomRatioRange; + if (noZoomRatio) { + return; + } + bool hasZoomRatio = + hasZoomRequestKey && hasZoomResultKey && hasZoomCharacteristicsKey && hasZoomRatioRange; + ASSERT_TRUE(hasZoomRatio); + + float minZoomRatio = entry.data.f[0]; + float maxZoomRatio = entry.data.f[1]; + constexpr float FLOATING_POINT_THRESHOLD = 0.00001f; + if (maxDigitalZoom > maxZoomRatio + FLOATING_POINT_THRESHOLD) { + ADD_FAILURE() << "Maximum digital zoom " << maxDigitalZoom + << " is larger than maximum zoom ratio " << maxZoomRatio << " + threshold " + << FLOATING_POINT_THRESHOLD << "!"; + } + if (minZoomRatio > maxZoomRatio) { + ADD_FAILURE() << "Maximum zoom ratio is less than minimum zoom ratio!"; + } + if (minZoomRatio > 1.0f) { + ADD_FAILURE() << "Minimum zoom ratio is more than 1.0!"; + } + if (maxZoomRatio < 1.0f) { + ADD_FAILURE() << "Maximum zoom ratio is less than 1.0!"; + } + + // Make sure CROPPING_TYPE is CENTER_ONLY + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SCALER_CROPPING_TYPE, &entry); + if ((0 == retcode) && (entry.count == 1)) { + int8_t croppingType = entry.data.u8[0]; + ASSERT_EQ(croppingType, ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY); + } else { + ADD_FAILURE() << "Get camera scalerCroppingType failed!"; + } +} + +void CameraAidlTest::verifyMonochromeCharacteristics(const CameraMetadata& chars) { + const camera_metadata_t* metadata = (camera_metadata_t*)chars.metadata.data(); + Status rc = isMonochromeCamera(metadata); + if (Status::OPERATION_NOT_SUPPORTED == rc) { + return; + } + ASSERT_EQ(Status::OK, rc); + + camera_metadata_ro_entry entry; + // Check capabilities + int retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, &entry); + if ((0 == retcode) && (entry.count > 0)) { + ASSERT_EQ(std::find(entry.data.u8, entry.data.u8 + entry.count, + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING), + 
entry.data.u8 + entry.count); + } + + // Check Cfa + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, + &entry); + if ((0 == retcode) && (entry.count == 1)) { + ASSERT_TRUE(entry.data.i32[0] == + static_cast<int32_t>( + SensorInfoColorFilterArrangement:: + ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO) || + entry.data.i32[0] == + static_cast<int32_t>( + SensorInfoColorFilterArrangement:: + ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR)); + } + + // Check availableRequestKeys + retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, &entry); + if ((0 == retcode) && (entry.count > 0)) { + for (size_t i = 0; i < entry.count; i++) { + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS); + } + } else { + ADD_FAILURE() << "Get camera availableRequestKeys failed!"; + } + + // Check availableResultKeys + retcode = + find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, &entry); + if ((0 == retcode) && (entry.count > 0)) { + for (size_t i = 0; i < entry.count; i++) { + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_GREEN_SPLIT); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_NEUTRAL_COLOR_POINT); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_MODE); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_TRANSFORM); + ASSERT_NE(entry.data.i32[i], ANDROID_COLOR_CORRECTION_GAINS); + } + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + + // Check availableCharacteristicKeys + retcode = find_camera_metadata_ro_entry(metadata, + ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &entry); + if ((0 == retcode) && (entry.count > 0)) { + for (size_t i = 0; i < entry.count; i++) { + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_REFERENCE_ILLUMINANT1); + ASSERT_NE(entry.data.i32[i], 
ANDROID_SENSOR_REFERENCE_ILLUMINANT2); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM1); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_CALIBRATION_TRANSFORM2); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM1); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_COLOR_TRANSFORM2); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX1); + ASSERT_NE(entry.data.i32[i], ANDROID_SENSOR_FORWARD_MATRIX2); + } + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + + // Check blackLevelPattern + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_BLACK_LEVEL_PATTERN, &entry); + if ((0 == retcode) && (entry.count > 0)) { + ASSERT_EQ(entry.count, 4); + for (size_t i = 1; i < entry.count; i++) { + ASSERT_EQ(entry.data.i32[i], entry.data.i32[0]); + } + } +} + +void CameraAidlTest::verifyRecommendedConfigs(const CameraMetadata& chars) { + size_t CONFIG_ENTRY_SIZE = 5; + size_t CONFIG_ENTRY_TYPE_OFFSET = 3; + size_t CONFIG_ENTRY_BITFIELD_OFFSET = 4; + uint32_t maxPublicUsecase = + ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PUBLIC_END_3_8; + uint32_t vendorUsecaseStart = + ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VENDOR_START; + uint32_t usecaseMask = (1 << vendorUsecaseStart) - 1; + usecaseMask &= ~((1 << maxPublicUsecase) - 1); + + const camera_metadata_t* metadata = + reinterpret_cast<const camera_metadata_t*>(chars.metadata.data()); + + camera_metadata_ro_entry recommendedConfigsEntry, recommendedDepthConfigsEntry, ioMapEntry; + recommendedConfigsEntry.count = recommendedDepthConfigsEntry.count = ioMapEntry.count = 0; + int retCode = find_camera_metadata_ro_entry( + metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS, + &recommendedConfigsEntry); + int depthRetCode = find_camera_metadata_ro_entry( + metadata, ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS, + &recommendedDepthConfigsEntry); + int ioRetCode = find_camera_metadata_ro_entry( 
+ metadata, ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP, &ioMapEntry); + if ((0 != retCode) && (0 != depthRetCode)) { + // In case both regular and depth recommended configurations are absent, + // I/O should be absent as well. + ASSERT_NE(ioRetCode, 0); + return; + } + + camera_metadata_ro_entry availableKeysEntry; + retCode = find_camera_metadata_ro_entry( + metadata, ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, &availableKeysEntry); + ASSERT_TRUE((0 == retCode) && (availableKeysEntry.count > 0)); + std::vector<int32_t> availableKeys; + availableKeys.reserve(availableKeysEntry.count); + availableKeys.insert(availableKeys.end(), availableKeysEntry.data.i32, + availableKeysEntry.data.i32 + availableKeysEntry.count); + + if (recommendedConfigsEntry.count > 0) { + ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(), + ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS), + availableKeys.end()); + ASSERT_EQ((recommendedConfigsEntry.count % CONFIG_ENTRY_SIZE), 0); + for (size_t i = 0; i < recommendedConfigsEntry.count; i += CONFIG_ENTRY_SIZE) { + int32_t entryType = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET]; + uint32_t bitfield = recommendedConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET]; + ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) || + (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT)); + ASSERT_TRUE((bitfield & usecaseMask) == 0); + } + } + + if (recommendedDepthConfigsEntry.count > 0) { + ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(), + ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS), + availableKeys.end()); + ASSERT_EQ((recommendedDepthConfigsEntry.count % CONFIG_ENTRY_SIZE), 0); + for (size_t i = 0; i < recommendedDepthConfigsEntry.count; i += CONFIG_ENTRY_SIZE) { + int32_t entryType = recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_TYPE_OFFSET]; + uint32_t bitfield = + 
recommendedDepthConfigsEntry.data.i32[i + CONFIG_ENTRY_BITFIELD_OFFSET]; + ASSERT_TRUE((entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) || + (entryType == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT)); + ASSERT_TRUE((bitfield & usecaseMask) == 0); + } + + if (recommendedConfigsEntry.count == 0) { + // In case regular recommended configurations are absent but suggested depth + // configurations are present, I/O should be absent. + ASSERT_NE(ioRetCode, 0); + } + } + + if ((ioRetCode == 0) && (ioMapEntry.count > 0)) { + ASSERT_NE(std::find(availableKeys.begin(), availableKeys.end(), + ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP), + availableKeys.end()); + ASSERT_EQ(isZSLModeAvailable(metadata), Status::OK); + } +} + +// Check whether ZSL is available using the static camera +// characteristics. +Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta) { + if (Status::OK == isZSLModeAvailable(staticMeta, PRIV_REPROCESS)) { + return Status::OK; + } else { + return isZSLModeAvailable(staticMeta, YUV_REPROCESS); + } +} + +Status CameraAidlTest::isZSLModeAvailable(const camera_metadata_t* staticMeta, + ReprocessType reprocType) { + Status ret = Status::OPERATION_NOT_SUPPORTED; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + for (size_t i = 0; i < entry.count; i++) { + if ((reprocType == PRIV_REPROCESS && + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING == entry.data.u8[i]) || + (reprocType == YUV_REPROCESS && + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING == entry.data.u8[i])) { + ret = Status::OK; + break; + } + } + + return ret; +} + +// Verify logical or ultra high resolution camera static metadata +void 
CameraAidlTest::verifyLogicalOrUltraHighResCameraMetadata( + const std::string& cameraName, const std::shared_ptr<ICameraDevice>& device, + const CameraMetadata& chars, const std::vector<std::string>& deviceNames) { + const camera_metadata_t* metadata = + reinterpret_cast<const camera_metadata_t*>(chars.metadata.data()); + ASSERT_NE(nullptr, metadata); + SystemCameraKind systemCameraKind = SystemCameraKind::PUBLIC; + Status retStatus = getSystemCameraKind(metadata, &systemCameraKind); + ASSERT_EQ(retStatus, Status::OK); + Status rc = isLogicalMultiCamera(metadata); + ASSERT_TRUE(Status::OK == rc || Status::OPERATION_NOT_SUPPORTED == rc); + bool isMultiCamera = (Status::OK == rc); + bool isUltraHighResCamera = isUltraHighResolution(metadata); + if (!isMultiCamera && !isUltraHighResCamera) { + return; + } + + camera_metadata_ro_entry entry; + int retcode = find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry); + bool hasZoomRatioRange = (0 == retcode && entry.count == 2); + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + bool hasHalBufferManager = + (0 == retcode && 1 == entry.count && + entry.data.i32[0] == ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + retcode = find_camera_metadata_ro_entry( + metadata, ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED, &entry); + bool multiResolutionStreamSupported = + (0 == retcode && 1 == entry.count && + entry.data.u8[0] == ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED_TRUE); + if (multiResolutionStreamSupported) { + ASSERT_TRUE(hasHalBufferManager); + } + + std::string version, cameraId; + ASSERT_TRUE(::matchDeviceName(cameraName, mProviderType, &version, &cameraId)); + std::unordered_set<std::string> physicalIds; + rc = getPhysicalCameraIds(metadata, &physicalIds); + ASSERT_TRUE(isUltraHighResCamera || Status::OK == rc); + for (const auto& physicalId : physicalIds) { + ASSERT_NE(physicalId, cameraId); + } + 
if (physicalIds.size() == 0) { + ASSERT_TRUE(isUltraHighResCamera && !isMultiCamera); + physicalIds.insert(cameraId); + } + + std::unordered_set<int32_t> physicalRequestKeyIDs; + rc = getSupportedKeys(const_cast<camera_metadata_t*>(metadata), + ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS, + &physicalRequestKeyIDs); + ASSERT_TRUE(Status::OK == rc); + bool hasTestPatternPhysicalRequestKey = + physicalRequestKeyIDs.find(ANDROID_SENSOR_TEST_PATTERN_MODE) != + physicalRequestKeyIDs.end(); + std::unordered_set<int32_t> privacyTestPatternModes; + getPrivacyTestPatternModes(metadata, &privacyTestPatternModes); + + // Map from image format to number of multi-resolution sizes for that format + std::unordered_map<int32_t, size_t> multiResOutputFormatCounterMap; + std::unordered_map<int32_t, size_t> multiResInputFormatCounterMap; + for (const auto& physicalId : physicalIds) { + bool isPublicId = false; + std::string fullPublicId; + SystemCameraKind physSystemCameraKind = SystemCameraKind::PUBLIC; + for (auto& deviceName : deviceNames) { + std::string publicVersion, publicId; + ASSERT_TRUE(::matchDeviceName(deviceName, mProviderType, &publicVersion, &publicId)); + if (physicalId == publicId) { + isPublicId = true; + fullPublicId = deviceName; + break; + } + } + + camera_metadata_ro_entry physicalMultiResStreamConfigs; + camera_metadata_ro_entry physicalStreamConfigs; + camera_metadata_ro_entry physicalMaxResolutionStreamConfigs; + bool isUltraHighRes = false; + std::unordered_set<int32_t> subCameraPrivacyTestPatterns; + if (isPublicId) { + std::shared_ptr<ICameraDevice> subDevice; + ndk::ScopedAStatus ret = mProvider->getCameraDeviceInterface(fullPublicId, &subDevice); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(subDevice, nullptr); + + CameraMetadata subDeviceChars; + ret = subDevice->getCameraCharacteristics(&subDeviceChars); + ASSERT_TRUE(ret.isOk()); + + const camera_metadata_t* staticMetadata = + reinterpret_cast<const 
camera_metadata_t*>(subDeviceChars.metadata.data()); + retStatus = getSystemCameraKind(staticMetadata, &physSystemCameraKind); + ASSERT_EQ(retStatus, Status::OK); + + // Make sure that the system camera kind of a non-hidden + // physical cameras is the same as the logical camera associated + // with it. + ASSERT_EQ(physSystemCameraKind, systemCameraKind); + retcode = find_camera_metadata_ro_entry(staticMetadata, + ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry); + bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2); + ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange); + + getMultiResolutionStreamConfigurations( + &physicalMultiResStreamConfigs, &physicalStreamConfigs, + &physicalMaxResolutionStreamConfigs, staticMetadata); + isUltraHighRes = isUltraHighResolution(staticMetadata); + + getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns); + } else { + // Check camera characteristics for hidden camera id + CameraMetadata physChars; + ndk::ScopedAStatus ret = + device->getPhysicalCameraCharacteristics(physicalId, &physChars); + ASSERT_TRUE(ret.isOk()); + verifyCameraCharacteristics(physChars); + verifyMonochromeCharacteristics(physChars); + + auto staticMetadata = (const camera_metadata_t*)physChars.metadata.data(); + retcode = find_camera_metadata_ro_entry(staticMetadata, + ANDROID_CONTROL_ZOOM_RATIO_RANGE, &entry); + bool subCameraHasZoomRatioRange = (0 == retcode && entry.count == 2); + ASSERT_EQ(hasZoomRatioRange, subCameraHasZoomRatioRange); + + getMultiResolutionStreamConfigurations( + &physicalMultiResStreamConfigs, &physicalStreamConfigs, + &physicalMaxResolutionStreamConfigs, staticMetadata); + isUltraHighRes = isUltraHighResolution(staticMetadata); + getPrivacyTestPatternModes(staticMetadata, &subCameraPrivacyTestPatterns); + + // Check calling getCameraDeviceInterface_V3_x() on hidden camera id returns + // ILLEGAL_ARGUMENT. 
+ std::stringstream s; + s << "device@" << version << "/" << mProviderType << "/" << physicalId; + std::string fullPhysicalId(s.str()); + std::shared_ptr<ICameraDevice> subDevice; + ret = mProvider->getCameraDeviceInterface(fullPhysicalId, &subDevice); + ASSERT_TRUE(static_cast<int32_t>(Status::ILLEGAL_ARGUMENT) == + ret.getServiceSpecificError()); + ASSERT_EQ(subDevice, nullptr); + } + + if (hasTestPatternPhysicalRequestKey) { + ASSERT_TRUE(privacyTestPatternModes == subCameraPrivacyTestPatterns); + } + + if (physicalMultiResStreamConfigs.count > 0) { + ASSERT_EQ(physicalMultiResStreamConfigs.count % 4, 0); + + // Each supported size must be max size for that format, + for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4; i++) { + int32_t multiResFormat = physicalMultiResStreamConfigs.data.i32[i * 4]; + int32_t multiResWidth = physicalMultiResStreamConfigs.data.i32[i * 4 + 1]; + int32_t multiResHeight = physicalMultiResStreamConfigs.data.i32[i * 4 + 2]; + int32_t multiResInput = physicalMultiResStreamConfigs.data.i32[i * 4 + 3]; + + // Check if the resolution is the max resolution in stream + // configuration map + bool supported = false; + bool isMaxSize = true; + for (size_t j = 0; j < physicalStreamConfigs.count / 4; j++) { + int32_t format = physicalStreamConfigs.data.i32[j * 4]; + int32_t width = physicalStreamConfigs.data.i32[j * 4 + 1]; + int32_t height = physicalStreamConfigs.data.i32[j * 4 + 2]; + int32_t input = physicalStreamConfigs.data.i32[j * 4 + 3]; + if (format == multiResFormat && input == multiResInput) { + if (width == multiResWidth && height == multiResHeight) { + supported = true; + } else if (width * height > multiResWidth * multiResHeight) { + isMaxSize = false; + } + } + } + // Check if the resolution is the max resolution in max + // resolution stream configuration map + bool supportedUltraHighRes = false; + bool isUltraHighResMaxSize = true; + for (size_t j = 0; j < physicalMaxResolutionStreamConfigs.count / 4; j++) { + int32_t 
format = physicalMaxResolutionStreamConfigs.data.i32[j * 4]; + int32_t width = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 1]; + int32_t height = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 2]; + int32_t input = physicalMaxResolutionStreamConfigs.data.i32[j * 4 + 3]; + if (format == multiResFormat && input == multiResInput) { + if (width == multiResWidth && height == multiResHeight) { + supportedUltraHighRes = true; + } else if (width * height > multiResWidth * multiResHeight) { + isUltraHighResMaxSize = false; + } + } + } + + if (isUltraHighRes) { + // For ultra high resolution camera, the configuration must + // be the maximum size in stream configuration map, or max + // resolution stream configuration map + ASSERT_TRUE((supported && isMaxSize) || + (supportedUltraHighRes && isUltraHighResMaxSize)); + } else { + // The configuration must be the maximum size in stream + // configuration map + ASSERT_TRUE(supported && isMaxSize); + ASSERT_FALSE(supportedUltraHighRes); + } + + // Increment the counter for the configuration's format. + auto& formatCounterMap = multiResInput ? 
multiResInputFormatCounterMap + : multiResOutputFormatCounterMap; + if (formatCounterMap.count(multiResFormat) == 0) { + formatCounterMap[multiResFormat] = 1; + } else { + formatCounterMap[multiResFormat]++; + } + } + + // There must be no duplicates + for (size_t i = 0; i < physicalMultiResStreamConfigs.count / 4 - 1; i++) { + for (size_t j = i + 1; j < physicalMultiResStreamConfigs.count / 4; j++) { + // Input/output doesn't match + if (physicalMultiResStreamConfigs.data.i32[i * 4 + 3] != + physicalMultiResStreamConfigs.data.i32[j * 4 + 3]) { + continue; + } + // Format doesn't match + if (physicalMultiResStreamConfigs.data.i32[i * 4] != + physicalMultiResStreamConfigs.data.i32[j * 4]) { + continue; + } + // Width doesn't match + if (physicalMultiResStreamConfigs.data.i32[i * 4 + 1] != + physicalMultiResStreamConfigs.data.i32[j * 4 + 1]) { + continue; + } + // Height doesn't match + if (physicalMultiResStreamConfigs.data.i32[i * 4 + 2] != + physicalMultiResStreamConfigs.data.i32[j * 4 + 2]) { + continue; + } + // input/output, format, width, and height all match + ADD_FAILURE(); + } + } + } + } + + // If a multi-resolution stream is supported, there must be at least one + // format with more than one resolutions + if (multiResolutionStreamSupported) { + size_t numMultiResFormats = 0; + for (const auto& [format, sizeCount] : multiResOutputFormatCounterMap) { + if (sizeCount >= 2) { + numMultiResFormats++; + } + } + for (const auto& [format, sizeCount] : multiResInputFormatCounterMap) { + if (sizeCount >= 2) { + numMultiResFormats++; + + // If multi-resolution reprocessing is supported, the logical + // camera or ultra-high resolution sensor camera must support + // the corresponding reprocessing capability. 
+ if (format == static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED)) { + ASSERT_EQ(isZSLModeAvailable(metadata, PRIV_REPROCESS), Status::OK); + } else if (format == static_cast<int32_t>(PixelFormat::YCBCR_420_888)) { + ASSERT_EQ(isZSLModeAvailable(metadata, YUV_REPROCESS), Status::OK); + } + } + } + ASSERT_GT(numMultiResFormats, 0); + } + + // Make sure ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID is available in + // result keys. + if (isMultiCamera) { + retcode = find_camera_metadata_ro_entry(metadata, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS, + &entry); + if ((0 == retcode) && (entry.count > 0)) { + ASSERT_NE(std::find(entry.data.i32, entry.data.i32 + entry.count, + static_cast<int32_t>( + CameraMetadataTag:: + ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID)), + entry.data.i32 + entry.count); + } else { + ADD_FAILURE() << "Get camera availableResultKeys failed!"; + } + } +} + +bool CameraAidlTest::isUltraHighResolution(const camera_metadata_t* staticMeta) { + camera_metadata_ro_entry scalerEntry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &scalerEntry); + if (rc == 0) { + for (uint32_t i = 0; i < scalerEntry.count; i++) { + if (scalerEntry.data.u8[i] == + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR) { + return true; + } + } + } + return false; +} + +Status CameraAidlTest::getSupportedKeys(camera_metadata_t* staticMeta, uint32_t tagId, + std::unordered_set<int32_t>* requestIDs) { + if ((nullptr == staticMeta) || (nullptr == requestIDs)) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, tagId, &entry); + if ((0 != rc) || (entry.count == 0)) { + return Status::OK; + } + + requestIDs->insert(entry.data.i32, entry.data.i32 + entry.count); + + return Status::OK; +} + +void CameraAidlTest::getPrivacyTestPatternModes( + const camera_metadata_t* staticMetadata, + std::unordered_set<int32_t>* 
privacyTestPatternModes) { + ASSERT_NE(staticMetadata, nullptr); + ASSERT_NE(privacyTestPatternModes, nullptr); + + camera_metadata_ro_entry entry; + int retcode = find_camera_metadata_ro_entry( + staticMetadata, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES, &entry); + ASSERT_TRUE(0 == retcode); + + for (auto i = 0; i < entry.count; i++) { + if (entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR || + entry.data.i32[i] == ANDROID_SENSOR_TEST_PATTERN_MODE_BLACK) { + privacyTestPatternModes->insert(entry.data.i32[i]); + } + } +} + +void CameraAidlTest::getMultiResolutionStreamConfigurations( + camera_metadata_ro_entry* multiResStreamConfigs, camera_metadata_ro_entry* streamConfigs, + camera_metadata_ro_entry* maxResolutionStreamConfigs, + const camera_metadata_t* staticMetadata) { + ASSERT_NE(multiResStreamConfigs, nullptr); + ASSERT_NE(streamConfigs, nullptr); + ASSERT_NE(maxResolutionStreamConfigs, nullptr); + ASSERT_NE(staticMetadata, nullptr); + + int retcode = find_camera_metadata_ro_entry( + staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, streamConfigs); + ASSERT_TRUE(0 == retcode); + retcode = find_camera_metadata_ro_entry( + staticMetadata, ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION, + maxResolutionStreamConfigs); + ASSERT_TRUE(-ENOENT == retcode || 0 == retcode); + retcode = find_camera_metadata_ro_entry( + staticMetadata, ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS, + multiResStreamConfigs); + ASSERT_TRUE(-ENOENT == retcode || 0 == retcode); +} + +bool CameraAidlTest::isTorchSupported(const camera_metadata_t* staticMeta) { + camera_metadata_ro_entry torchEntry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_AVAILABLE, &torchEntry); + if (rc != 0) { + ALOGI("isTorchSupported: Failed to find entry for ANDROID_FLASH_INFO_AVAILABLE"); + return false; + } + if (torchEntry.count == 1 && !torchEntry.data.u8[0]) { + ALOGI("isTorchSupported: Torch not supported"); 
+ return false; + } + ALOGI("isTorchSupported: Torch supported"); + return true; +} + +bool CameraAidlTest::isTorchStrengthControlSupported(const camera_metadata_t* staticMeta) { + int32_t maxLevel = 0; + camera_metadata_ro_entry maxEntry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL, + &maxEntry); + if (rc != 0) { + ALOGI("isTorchStrengthControlSupported: Failed to find entry for " + "ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL"); + return false; + } + + maxLevel = *maxEntry.data.i32; + if (maxLevel > 1) { + ALOGI("isTorchStrengthControlSupported: Torch strength control supported."); + return true; + } + ALOGI("isTorchStrengthControlSupported: Torch strength control not supported."); + return false; +} + +void CameraAidlTest::verifyRequestTemplate(const camera_metadata_t* metadata, + RequestTemplate requestTemplate) { + ASSERT_NE(nullptr, metadata); + size_t entryCount = get_camera_metadata_entry_count(metadata); + ALOGI("template %u metadata entry count is %zu", (int32_t)requestTemplate, entryCount); + // TODO: we can do better than 0 here. 
Need to check how many required + // request keys we've defined for each template + ASSERT_GT(entryCount, 0u); + + // Check zoomRatio + camera_metadata_ro_entry zoomRatioEntry; + int foundZoomRatio = + find_camera_metadata_ro_entry(metadata, ANDROID_CONTROL_ZOOM_RATIO, &zoomRatioEntry); + if (foundZoomRatio == 0) { + ASSERT_EQ(zoomRatioEntry.count, 1); + ASSERT_EQ(zoomRatioEntry.data.f[0], 1.0f); + } +} + +void CameraAidlTest::openEmptyDeviceSession(const std::string& name, + std::shared_ptr<ICameraProvider> provider, + std::shared_ptr<ICameraDeviceSession>* session, + CameraMetadata* staticMeta, + std::shared_ptr<ICameraDevice>* device) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, staticMeta); + ASSERT_NE(nullptr, device); + + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>(); + ret = (*device)->open(cb, session); + ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(*session, nullptr); + + ret = (*device)->getCameraCharacteristics(staticMeta); +} + +void CameraAidlTest::openEmptyInjectionSession(const std::string& name, + const std::shared_ptr<ICameraProvider>& provider, + std::shared_ptr<ICameraInjectionSession>* session, + CameraMetadata* metadata, + std::shared_ptr<ICameraDevice>* device) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, metadata); + ASSERT_NE(nullptr, device); + + ALOGI("openEmptyInjectionSession: Testing camera device %s", name.c_str()); + ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, device); + ALOGI("openEmptyInjectionSession: getCameraDeviceInterface returns status:%d:%d", + 
ret.getExceptionCode(), ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(*device, nullptr); + + std::shared_ptr<EmptyDeviceCb> cb = ndk::SharedRefBase::make<EmptyDeviceCb>(); + ret = (*device)->openInjectionSession(cb, session); + ALOGI("device::openInjectionSession returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + + if (static_cast<Status>(ret.getServiceSpecificError()) == Status::OPERATION_NOT_SUPPORTED && + *session == nullptr) { + return; // Injection Session not supported. Callee will receive nullptr in *session + } + + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(*session, nullptr); + + ret = (*device)->getCameraCharacteristics(metadata); + ASSERT_TRUE(ret.isOk()); +} + +Status CameraAidlTest::getJpegBufferSize(camera_metadata_t* staticMeta, int32_t* outBufSize) { + if (nullptr == staticMeta || nullptr == outBufSize) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_JPEG_MAX_SIZE, &entry); + if ((0 != rc) || (1 != entry.count)) { + return Status::ILLEGAL_ARGUMENT; + } + + *outBufSize = entry.data.i32[0]; + return Status::OK; +} + +Dataspace CameraAidlTest::getDataspace(PixelFormat format) { + switch (format) { + case PixelFormat::BLOB: + return Dataspace::JFIF; + case PixelFormat::Y16: + return Dataspace::DEPTH; + case PixelFormat::RAW16: + case PixelFormat::RAW_OPAQUE: + case PixelFormat::RAW10: + case PixelFormat::RAW12: + return Dataspace::ARBITRARY; + default: + return Dataspace::UNKNOWN; + } +} + +void CameraAidlTest::createStreamConfiguration(std::vector<Stream>& streams, + StreamConfigurationMode configMode, + StreamConfiguration* config, + int32_t jpegBufferSize) { + ASSERT_NE(nullptr, config); + + for (auto& stream : streams) { + stream.bufferSize = + (stream.format == PixelFormat::BLOB && stream.dataSpace == Dataspace::JFIF) + ? 
jpegBufferSize + : 0; + } + + // Caller is responsible to fill in non-zero config->streamConfigCounter after this returns + config->streams = streams; + config->operationMode = configMode; + config->multiResolutionInputImage = false; +} + +void CameraAidlTest::verifyStreamCombination(const std::shared_ptr<ICameraDevice>& device, + const StreamConfiguration& config, bool expectedStatus, + bool expectStreamCombQuery) { + if (device != nullptr) { + bool streamCombinationSupported; + ScopedAStatus ret = + device->isStreamCombinationSupported(config, &streamCombinationSupported); + // TODO: Check is unsupported operation is correct. + ASSERT_TRUE(ret.isOk() || + (expectStreamCombQuery && ret.getExceptionCode() == EX_UNSUPPORTED_OPERATION)); + if (ret.isOk()) { + ASSERT_EQ(expectedStatus, streamCombinationSupported); + } + } +} + +std::vector<ConcurrentCameraIdCombination> CameraAidlTest::getConcurrentDeviceCombinations( + std::shared_ptr<ICameraProvider>& provider) { + std::vector<ConcurrentCameraIdCombination> combinations; + ndk::ScopedAStatus ret = provider->getConcurrentCameraIds(&combinations); + if (!ret.isOk()) { + ADD_FAILURE(); + } + + return combinations; +} + +Status CameraAidlTest::getMandatoryConcurrentStreams(const camera_metadata_t* staticMeta, + std::vector<AvailableStream>* outputStreams) { + if (nullptr == staticMeta || nullptr == outputStreams) { + return Status::ILLEGAL_ARGUMENT; + } + + if (isDepthOnly(staticMeta)) { + Size y16MaxSize(640, 480); + Size maxAvailableY16Size; + getMaxOutputSizeForFormat(staticMeta, PixelFormat::Y16, &maxAvailableY16Size); + Size y16ChosenSize = getMinSize(y16MaxSize, maxAvailableY16Size); + AvailableStream y16Stream = {.width = y16ChosenSize.width, + .height = y16ChosenSize.height, + .format = static_cast<int32_t>(PixelFormat::Y16)}; + outputStreams->push_back(y16Stream); + return Status::OK; + } + + Size yuvMaxSize(1280, 720); + Size jpegMaxSize(1920, 1440); + Size maxAvailableYuvSize; + Size maxAvailableJpegSize; + 
getMaxOutputSizeForFormat(staticMeta, PixelFormat::YCBCR_420_888, &maxAvailableYuvSize); + getMaxOutputSizeForFormat(staticMeta, PixelFormat::BLOB, &maxAvailableJpegSize); + Size yuvChosenSize = getMinSize(yuvMaxSize, maxAvailableYuvSize); + Size jpegChosenSize = getMinSize(jpegMaxSize, maxAvailableJpegSize); + + AvailableStream yuvStream = {.width = yuvChosenSize.width, + .height = yuvChosenSize.height, + .format = static_cast<int32_t>(PixelFormat::YCBCR_420_888)}; + + AvailableStream jpegStream = {.width = jpegChosenSize.width, + .height = jpegChosenSize.height, + .format = static_cast<int32_t>(PixelFormat::BLOB)}; + outputStreams->push_back(yuvStream); + outputStreams->push_back(jpegStream); + + return Status::OK; +} + +bool CameraAidlTest::isDepthOnly(const camera_metadata_t* staticMeta) { + camera_metadata_ro_entry scalerEntry; + camera_metadata_ro_entry depthEntry; + + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &scalerEntry); + if (rc == 0) { + for (uint32_t i = 0; i < scalerEntry.count; i++) { + if (scalerEntry.data.u8[i] == + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) { + return false; + } + } + } + + for (uint32_t i = 0; i < scalerEntry.count; i++) { + if (scalerEntry.data.u8[i] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) { + rc = find_camera_metadata_ro_entry( + staticMeta, ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &depthEntry); + size_t idx = 0; + if (rc == 0 && depthEntry.data.i32[idx] == static_cast<int32_t>(PixelFormat::Y16)) { + // only Depth16 format is supported now + return true; + } + break; + } + } + + return false; +} + +Status CameraAidlTest::getMaxOutputSizeForFormat(const camera_metadata_t* staticMeta, + PixelFormat format, Size* size, + bool maxResolution) { + std::vector<AvailableStream> outputStreams; + if (size == nullptr || + getAvailableOutputStreams(staticMeta, outputStreams, + /*threshold*/ nullptr, maxResolution) != Status::OK) { + return 
Status::ILLEGAL_ARGUMENT; + } + Size maxSize; + bool found = false; + for (auto& outputStream : outputStreams) { + if (static_cast<int32_t>(format) == outputStream.format && + (outputStream.width * outputStream.height > maxSize.width * maxSize.height)) { + maxSize.width = outputStream.width; + maxSize.height = outputStream.height; + found = true; + } + } + if (!found) { + ALOGE("%s :chosen format %d not found", __FUNCTION__, static_cast<int32_t>(format)); + return Status::ILLEGAL_ARGUMENT; + } + *size = maxSize; + return Status::OK; +} + +Size CameraAidlTest::getMinSize(Size a, Size b) { + if (a.width * a.height < b.width * b.height) { + return a; + } + return b; +} + +Status CameraAidlTest::getZSLInputOutputMap(camera_metadata_t* staticMeta, + std::vector<AvailableZSLInputOutput>& inputOutputMap) { + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry( + staticMeta, ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, &entry); + if ((0 != rc) || (0 >= entry.count)) { + return Status::ILLEGAL_ARGUMENT; + } + + const int32_t* contents = &entry.data.i32[0]; + for (size_t i = 0; i < entry.count;) { + int32_t inputFormat = contents[i++]; + int32_t length = contents[i++]; + for (int32_t j = 0; j < length; j++) { + int32_t outputFormat = contents[i + j]; + AvailableZSLInputOutput zslEntry = {inputFormat, outputFormat}; + inputOutputMap.push_back(zslEntry); + } + i += length; + } + + return Status::OK; +} + +Status CameraAidlTest::findLargestSize(const std::vector<AvailableStream>& streamSizes, + int32_t format, AvailableStream& result) { + result = {0, 0, 0}; + for (auto& iter : streamSizes) { + if (format == iter.format) { + if ((result.width * result.height) < (iter.width * iter.height)) { + result = iter; + } + } + } + + return (result.format == format) ? 
Status::OK : Status::ILLEGAL_ARGUMENT; +} + +void CameraAidlTest::constructFilteredSettings( + const std::shared_ptr<ICameraDeviceSession>& session, + const std::unordered_set<int32_t>& availableKeys, RequestTemplate reqTemplate, + android::hardware::camera::common::V1_0::helper::CameraMetadata* defaultSettings, + android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings) { + ASSERT_NE(defaultSettings, nullptr); + ASSERT_NE(filteredSettings, nullptr); + + CameraMetadata req; + auto ret = session->constructDefaultRequestSettings(reqTemplate, &req); + ASSERT_TRUE(ret.isOk()); + + const camera_metadata_t* metadata = + clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(req.metadata.data())); + size_t expectedSize = req.metadata.size(); + int result = validate_camera_metadata_structure(metadata, &expectedSize); + ASSERT_TRUE((result == 0) || (result == CAMERA_METADATA_VALIDATION_SHIFTED)); + + size_t entryCount = get_camera_metadata_entry_count(metadata); + ASSERT_GT(entryCount, 0u); + *defaultSettings = metadata; + + const android::hardware::camera::common::V1_0::helper::CameraMetadata& constSettings = + *defaultSettings; + for (const auto& keyIt : availableKeys) { + camera_metadata_ro_entry entry = constSettings.find(keyIt); + if (entry.count > 0) { + filteredSettings->update(entry); + } + } +} + +void CameraAidlTest::verifySessionReconfigurationQuery( + const std::shared_ptr<ICameraDeviceSession>& session, camera_metadata* oldSessionParams, + camera_metadata* newSessionParams) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, oldSessionParams); + ASSERT_NE(nullptr, newSessionParams); + + std::vector<uint8_t> oldParams = + std::vector(reinterpret_cast<uint8_t*>(oldSessionParams), + reinterpret_cast<uint8_t*>(oldSessionParams) + + get_camera_metadata_size(oldSessionParams)); + CameraMetadata oldMetadata = {oldParams}; + + std::vector<uint8_t> newParams = + std::vector(reinterpret_cast<uint8_t*>(newSessionParams), + 
reinterpret_cast<uint8_t*>(newSessionParams) + + get_camera_metadata_size(newSessionParams)); + CameraMetadata newMetadata = {newParams}; + + bool reconfigReq; + ndk::ScopedAStatus ret = + session->isReconfigurationRequired(oldMetadata, newMetadata, &reconfigReq); + ASSERT_TRUE(ret.isOk() || static_cast<Status>(ret.getServiceSpecificError()) == + Status::OPERATION_NOT_SUPPORTED); +} + +Status CameraAidlTest::isConstrainedModeAvailable(camera_metadata_t* staticMeta) { + Status ret = Status::OPERATION_NOT_SUPPORTED; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + for (size_t i = 0; i < entry.count; i++) { + if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO == + entry.data.u8[i]) { + ret = Status::OK; + break; + } + } + + return ret; +} + +Status CameraAidlTest::pickConstrainedModeSize(camera_metadata_t* staticMeta, + AvailableStream& hfrStream) { + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry( + staticMeta, ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS, &entry); + if (0 != rc) { + return Status::OPERATION_NOT_SUPPORTED; + } else if (0 != (entry.count % 5)) { + return Status::ILLEGAL_ARGUMENT; + } + + hfrStream = {0, 0, static_cast<uint32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + for (size_t i = 0; i < entry.count; i += 5) { + int32_t w = entry.data.i32[i]; + int32_t h = entry.data.i32[i + 1]; + if ((hfrStream.width * hfrStream.height) < (w * h)) { + hfrStream.width = w; + hfrStream.height = h; + } + } + + return Status::OK; +} + +void CameraAidlTest::processCaptureRequestInternal(uint64_t bufferUsage, + RequestTemplate reqTemplate, + bool useSecureOnlyCameras) { + std::vector<std::string> cameraDeviceNames = + 
getCameraDeviceNames(mProvider, useSecureOnlyCameras); + AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + int64_t bufferId = 1; + int32_t frameNumber = 1; + CameraMetadata settings; + + for (const auto& name : cameraDeviceNames) { + Stream testStream; + std::vector<HalStream> halStreams; + std::shared_ptr<ICameraDeviceSession> session; + std::shared_ptr<DeviceCb> cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + int32_t partialResultCount = 0; + configureSingleStream(name, mProvider, &streamThreshold, bufferUsage, reqTemplate, + &session /*out*/, &testStream /*out*/, &halStreams /*out*/, + &supportsPartialResults /*out*/, &partialResultCount /*out*/, + &useHalBufManager /*out*/, &cb /*out*/); + + ASSERT_NE(session, nullptr); + ASSERT_NE(cb, nullptr); + ASSERT_FALSE(halStreams.empty()); + + std::shared_ptr<ResultMetadataQueue> resultQueue; + ::aidl::android::hardware::common::fmq::MQDescriptor< + int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite> + descriptor; + ndk::ScopedAStatus ret = session->getCaptureResultMetadataQueue(&descriptor); + ASSERT_TRUE(ret.isOk()); + + resultQueue = std::make_shared<ResultMetadataQueue>(descriptor); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", + __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
+ } + + std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>( + 1, false, supportsPartialResults, partialResultCount, resultQueue); + + CameraMetadata req; + ret = session->constructDefaultRequestSettings(reqTemplate, &req); + ASSERT_TRUE(ret.isOk()); + settings = req; + + overrideRotateAndCrop(&settings); + + std::vector<CaptureRequest> requests(1); + CaptureRequest& request = requests[0]; + request.frameNumber = frameNumber; + request.fmqSettingsSize = 0; + request.settings = settings; + + std::vector<StreamBuffer>& outputBuffers = request.outputBuffers; + outputBuffers.resize(1); + StreamBuffer& outputBuffer = outputBuffers[0]; + if (useHalBufManager) { + outputBuffer = {halStreams[0].id, + /*bufferId*/ 0, NativeHandle(), BufferStatus::OK, + NativeHandle(), NativeHandle()}; + } else { + buffer_handle_t handle; + allocateGraphicBuffer( + testStream.width, testStream.height, + /* We don't look at halStreamConfig.streams[0].consumerUsage + * since that is 0 for output streams + */ + android_convertGralloc1To0Usage( + static_cast<uint64_t>(halStreams[0].producerUsage), bufferUsage), + halStreams[0].overrideFormat, &handle); + + outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(handle), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + } + request.inputBuffer = {-1, + 0, + NativeHandle(), + BufferStatus::ERROR, + NativeHandle(), + NativeHandle()}; // Empty Input Buffer + + { + std::unique_lock<std::mutex> l(mLock); + mInflightMap.clear(); + mInflightMap.insert(std::make_pair(frameNumber, inflightReq)); + } + + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d", + ret.getExceptionCode(), ret.getServiceSpecificError()); + + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock<std::mutex> 
l(mLock); + while (!inflightReq->errorCodeValid && + ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq->errorCodeValid); + ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u); + ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId); + + // shutterReadoutTimestamp must be available, and it must + // be >= shutterTimestamp + exposureTime, + // and < shutterTimestamp + exposureTime + rollingShutterSkew / 2. + ASSERT_TRUE(inflightReq->shutterReadoutTimestampValid); + ASSERT_FALSE(inflightReq->collectedResult.isEmpty()); + + if (inflightReq->collectedResult.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { + camera_metadata_entry_t exposureTimeResult = + inflightReq->collectedResult.find(ANDROID_SENSOR_EXPOSURE_TIME); + nsecs_t exposureToReadout = + inflightReq->shutterReadoutTimestamp - inflightReq->shutterTimestamp; + ASSERT_GE(exposureToReadout, exposureTimeResult.data.i64[0]); + if (inflightReq->collectedResult.exists(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW)) { + camera_metadata_entry_t rollingShutterSkew = + inflightReq->collectedResult.find(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW); + ASSERT_LT(exposureToReadout, + exposureTimeResult.data.i64[0] + rollingShutterSkew.data.i64[0] / 2); + } + } + + request.frameNumber++; + // Empty settings should be supported after the first call + // for repeating requests. 
+ request.settings.metadata.clear(); + // The buffer has been registered to HAL by bufferId, so per + // API contract we should send a null handle for this buffer + request.outputBuffers[0].buffer = NativeHandle(); + mInflightMap.clear(); + inflightReq = std::make_shared<InFlightRequest>(1, false, supportsPartialResults, + partialResultCount, resultQueue); + mInflightMap.insert(std::make_pair(request.frameNumber, inflightReq)); + } + + ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ALOGI("processCaptureRequestInternal: processCaptureRequest returns status: %d:%d", + ret.getExceptionCode(), ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock<std::mutex> l(mLock); + while (!inflightReq->errorCodeValid && + ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq->errorCodeValid); + ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u); + ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId); + } + + if (useHalBufManager) { + verifyBuffersReturned(session, testStream.id, cb); + } + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +void CameraAidlTest::configureSingleStream( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + const AvailableStream* previewThreshold, uint64_t bufferUsage, RequestTemplate reqTemplate, + std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream, + std::vector<HalStream>* halStreams, bool* supportsPartialResults, + int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb, + uint32_t streamConfigCounter) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, previewStream); + ASSERT_NE(nullptr, 
halStreams); + ASSERT_NE(nullptr, supportsPartialResults); + ASSERT_NE(nullptr, partialResultCount); + ASSERT_NE(nullptr, useHalBufManager); + ASSERT_NE(nullptr, cb); + + std::vector<AvailableStream> outputPreviewStreams; + std::shared_ptr<ICameraDevice> device; + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + + ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + camera_metadata_t* staticMeta; + CameraMetadata chars; + ret = device->getCameraCharacteristics(&chars); + ASSERT_TRUE(ret.isOk()); + staticMeta = clone_camera_metadata( + reinterpret_cast<const camera_metadata_t*>(chars.metadata.data())); + ASSERT_NE(nullptr, staticMeta); + + camera_metadata_ro_entry entry; + auto status = + find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry); + if ((0 == status) && (entry.count > 0)) { + *partialResultCount = entry.data.i32[0]; + *supportsPartialResults = (*partialResultCount > 1); + } + + *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta); + + device->open(*cb, session); + ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(*session, nullptr); + + *useHalBufManager = false; + status = find_camera_metadata_ro_entry( + staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + if ((0 == status) && (entry.count == 1)) { + *useHalBufManager = (entry.data.u8[0] == + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + } + + outputPreviewStreams.clear(); + auto rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold); + + int32_t jpegBufferSize = 0; + ASSERT_EQ(Status::OK, getJpegBufferSize(staticMeta, &jpegBufferSize)); + ASSERT_NE(0u, jpegBufferSize); + + 
free_camera_metadata(staticMeta); + ASSERT_EQ(Status::OK, rc); + ASSERT_FALSE(outputPreviewStreams.empty()); + + Dataspace dataspace = Dataspace::UNKNOWN; + switch (static_cast<PixelFormat>(outputPreviewStreams[0].format)) { + case PixelFormat::Y16: + dataspace = Dataspace::DEPTH; + break; + default: + dataspace = Dataspace::UNKNOWN; + } + + std::vector<Stream> streams(1); + streams[0] = {0, + StreamType::OUTPUT, + outputPreviewStreams[0].width, + outputPreviewStreams[0].height, + static_cast<PixelFormat>(outputPreviewStreams[0].format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>(bufferUsage), + dataspace, + StreamRotation::ROTATION_0, + "", + 0, + /*groupId*/ -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + + StreamConfiguration config; + config.streams = streams; + createStreamConfiguration(streams, StreamConfigurationMode::NORMAL_MODE, &config, + jpegBufferSize); + if (*session != nullptr) { + CameraMetadata sessionParams; + ret = (*session)->constructDefaultRequestSettings(reqTemplate, &sessionParams); + ASSERT_TRUE(ret.isOk()); + config.sessionParams = sessionParams; + config.streamConfigCounter = (int32_t)streamConfigCounter; + + bool supported = false; + ret = device->isStreamCombinationSupported(config, &supported); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(supported, true); + + std::vector<HalStream> halConfigs; + ret = (*session)->configureStreams(config, &halConfigs); + ALOGI("configureStreams returns status: %d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(1u, halConfigs.size()); + halStreams->clear(); + halStreams->push_back(halConfigs[0]); + if (*useHalBufManager) { + std::vector<Stream> ss(1); + std::vector<HalStream> hs(1); + ss[0] = config.streams[0]; + hs[0] = halConfigs[0]; + (*cb)->setCurrentStreamConfig(ss, hs); + } + } + *previewStream = config.streams[0]; + ASSERT_TRUE(ret.isOk()); +} + +void CameraAidlTest::overrideRotateAndCrop(CameraMetadata* 
settings) {
+    if (settings == nullptr) {
+        return;
+    }
+
+    ::android::hardware::camera::common::V1_0::helper::CameraMetadata requestMeta =
+            clone_camera_metadata(reinterpret_cast<camera_metadata_t*>(settings->metadata.data()));
+    auto entry = requestMeta.find(ANDROID_SCALER_ROTATE_AND_CROP);
+    if ((entry.count > 0) && (entry.data.u8[0] == ANDROID_SCALER_ROTATE_AND_CROP_AUTO)) {
+        uint8_t disableRotateAndCrop = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
+        requestMeta.update(ANDROID_SCALER_ROTATE_AND_CROP, &disableRotateAndCrop, 1);
+        settings->metadata.clear();
+        camera_metadata_t* metaBuffer = requestMeta.release();
+        uint8_t* rawMetaBuffer = reinterpret_cast<uint8_t*>(metaBuffer);
+        settings->metadata =
+                std::vector(rawMetaBuffer, rawMetaBuffer + get_camera_metadata_size(metaBuffer));
+    }
+}
+
+void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session,
+                                           int32_t streamId, const std::shared_ptr<DeviceCb>& cb,
+                                           uint32_t streamConfigCounter) {
+    ASSERT_NE(nullptr, session);
+
+    std::vector<int32_t> streamIds(1);
+    streamIds[0] = streamId;
+    session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter);
+    cb->waitForBuffersReturned();
+}
+
+void CameraAidlTest::processPreviewStabilizationCaptureRequestInternal(
+        bool previewStabilizationOn,
+        // Used as output when preview stabilization is off, as input when it's on.
+ std::unordered_map<std::string, nsecs_t>& cameraDeviceToTimeLag) { + std::vector<std::string> cameraDeviceNames = getCameraDeviceNames(mProvider); + AvailableStream streamThreshold = {kMaxPreviewWidth, kMaxPreviewHeight, + static_cast<int32_t>(PixelFormat::IMPLEMENTATION_DEFINED)}; + int64_t bufferId = 1; + int32_t frameNumber = 1; + std::vector<uint8_t> settings; + + for (const auto& name : cameraDeviceNames) { + if (!supportsPreviewStabilization(name, mProvider)) { + ALOGI(" %s Camera device %s doesn't support preview stabilization, skipping", __func__, + name.c_str()); + continue; + } + + Stream testStream; + std::vector<HalStream> halStreams; + std::shared_ptr<ICameraDeviceSession> session; + std::shared_ptr<DeviceCb> cb; + bool supportsPartialResults = false; + bool useHalBufManager = false; + int32_t partialResultCount = 0; + configureSingleStream(name, mProvider, &streamThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + RequestTemplate::PREVIEW, &session /*out*/, &testStream /*out*/, + &halStreams /*out*/, &supportsPartialResults /*out*/, + &partialResultCount /*out*/, &useHalBufManager /*out*/, &cb /*out*/); + + ::aidl::android::hardware::common::fmq::MQDescriptor< + int8_t, aidl::android::hardware::common::fmq::SynchronizedReadWrite> + descriptor; + ndk::ScopedAStatus resultQueueRet = session->getCaptureResultMetadataQueue(&descriptor); + ASSERT_TRUE(resultQueueRet.isOk()); + + std::shared_ptr<ResultMetadataQueue> resultQueue = + std::make_shared<ResultMetadataQueue>(descriptor); + if (!resultQueue->isValid() || resultQueue->availableToWrite() <= 0) { + ALOGE("%s: HAL returns empty result metadata fmq," + " not use it", + __func__); + resultQueue = nullptr; + // Don't use the queue onwards. 
+ } + + std::shared_ptr<InFlightRequest> inflightReq = std::make_shared<InFlightRequest>( + 1, false, supportsPartialResults, partialResultCount, resultQueue); + + CameraMetadata defaultMetadata; + android::hardware::camera::common::V1_0::helper::CameraMetadata defaultSettings; + ndk::ScopedAStatus ret = session->constructDefaultRequestSettings(RequestTemplate::PREVIEW, + &defaultMetadata); + ASSERT_TRUE(ret.isOk()); + + const camera_metadata_t* metadata = + reinterpret_cast<const camera_metadata_t*>(defaultMetadata.metadata.data()); + defaultSettings = metadata; + android::status_t metadataRet = ::android::OK; + uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; + if (previewStabilizationOn) { + videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION; + metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + &videoStabilizationMode, 1); + } else { + metadataRet = defaultSettings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, + &videoStabilizationMode, 1); + } + ASSERT_EQ(metadataRet, ::android::OK); + + camera_metadata_t* releasedMetadata = defaultSettings.release(); + uint8_t* rawMetadata = reinterpret_cast<uint8_t*>(releasedMetadata); + + buffer_handle_t buffer_handle; + + std::vector<CaptureRequest> requests(1); + CaptureRequest& request = requests[0]; + request.frameNumber = frameNumber; + request.fmqSettingsSize = 0; + request.settings.metadata = + std::vector(rawMetadata, rawMetadata + get_camera_metadata_size(releasedMetadata)); + request.outputBuffers = std::vector<StreamBuffer>(1); + StreamBuffer& outputBuffer = request.outputBuffers[0]; + if (useHalBufManager) { + outputBuffer = {halStreams[0].id, + /*bufferId*/ 0, NativeHandle(), BufferStatus::OK, + NativeHandle(), NativeHandle()}; + } else { + allocateGraphicBuffer(testStream.width, testStream.height, + /* We don't look at halStreamConfig.streams[0].consumerUsage + * since that is 0 for output streams + */ + 
android_convertGralloc1To0Usage( + static_cast<uint64_t>(halStreams[0].producerUsage), + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + halStreams[0].overrideFormat, &buffer_handle); + outputBuffer = {halStreams[0].id, bufferId, ::android::makeToAidl(buffer_handle), + BufferStatus::OK, NativeHandle(), NativeHandle()}; + } + request.inputBuffer = { + -1, 0, NativeHandle(), BufferStatus::ERROR, NativeHandle(), NativeHandle()}; + + { + std::unique_lock<std::mutex> l(mLock); + mInflightMap.clear(); + mInflightMap.insert(std::make_pair(frameNumber, inflightReq)); + } + + int32_t numRequestProcessed = 0; + std::vector<BufferCache> cachesToRemove; + ret = session->processCaptureRequest(requests, cachesToRemove, &numRequestProcessed); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(numRequestProcessed, 1u); + + { + std::unique_lock<std::mutex> l(mLock); + while (!inflightReq->errorCodeValid && + ((0 < inflightReq->numBuffersLeft) || (!inflightReq->haveResultMetadata))) { + auto timeout = std::chrono::system_clock::now() + + std::chrono::seconds(kStreamBufferTimeoutSec); + ASSERT_NE(std::cv_status::timeout, mResultCondition.wait_until(l, timeout)); + } + + ASSERT_FALSE(inflightReq->errorCodeValid); + ASSERT_NE(inflightReq->resultOutputBuffers.size(), 0u); + ASSERT_EQ(testStream.id, inflightReq->resultOutputBuffers[0].buffer.streamId); + ASSERT_TRUE(inflightReq->shutterReadoutTimestampValid); + nsecs_t readoutTimestamp = inflightReq->shutterReadoutTimestamp; + + if (previewStabilizationOn) { + // Here we collect the time difference between the buffer ready + // timestamp - notify readout timestamp. + // timeLag = buffer ready timestamp - notify readout timestamp. + // timeLag(previewStabilization) must be <= + // timeLag(stabilization off) + 1 frame duration. 
+ auto it = cameraDeviceToTimeLag.find(name); + camera_metadata_entry e; + e = inflightReq->collectedResult.find(ANDROID_SENSOR_FRAME_DURATION); + ASSERT_TRUE(e.count > 0); + nsecs_t frameDuration = e.data.i64[0]; + ASSERT_TRUE(it != cameraDeviceToTimeLag.end()); + + nsecs_t previewStabOnLagTime = + inflightReq->resultOutputBuffers[0].timeStamp - readoutTimestamp; + ASSERT_TRUE(previewStabOnLagTime <= (it->second + frameDuration)); + } else { + // Fill in the buffer ready timestamp - notify timestamp; + cameraDeviceToTimeLag[std::string(name)] = + inflightReq->resultOutputBuffers[0].timeStamp - readoutTimestamp; + } + } + + if (useHalBufManager) { + verifyBuffersReturned(session, testStream.id, cb); + } + + ret = session->close(); + ASSERT_TRUE(ret.isOk()); + } +} + +bool CameraAidlTest::supportsPreviewStabilization( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider) { + std::shared_ptr<ICameraDevice> device; + ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + if (!ret.isOk() || device == nullptr) { + ADD_FAILURE() << "Failed to get camera device interface for " << name; + } + + CameraMetadata metadata; + ret = device->getCameraCharacteristics(&metadata); + camera_metadata_t* staticMeta = clone_camera_metadata( + reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data())); + if (!(ret.isOk())) { + ADD_FAILURE() << "Failed to get camera characteristics for " << name; + } + // Go through the characteristics and see if video stabilization modes have + // preview stabilization + camera_metadata_ro_entry entry; + + int retcode = find_camera_metadata_ro_entry( + staticMeta, ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, &entry); + if ((0 == retcode) && (entry.count > 0)) { + for (auto i = 0; i < entry.count; i++) { + if (entry.data.u8[i] == + 
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) { + return true; + } + } + } + return false; +} + +void CameraAidlTest::configurePreviewStreams( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + const AvailableStream* previewThreshold, const std::unordered_set<std::string>& physicalIds, + std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream, + std::vector<HalStream>* halStreams, bool* supportsPartialResults, + int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb, + int32_t streamConfigCounter) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, halStreams); + ASSERT_NE(nullptr, previewStream); + ASSERT_NE(nullptr, supportsPartialResults); + ASSERT_NE(nullptr, partialResultCount); + ASSERT_NE(nullptr, useHalBufManager); + ASSERT_NE(nullptr, cb); + + ASSERT_FALSE(physicalIds.empty()); + + std::vector<AvailableStream> outputPreviewStreams; + std::shared_ptr<ICameraDevice> device; + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + + ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + CameraMetadata meta; + ret = device->getCameraCharacteristics(&meta); + ASSERT_TRUE(ret.isOk()); + camera_metadata_t* staticMeta = + clone_camera_metadata(reinterpret_cast<const camera_metadata_t*>(meta.metadata.data())); + ASSERT_NE(nullptr, staticMeta); + + camera_metadata_ro_entry entry; + auto status = + find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry); + if ((0 == status) && (entry.count > 0)) { + *partialResultCount = entry.data.i32[0]; + *supportsPartialResults = (*partialResultCount > 1); + } + + *cb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta); + ret = device->open(*cb, session); + ALOGI("device::open returns status:%d:%d", 
ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(*session, nullptr); + + *useHalBufManager = false; + status = find_camera_metadata_ro_entry( + staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + if ((0 == status) && (entry.count == 1)) { + *useHalBufManager = (entry.data.u8[0] == + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + } + + outputPreviewStreams.clear(); + Status rc = getAvailableOutputStreams(staticMeta, outputPreviewStreams, previewThreshold); + free_camera_metadata(staticMeta); + ASSERT_EQ(Status::OK, rc); + ASSERT_FALSE(outputPreviewStreams.empty()); + + std::vector<Stream> streams(physicalIds.size()); + int32_t streamId = 0; + for (auto const& physicalId : physicalIds) { + streams[streamId++] = {streamId, + StreamType::OUTPUT, + outputPreviewStreams[0].width, + outputPreviewStreams[0].height, + static_cast<PixelFormat>(outputPreviewStreams[0].format), + static_cast<aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_HWCOMPOSER), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + physicalId, + 0, + 0, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}}; + } + + StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()}; + + RequestTemplate reqTemplate = RequestTemplate::PREVIEW; + ret = (*session)->constructDefaultRequestSettings(reqTemplate, &config.sessionParams); + ASSERT_TRUE(ret.isOk()); + + bool supported = false; + ret = device->isStreamCombinationSupported(config, &supported); + ASSERT_TRUE(ret.isOk()); + + config.streamConfigCounter = streamConfigCounter; + std::vector<HalStream> halConfigs; + ret = (*session)->configureStreams(config, &halConfigs); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(physicalIds.size(), halConfigs.size()); + *halStreams = halConfigs; + if (*useHalBufManager) { + std::vector<Stream> ss(physicalIds.size()); + std::vector<HalStream> hs(physicalIds.size()); + for 
(size_t i = 0; i < physicalIds.size(); i++) { + ss[i] = streams[i]; + hs[i] = halConfigs[i]; + } + (*cb)->setCurrentStreamConfig(ss, hs); + } + *previewStream = streams[0]; + ASSERT_TRUE(ret.isOk()); +} + +void CameraAidlTest::verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session, + const std::vector<int32_t>& streamIds, + std::shared_ptr<DeviceCb> cb, + uint32_t streamConfigCounter) { + ndk::ScopedAStatus ret = + session->signalStreamFlush(streamIds, /*streamConfigCounter*/ streamConfigCounter); + ASSERT_TRUE(ret.isOk()); + cb->waitForBuffersReturned(); +} + +void CameraAidlTest::configureStreams( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + PixelFormat format, std::shared_ptr<ICameraDeviceSession>* session, Stream* previewStream, + std::vector<HalStream>* halStreams, bool* supportsPartialResults, + int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* outCb, + uint32_t streamConfigCounter, bool maxResolution, + aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap prof) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, halStreams); + ASSERT_NE(nullptr, previewStream); + ASSERT_NE(nullptr, supportsPartialResults); + ASSERT_NE(nullptr, partialResultCount); + ASSERT_NE(nullptr, useHalBufManager); + ASSERT_NE(nullptr, outCb); + + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + + std::vector<AvailableStream> outputStreams; + std::shared_ptr<ICameraDevice> device; + + ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &device); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(device, nullptr); + + CameraMetadata metadata; + camera_metadata_t* staticMeta; + ret = device->getCameraCharacteristics(&metadata); + ASSERT_TRUE(ret.isOk()); + staticMeta = clone_camera_metadata( + reinterpret_cast<const 
camera_metadata_t*>(metadata.metadata.data())); + + camera_metadata_ro_entry entry; + auto status = + find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry); + if ((0 == status) && (entry.count > 0)) { + *partialResultCount = entry.data.i32[0]; + *supportsPartialResults = (*partialResultCount > 1); + } + + *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta); + ret = device->open(*outCb, session); + ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_TRUE(ret.isOk()); + ASSERT_NE(*session, nullptr); + + *useHalBufManager = false; + status = find_camera_metadata_ro_entry( + staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + if ((0 == status) && (entry.count == 1)) { + *useHalBufManager = (entry.data.u8[0] == + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + } + + outputStreams.clear(); + Size maxSize; + auto rc = getMaxOutputSizeForFormat(staticMeta, format, &maxSize, maxResolution); + ASSERT_EQ(Status::OK, rc); + free_camera_metadata(staticMeta); + + std::vector<Stream> streams(1); + streams[0] = {0, + StreamType::OUTPUT, + maxSize.width, + maxSize.height, + format, + static_cast<::aidl::android::hardware::graphics::common::BufferUsage>( + GRALLOC1_CONSUMER_USAGE_CPU_READ), + Dataspace::UNKNOWN, + StreamRotation::ROTATION_0, + "", + 0, + -1, + {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}, + prof}; + + StreamConfiguration config; + config.streams = streams; + config.operationMode = StreamConfigurationMode::NORMAL_MODE; + config.streamConfigCounter = streamConfigCounter; + config.multiResolutionInputImage = false; + CameraMetadata req; + RequestTemplate reqTemplate = RequestTemplate::STILL_CAPTURE; + ret = (*session)->constructDefaultRequestSettings(reqTemplate, &req); + ASSERT_TRUE(ret.isOk()); + config.sessionParams = req; + + bool supported = false; + ret = device->isStreamCombinationSupported(config, 
&supported); + ASSERT_TRUE(ret.isOk()); + ASSERT_EQ(supported, true); + + ret = (*session)->configureStreams(config, halStreams); + ASSERT_TRUE(ret.isOk()); + + if (*useHalBufManager) { + std::vector<Stream> ss(1); + std::vector<HalStream> hs(1); + ss[0] = streams[0]; + hs[0] = (*halStreams)[0]; + (*outCb)->setCurrentStreamConfig(ss, hs); + } + + *previewStream = streams[0]; + ASSERT_TRUE(ret.isOk()); +} + +bool CameraAidlTest::is10BitDynamicRangeCapable(const camera_metadata_t* staticMeta) { + camera_metadata_ro_entry scalerEntry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &scalerEntry); + if (rc == 0) { + for (uint32_t i = 0; i < scalerEntry.count; i++) { + if (scalerEntry.data.u8[i] == + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT) { + return true; + } + } + } + return false; +} + +void CameraAidlTest::get10BitDynamicRangeProfiles( + const camera_metadata_t* staticMeta, + std::vector< + aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap>* + profiles) { + ASSERT_NE(nullptr, staticMeta); + ASSERT_NE(nullptr, profiles); + camera_metadata_ro_entry entry; + std::unordered_set<int32_t> entries; + int rc = find_camera_metadata_ro_entry( + staticMeta, ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP, &entry); + ASSERT_EQ(rc, 0); + ASSERT_TRUE(entry.count > 0); + ASSERT_EQ(entry.count % 2, 0); + + for (uint32_t i = 0; i < entry.count; i += 2) { + ASSERT_NE(entry.data.i32[i], ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD); + ASSERT_EQ(entries.find(entry.data.i32[i]), entries.end()); + entries.insert(static_cast<int32_t>(entry.data.i32[i])); + profiles->emplace_back( + static_cast<aidl::android::hardware::camera::metadata:: + RequestAvailableDynamicRangeProfilesMap>(entry.data.i32[i])); + } + + if (!entries.empty()) { + ASSERT_NE(entries.find(ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10), + entries.end()); + } +} + +void 
CameraAidlTest::verify10BitMetadata( + HandleImporter& importer, const InFlightRequest& request, + aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap + profile) { + for (const auto& b : request.resultOutputBuffers) { + bool smpte2086Present = importer.isSmpte2086Present(b.buffer.buffer); + bool smpte2094_10Present = importer.isSmpte2094_10Present(b.buffer.buffer); + bool smpte2094_40Present = importer.isSmpte2094_40Present(b.buffer.buffer); + + switch (static_cast<uint32_t>(profile)) { + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10: + ASSERT_FALSE(smpte2086Present); + ASSERT_FALSE(smpte2094_10Present); + ASSERT_FALSE(smpte2094_40Present); + break; + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10: + ASSERT_TRUE(smpte2086Present); + ASSERT_FALSE(smpte2094_10Present); + ASSERT_FALSE(smpte2094_40Present); + break; + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS: + ASSERT_FALSE(smpte2086Present); + ASSERT_FALSE(smpte2094_10Present); + ASSERT_TRUE(smpte2094_40Present); + break; + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF: + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO: + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM: + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO: + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF: + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO: + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM: + case ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO: + ASSERT_FALSE(smpte2086Present); + ASSERT_TRUE(smpte2094_10Present); + ASSERT_FALSE(smpte2094_40Present); + break; + default: + ALOGE("%s: Unexpected 10-bit dynamic range profile: %d", __FUNCTION__, profile); + 
ADD_FAILURE(); + } + } +} + +void CameraAidlTest::configurePreviewStream( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + const AvailableStream* previewThreshold, std::shared_ptr<ICameraDeviceSession>* session, + Stream* previewStream, std::vector<HalStream>* halStreams, bool* supportsPartialResults, + int32_t* partialResultCount, bool* useHalBufManager, std::shared_ptr<DeviceCb>* cb, + uint32_t streamConfigCounter) { + configureSingleStream(name, provider, previewThreshold, GRALLOC1_CONSUMER_USAGE_HWCOMPOSER, + RequestTemplate::PREVIEW, session, previewStream, halStreams, + supportsPartialResults, partialResultCount, useHalBufManager, cb, + streamConfigCounter); +} + +Status CameraAidlTest::isOfflineSessionSupported(const camera_metadata_t* staticMeta) { + Status ret = Status::OPERATION_NOT_SUPPORTED; + if (nullptr == staticMeta) { + return Status::ILLEGAL_ARGUMENT; + } + + camera_metadata_ro_entry entry; + int rc = find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_AVAILABLE_CAPABILITIES, + &entry); + if (0 != rc) { + return Status::ILLEGAL_ARGUMENT; + } + + for (size_t i = 0; i < entry.count; i++) { + if (ANDROID_REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING == entry.data.u8[i]) { + ret = Status::OK; + break; + } + } + + return ret; +} + +void CameraAidlTest::configureOfflineStillStream( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + const AvailableStream* threshold, std::shared_ptr<ICameraDeviceSession>* session, + Stream* stream, std::vector<HalStream>* halStreams, bool* supportsPartialResults, + int32_t* partialResultCount, std::shared_ptr<DeviceCb>* outCb, int32_t* jpegBufferSize, + bool* useHalBufManager) { + ASSERT_NE(nullptr, session); + ASSERT_NE(nullptr, halStreams); + ASSERT_NE(nullptr, stream); + ASSERT_NE(nullptr, supportsPartialResults); + ASSERT_NE(nullptr, partialResultCount); + ASSERT_NE(nullptr, outCb); + ASSERT_NE(nullptr, jpegBufferSize); + ASSERT_NE(nullptr, 
useHalBufManager); + + std::vector<AvailableStream> outputStreams; + std::shared_ptr<ICameraDevice> cameraDevice; + ALOGI("configureStreams: Testing camera device %s", name.c_str()); + + ndk::ScopedAStatus ret = provider->getCameraDeviceInterface(name, &cameraDevice); + ASSERT_TRUE(ret.isOk()); + ALOGI("getCameraDeviceInterface returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_NE(cameraDevice, nullptr); + + CameraMetadata metadata; + ret = cameraDevice->getCameraCharacteristics(&metadata); + ASSERT_TRUE(ret.isOk()); + camera_metadata_t* staticMeta = clone_camera_metadata( + reinterpret_cast<const camera_metadata_t*>(metadata.metadata.data())); + ASSERT_NE(nullptr, staticMeta); + + camera_metadata_ro_entry entry; + auto status = + find_camera_metadata_ro_entry(staticMeta, ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry); + if ((0 == status) && (entry.count > 0)) { + *partialResultCount = entry.data.i32[0]; + *supportsPartialResults = (*partialResultCount > 1); + } + + *useHalBufManager = false; + status = find_camera_metadata_ro_entry( + staticMeta, ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION, &entry); + if ((0 == status) && (entry.count == 1)) { + *useHalBufManager = (entry.data.u8[0] == + ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION_HIDL_DEVICE_3_5); + } + + auto st = getJpegBufferSize(staticMeta, jpegBufferSize); + ASSERT_EQ(st, Status::OK); + + *outCb = ndk::SharedRefBase::make<DeviceCb>(this, staticMeta); + ret = cameraDevice->open(*outCb, session); + ASSERT_TRUE(ret.isOk()); + ALOGI("device::open returns status:%d:%d", ret.getExceptionCode(), + ret.getServiceSpecificError()); + ASSERT_NE(session, nullptr); + + outputStreams.clear(); + auto rc = getAvailableOutputStreams(staticMeta, outputStreams, threshold); + size_t idx = 0; + int currLargest = outputStreams[0].width * outputStreams[0].height; + for (size_t i = 0; i < outputStreams.size(); i++) { + int area = outputStreams[i].width * outputStreams[i].height; + if 
(area > currLargest) {
+            idx = i;
+            currLargest = area;
+        }
+    }
+    free_camera_metadata(staticMeta);
+    ASSERT_EQ(Status::OK, rc);
+    ASSERT_FALSE(outputStreams.empty());
+
+    Dataspace dataspace = getDataspace(static_cast<PixelFormat>(outputStreams[idx].format));
+
+    std::vector<Stream> streams(/*size*/ 1);
+    streams[0] = {/*id*/ 0,
+                  StreamType::OUTPUT,
+                  outputStreams[idx].width,
+                  outputStreams[idx].height,
+                  static_cast<PixelFormat>(outputStreams[idx].format),
+                  static_cast<::aidl::android::hardware::graphics::common::BufferUsage>(
+                          GRALLOC1_CONSUMER_USAGE_CPU_READ),
+                  dataspace,
+                  StreamRotation::ROTATION_0,
+                  /*physicalId*/ std::string(),
+                  *jpegBufferSize,
+                  /*groupId*/ 0,
+                  {SensorPixelMode::ANDROID_SENSOR_PIXEL_MODE_DEFAULT}};
+
+    StreamConfiguration config = {streams, StreamConfigurationMode::NORMAL_MODE, CameraMetadata()};
+
+    ret = (*session)->configureStreams(config, halStreams);
+    ASSERT_TRUE(ret.isOk());
+
+    if (*useHalBufManager) {
+        (*outCb)->setCurrentStreamConfig(streams, *halStreams);
+    }
+
+    *stream = streams[0];
+}
+
+void CameraAidlTest::updateInflightResultQueue(
+        const std::shared_ptr<ResultMetadataQueue>& resultQueue) {
+    std::unique_lock<std::mutex> l(mLock);
+    for (auto& it : mInflightMap) {
+        it.second->resultQueue = resultQueue;
+    }
+}
\ No newline at end of file diff --git a/camera/provider/aidl/vts/camera_aidl_test.h b/camera/provider/aidl/vts/camera_aidl_test.h new file mode 100644 index 0000000000..cc381696c7 --- /dev/null +++ b/camera/provider/aidl/vts/camera_aidl_test.h @@ -0,0 +1,528 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_CAMERA_AIDL_TEST_H_ +#define HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_CAMERA_AIDL_TEST_H_ + +#define LOG_TAG "camera_aidl_hal_test" + +#include <string> +#include <unordered_map> +#include <unordered_set> + +#include <CameraMetadata.h> +#include <CameraParameters.h> +#include <HandleImporter.h> +#include <fmq/AidlMessageQueue.h> + +#include <aidl/android/hardware/graphics/common/Dataspace.h> + +#include <aidl/android/hardware/camera/common/Status.h> +#include <aidl/android/hardware/camera/common/TorchModeStatus.h> +#include <aidl/android/hardware/common/NativeHandle.h> + +#include <aidl/android/hardware/camera/device/CaptureResult.h> +#include <aidl/android/hardware/camera/device/ErrorCode.h> +#include <aidl/android/hardware/camera/device/HalStream.h> +#include <aidl/android/hardware/camera/device/ICameraDevice.h> +#include <aidl/android/hardware/camera/device/NotifyMsg.h> +#include <aidl/android/hardware/camera/device/PhysicalCameraMetadata.h> +#include <aidl/android/hardware/camera/device/Stream.h> + +#include 
<aidl/android/hardware/camera/provider/ICameraProvider.h> + +#include <aidl/android/hardware/graphics/common/PixelFormat.h> + +#include <gtest/gtest.h> + +#include <log/log.h> +#include <system/camera_metadata.h> +#include <utils/KeyedVector.h> +#include <utils/Timers.h> + +using ::aidl::android::hardware::camera::common::Status; +using ::aidl::android::hardware::camera::common::TorchModeStatus; +using ::aidl::android::hardware::camera::device::BufferRequest; +using ::aidl::android::hardware::camera::device::BufferRequestStatus; +using ::aidl::android::hardware::camera::device::CameraMetadata; +using ::aidl::android::hardware::camera::device::CaptureResult; +using ::aidl::android::hardware::camera::device::ErrorCode; +using ::aidl::android::hardware::camera::device::HalStream; +using ::aidl::android::hardware::camera::device::ICameraDevice; +using ::aidl::android::hardware::camera::device::ICameraDeviceSession; +using ::aidl::android::hardware::camera::device::ICameraInjectionSession; +using ::aidl::android::hardware::camera::device::NotifyMsg; +using ::aidl::android::hardware::camera::device::PhysicalCameraMetadata; +using ::aidl::android::hardware::camera::device::RequestTemplate; +using ::aidl::android::hardware::camera::device::Stream; +using ::aidl::android::hardware::camera::device::StreamBuffer; +using ::aidl::android::hardware::camera::device::StreamBufferRet; +using ::aidl::android::hardware::camera::device::StreamConfiguration; +using ::aidl::android::hardware::camera::device::StreamConfigurationMode; +using ::aidl::android::hardware::camera::provider::ConcurrentCameraIdCombination; +using ::aidl::android::hardware::camera::provider::ICameraProvider; + +using ::aidl::android::hardware::common::NativeHandle; +using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite; + +using ::aidl::android::hardware::graphics::common::Dataspace; +using ::aidl::android::hardware::graphics::common::PixelFormat; + +using 
::android::hardware::camera::common::V1_0::helper::HandleImporter; +using ::android::hardware::camera::common::V1_0::helper::Size; + +using ResultMetadataQueue = android::AidlMessageQueue<int8_t, SynchronizedReadWrite>; + +using ::ndk::ScopedAStatus; + +class DeviceCb; // Forward declare to break circular header dependency + +class CameraAidlTest : public ::testing::TestWithParam<std::string> { + public: + enum SystemCameraKind { + /** + * These camera devices are visible to all apps and system components alike + */ + PUBLIC = 0, + + /** + * These camera devices are visible only to processes having the + * android.permission.SYSTEM_CAMERA permission. They are not exposed to 3P + * apps. + */ + SYSTEM_ONLY_CAMERA, + + /** + * These camera devices are visible only to HAL clients (that try to connect + * on a hwbinder thread). + */ + HIDDEN_SECURE_CAMERA + }; + + struct AvailableStream { + int32_t width; + int32_t height; + int32_t format; + }; + + enum ReprocessType { + PRIV_REPROCESS, + YUV_REPROCESS, + }; + + struct AvailableZSLInputOutput { + int32_t inputFormat; + int32_t outputFormat; + }; + + virtual void SetUp() override; + virtual void TearDown() override; + + std::vector<std::string> getCameraDeviceNames(std::shared_ptr<ICameraProvider>& provider, + bool addSecureOnly = false); + + static bool isSecureOnly(const std::shared_ptr<ICameraProvider>& provider, + const std::string& name); + + std::map<std::string, std::string> getCameraDeviceIdToNameMap( + std::shared_ptr<ICameraProvider> provider); + + static std::vector<ConcurrentCameraIdCombination> getConcurrentDeviceCombinations( + std::shared_ptr<ICameraProvider>& provider); + + void notifyDeviceState(int64_t state); + + static void allocateGraphicBuffer(uint32_t width, uint32_t height, uint64_t usage, + PixelFormat format, buffer_handle_t* buffer_handle /*out*/); + + static void openEmptyDeviceSession(const std::string& name, + std::shared_ptr<ICameraProvider> provider, + 
std::shared_ptr<ICameraDeviceSession>* session /*out*/, + CameraMetadata* staticMeta /*out*/, + std::shared_ptr<ICameraDevice>* device /*out*/); + static void openEmptyInjectionSession(const std::string& name, + const std::shared_ptr<ICameraProvider>& provider, + std::shared_ptr<ICameraInjectionSession>* session /*out*/, + CameraMetadata* staticMeta /*out*/, + std::shared_ptr<ICameraDevice>* device /*out*/); + + static void createStreamConfiguration(std::vector<Stream>& streams, + StreamConfigurationMode configMode, + StreamConfiguration* config, int32_t jpegBufferSize = 0); + + void configureOfflineStillStream( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + const AvailableStream* threshold, + std::shared_ptr<ICameraDeviceSession>* session /*out*/, Stream* stream /*out*/, + std::vector<HalStream>* halStreams, bool* supportsPartialResults /*out*/, + int32_t* partialResultCount /*out*/, std::shared_ptr<DeviceCb>* outCb /*out*/, + int32_t* jpegBufferSize /*out*/, bool* useHalBufManager /*out*/); + + void configureStreams( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + PixelFormat format, std::shared_ptr<ICameraDeviceSession>* session /*out*/, + Stream* previewStream /*out*/, std::vector<HalStream>* halStreams /*out*/, + bool* supportsPartialResults /*out*/, int32_t* partialResultCount /*out*/, + bool* useHalBufManager /*out*/, std::shared_ptr<DeviceCb>* outCb /*out*/, + uint32_t streamConfigCounter, bool maxResolution, + aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap + prof = ::aidl::android::hardware::camera::metadata:: + RequestAvailableDynamicRangeProfilesMap(0)); + + void configurePreviewStreams( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + const AvailableStream* previewThreshold, + const std::unordered_set<std::string>& physicalIds, + std::shared_ptr<ICameraDeviceSession>* session /*out*/, Stream* previewStream /*out*/, + 
std::vector<HalStream>* halStreams /*out*/, bool* supportsPartialResults /*out*/, + int32_t* partialResultCount /*out*/, bool* useHalBufManager /*out*/, + std::shared_ptr<DeviceCb>* cb /*out*/, int32_t streamConfigCounter = 0); + + void configurePreviewStream( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + const AvailableStream* previewThreshold, + std::shared_ptr<ICameraDeviceSession>* session /*out*/, Stream* previewStream /*out*/, + std::vector<HalStream>* halStreams /*out*/, bool* supportsPartialResults /*out*/, + int32_t* partialResultCount /*out*/, bool* useHalBufManager /*out*/, + std::shared_ptr<DeviceCb>* cb /*out*/, uint32_t streamConfigCounter = 0); + + void configureSingleStream( + const std::string& name, const std::shared_ptr<ICameraProvider>& provider, + const AvailableStream* previewThreshold, uint64_t bufferUsage, + RequestTemplate reqTemplate, std::shared_ptr<ICameraDeviceSession>* session /*out*/, + Stream* previewStream /*out*/, std::vector<HalStream>* halStreams /*out*/, + bool* supportsPartialResults /*out*/, int32_t* partialResultCount /*out*/, + bool* useHalBufManager /*out*/, std::shared_ptr<DeviceCb>* cb /*out*/, + uint32_t streamConfigCounter = 0); + + void verifyLogicalOrUltraHighResCameraMetadata(const std::string& cameraName, + const std::shared_ptr<ICameraDevice>& device, + const CameraMetadata& chars, + const std::vector<std::string>& deviceNames); + + static void verifyCameraCharacteristics(const CameraMetadata& chars); + + static void verifyExtendedSceneModeCharacteristics(const camera_metadata_t* metadata); + + static void verifyZoomCharacteristics(const camera_metadata_t* metadata); + + static void verifyRecommendedConfigs(const CameraMetadata& chars); + + static void verifyMonochromeCharacteristics(const CameraMetadata& chars); + + static void verifyMonochromeCameraResult( + const ::android::hardware::camera::common::V1_0::helper::CameraMetadata& metadata); + + static void 
verifyStreamUseCaseCharacteristics(const camera_metadata_t* metadata); + + static void verifyStreamCombination(const std::shared_ptr<ICameraDevice>& device, + const StreamConfiguration& config, bool expectedStatus, + bool expectStreamCombQuery); + + static void verifyLogicalCameraResult(const camera_metadata_t* staticMetadata, + const std::vector<uint8_t>& resultMetadata); + + static void verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session, + int32_t streamId, const std::shared_ptr<DeviceCb>& cb, + uint32_t streamConfigCounter = 0); + + void verifyBuffersReturned(const std::shared_ptr<ICameraDeviceSession>& session, + const std::vector<int32_t>& streamIds, std::shared_ptr<DeviceCb> cb, + uint32_t streamConfigCounter = 0); + + static void verifySessionReconfigurationQuery( + const std::shared_ptr<ICameraDeviceSession>& session, camera_metadata* oldSessionParams, + camera_metadata* newSessionParams); + + static void verifyRequestTemplate(const camera_metadata_t* metadata, + RequestTemplate requestTemplate); + + static void overrideRotateAndCrop(CameraMetadata* settings /*in/out*/); + + static bool isDepthOnly(const camera_metadata_t* staticMeta); + + static bool isUltraHighResolution(const camera_metadata_t* staticMeta); + + static Status getAvailableOutputStreams(const camera_metadata_t* staticMeta, + std::vector<AvailableStream>& outputStreams, + const AvailableStream* threshold = nullptr, + bool maxResolution = false); + + static Status getMaxOutputSizeForFormat(const camera_metadata_t* staticMeta, PixelFormat format, + Size* size, bool maxResolution = false); + + static Status getMandatoryConcurrentStreams(const camera_metadata_t* staticMeta, + std::vector<AvailableStream>* outputStreams); + + static bool supportsPreviewStabilization(const std::string& name, + const std::shared_ptr<ICameraProvider>& provider); + + static Status getJpegBufferSize(camera_metadata_t* staticMeta, int32_t* outBufSize); + + static Status 
isConstrainedModeAvailable(camera_metadata_t* staticMeta); + + static Status isLogicalMultiCamera(const camera_metadata_t* staticMeta); + + static bool isTorchSupported(const camera_metadata_t* staticMeta); + + static bool isTorchStrengthControlSupported(const camera_metadata_t* staticMeta); + + static Status isOfflineSessionSupported(const camera_metadata_t* staticMeta); + + static Status getPhysicalCameraIds(const camera_metadata_t* staticMeta, + std::unordered_set<std::string>* physicalIds /*out*/); + + static Status getSupportedKeys(camera_metadata_t* staticMeta, uint32_t tagId, + std::unordered_set<int32_t>* requestIDs /*out*/); + + static void fillOutputStreams(camera_metadata_ro_entry_t* entry, + std::vector<AvailableStream>& outputStreams, + const AvailableStream* threshold = nullptr, + const int32_t availableConfigOutputTag = 0u); + + static void constructFilteredSettings( + const std::shared_ptr<ICameraDeviceSession>& session, + const std::unordered_set<int32_t>& availableKeys, RequestTemplate reqTemplate, + android::hardware::camera::common::V1_0::helper::CameraMetadata* + defaultSettings /*out*/, + android::hardware::camera::common::V1_0::helper::CameraMetadata* filteredSettings + /*out*/); + + static Status pickConstrainedModeSize(camera_metadata_t* staticMeta, + AvailableStream& hfrStream); + + static Status isZSLModeAvailable(const camera_metadata_t* staticMeta); + + static Status isZSLModeAvailable(const camera_metadata_t* staticMeta, ReprocessType reprocType); + + static Status getZSLInputOutputMap(camera_metadata_t* staticMeta, + std::vector<AvailableZSLInputOutput>& inputOutputMap); + + static Status findLargestSize(const std::vector<AvailableStream>& streamSizes, int32_t format, + AvailableStream& result); + + static Status isMonochromeCamera(const camera_metadata_t* staticMeta); + + static Status getSystemCameraKind(const camera_metadata_t* staticMeta, + SystemCameraKind* systemCameraKind); + + static void 
getMultiResolutionStreamConfigurations( + camera_metadata_ro_entry* multiResStreamConfigs, + camera_metadata_ro_entry* streamConfigs, + camera_metadata_ro_entry* maxResolutionStreamConfigs, + const camera_metadata_t* staticMetadata); + + static void getPrivacyTestPatternModes( + const camera_metadata_t* staticMetadata, + std::unordered_set<int32_t>* privacyTestPatternModes /*out*/); + + static Dataspace getDataspace(PixelFormat format); + + void processCaptureRequestInternal(uint64_t bufferUsage, RequestTemplate reqTemplate, + bool useSecureOnlyCameras); + + void processPreviewStabilizationCaptureRequestInternal( + bool previewStabilizationOn, + /*inout*/ std::unordered_map<std::string, nsecs_t>& cameraDeviceToTimeLag); + + static bool is10BitDynamicRangeCapable(const camera_metadata_t* staticMeta); + + static void get10BitDynamicRangeProfiles( + const camera_metadata_t* staticMeta, + std::vector<aidl::android::hardware::camera::metadata:: + RequestAvailableDynamicRangeProfilesMap>* profiles); + + // Used by switchToOffline where a new result queue is created for offline reqs + void updateInflightResultQueue(const std::shared_ptr<ResultMetadataQueue>& resultQueue); + + static Size getMinSize(Size a, Size b); + + protected: + // In-flight queue for tracking completion of capture requests. + struct InFlightRequest { + // Set by notify() SHUTTER call. 
+ nsecs_t shutterTimestamp; + + bool shutterReadoutTimestampValid; + nsecs_t shutterReadoutTimestamp; + + bool errorCodeValid; + ErrorCode errorCode; + + // Is partial result supported + bool usePartialResult; + + // Partial result count expected + int32_t numPartialResults; + + // Message queue + std::shared_ptr<ResultMetadataQueue> resultQueue; + + // Set by process_capture_result call with valid metadata + bool haveResultMetadata; + + // Decremented by calls to process_capture_result with valid output + // and input buffers + ssize_t numBuffersLeft; + + // A 64bit integer to index the frame number associated with this result. + int64_t frameNumber; + + // The partial result count (index) for this capture result. + int32_t partialResultCount; + + // For buffer drop errors, the stream ID for the stream that lost a buffer. + // For physical sub-camera result errors, the Id of the physical stream + // for the physical sub-camera. + // Otherwise -1. + int32_t errorStreamId; + + // If this request has any input buffer + bool hasInputBuffer; + + // Result metadata + ::android::hardware::camera::common::V1_0::helper::CameraMetadata collectedResult; + + // A copy-able StreamBuffer using buffer_handle_t instead of AIDLs NativeHandle + struct NativeStreamBuffer { + int32_t streamId; + int64_t bufferId; + buffer_handle_t buffer; + aidl::android::hardware::camera::device::BufferStatus status; + buffer_handle_t acquireFence; + buffer_handle_t releaseFence; + }; + + // Buffers are added by process_capture_result when output buffers + // return from HAL but framework. 
+ struct StreamBufferAndTimestamp { + NativeStreamBuffer buffer; + nsecs_t timeStamp; + }; + std::vector<StreamBufferAndTimestamp> resultOutputBuffers; + + std::unordered_set<std::string> expectedPhysicalResults; + + InFlightRequest() + : shutterTimestamp(0), + shutterReadoutTimestampValid(false), + shutterReadoutTimestamp(0), + errorCodeValid(false), + errorCode(ErrorCode::ERROR_BUFFER), + usePartialResult(false), + numPartialResults(0), + resultQueue(nullptr), + haveResultMetadata(false), + numBuffersLeft(0), + frameNumber(0), + partialResultCount(0), + errorStreamId(-1), + hasInputBuffer(false), + collectedResult(1, 10) {} + + InFlightRequest(ssize_t numBuffers, bool hasInput, bool partialResults, + int32_t partialCount, std::shared_ptr<ResultMetadataQueue> queue = nullptr) + : shutterTimestamp(0), + shutterReadoutTimestampValid(false), + shutterReadoutTimestamp(0), + errorCodeValid(false), + errorCode(ErrorCode::ERROR_BUFFER), + usePartialResult(partialResults), + numPartialResults(partialCount), + resultQueue(queue), + haveResultMetadata(false), + numBuffersLeft(numBuffers), + frameNumber(0), + partialResultCount(0), + errorStreamId(-1), + hasInputBuffer(hasInput), + collectedResult(1, 10) {} + + InFlightRequest(ssize_t numBuffers, bool hasInput, bool partialResults, + int32_t partialCount, + const std::unordered_set<std::string>& extraPhysicalResult, + std::shared_ptr<ResultMetadataQueue> queue = nullptr) + : shutterTimestamp(0), + shutterReadoutTimestampValid(false), + shutterReadoutTimestamp(0), + errorCodeValid(false), + errorCode(ErrorCode::ERROR_BUFFER), + usePartialResult(partialResults), + numPartialResults(partialCount), + resultQueue(queue), + haveResultMetadata(false), + numBuffersLeft(numBuffers), + frameNumber(0), + partialResultCount(0), + errorStreamId(-1), + hasInputBuffer(hasInput), + collectedResult(1, 10), + expectedPhysicalResults(extraPhysicalResult) {} + }; + + static bool matchDeviceName(const std::string& deviceName, const std::string& 
providerType, + std::string* deviceVersion, std::string* cameraId); + + static void verify10BitMetadata( + HandleImporter& importer, const InFlightRequest& request, + aidl::android::hardware::camera::metadata::RequestAvailableDynamicRangeProfilesMap + profile); + + // Map from frame number to the in-flight request state + typedef std::unordered_map<uint32_t, std::shared_ptr<InFlightRequest>> InFlightMap; + + std::mutex mLock; // Synchronize access to member variables + std::condition_variable mResultCondition; // Condition variable for incoming results + InFlightMap mInflightMap; // Map of all inflight requests + + std::vector<NotifyMsg> mNotifyMessages; // Current notification message + + std::mutex mTorchLock; // Synchronize access to torch status + std::condition_variable mTorchCond; // Condition variable for torch status + TorchModeStatus mTorchStatus; // Current torch status + + // Camera provider service + std::shared_ptr<ICameraProvider> mProvider; + + // Camera device session used by the tests + // Tests should take care of closing this session and setting it back to nullptr in successful + // case. Declared as a field to allow TeadDown function to close the session if a test assertion + // fails. + std::shared_ptr<ICameraDeviceSession> mSession; + + // Camera provider type. 
+ std::string mProviderType; + + HandleImporter mHandleImporter; + + friend class DeviceCb; + friend class SimpleDeviceCb; + friend class TorchProviderCb; +}; + +namespace { +// device@<major>.<minor>/<type>/id +const char* kDeviceNameRE = "device@([0-9]+\\.[0-9]+)/\\s+/(.+)"; +const int32_t kMaxVideoWidth = 4096; +const int32_t kMaxVideoHeight = 2160; + +const int64_t kStreamBufferTimeoutSec = 3; +const int64_t kTorchTimeoutSec = 1; +const char* kDumpOutput = "/dev/null"; +const uint32_t kMaxPreviewWidth = 1920; +const uint32_t kMaxPreviewHeight = 1080; +} // namespace +#endif // HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_CAMERA_AIDL_TEST_H_ diff --git a/camera/provider/aidl/vts/device_cb.cpp b/camera/provider/aidl/vts/device_cb.cpp new file mode 100644 index 0000000000..52b4079465 --- /dev/null +++ b/camera/provider/aidl/vts/device_cb.cpp @@ -0,0 +1,544 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "device_cb.h" + +#include <aidl/android/hardware/graphics/common/PixelFormat.h> +#include <aidlcommonsupport/NativeHandle.h> +#include <grallocusage/GrallocUsageConversion.h> +#include <ui/Fence.h> +#include <cinttypes> + +using ::aidl::android::hardware::camera::device::BufferStatus; +using ::aidl::android::hardware::camera::device::ErrorMsg; +using ::aidl::android::hardware::camera::device::StreamBufferRequestError; +using ::aidl::android::hardware::camera::device::StreamBuffersVal; +using ::aidl::android::hardware::graphics::common::PixelFormat; + +const int64_t kBufferReturnTimeoutSec = 1; + +DeviceCb::DeviceCb(CameraAidlTest* parent, camera_metadata_t* staticMeta) : mParent(parent) { + mStaticMetadata = staticMeta; +} + +ScopedAStatus DeviceCb::notify(const std::vector<NotifyMsg>& msgs) { + std::vector<std::pair<bool, nsecs_t>> readoutTimestamps; + + size_t count = msgs.size(); + readoutTimestamps.resize(count); + + for (size_t i = 0; i < count; i++) { + const NotifyMsg& msg = msgs[i]; + switch (msg.getTag()) { + case NotifyMsg::Tag::error: + readoutTimestamps[i] = {false, 0}; + break; + case NotifyMsg::Tag::shutter: + const auto& shutter = msg.get<NotifyMsg::Tag::shutter>(); + readoutTimestamps[i] = {true, shutter.readoutTimestamp}; + break; + } + } + + return notifyHelper(msgs, readoutTimestamps); +} + +ScopedAStatus DeviceCb::processCaptureResult(const std::vector<CaptureResult>& results) { + if (nullptr == mParent) { + return ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE); + } + + bool notify = false; + std::unique_lock<std::mutex> l(mParent->mLock); + for (const auto& result : results) { + notify = processCaptureResultLocked(result, result.physicalCameraMetadata); + } + + l.unlock(); + if (notify) { + mParent->mResultCondition.notify_one(); + } + + return ndk::ScopedAStatus::ok(); +} + +ScopedAStatus DeviceCb::requestStreamBuffers(const std::vector<BufferRequest>& bufReqs, + std::vector<StreamBufferRet>* buffers, + 
BufferRequestStatus* _aidl_return) { + std::vector<StreamBufferRet>& bufRets = *buffers; + std::unique_lock<std::mutex> l(mLock); + + if (!mUseHalBufManager) { + ALOGE("%s: Camera does not support HAL buffer management", __FUNCTION__); + ADD_FAILURE(); + *_aidl_return = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS; + return ScopedAStatus::ok(); + } + + if (bufReqs.size() > mStreams.size()) { + ALOGE("%s: illegal buffer request: too many requests!", __FUNCTION__); + ADD_FAILURE(); + *_aidl_return = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS; + return ndk::ScopedAStatus::ok(); + } + + std::vector<size_t> indexes(bufReqs.size()); + for (size_t i = 0; i < bufReqs.size(); i++) { + bool found = false; + for (size_t idx = 0; idx < mStreams.size(); idx++) { + if (bufReqs[i].streamId == mStreams[idx].id) { + found = true; + indexes[i] = idx; + break; + } + } + if (!found) { + ALOGE("%s: illegal buffer request: unknown streamId %d!", __FUNCTION__, + bufReqs[i].streamId); + ADD_FAILURE(); + *_aidl_return = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS; + return ScopedAStatus::ok(); + } + } + + bool allStreamOk = true; + bool atLeastOneStreamOk = false; + bufRets.resize(bufReqs.size()); + + for (size_t i = 0; i < bufReqs.size(); i++) { + size_t idx = indexes[i]; + const auto& stream = mStreams[idx]; + const auto& halStream = mHalStreams[idx]; + const BufferRequest& bufReq = bufReqs[i]; + + if (mOutstandingBufferIds[idx].size() + bufReq.numBuffersRequested > halStream.maxBuffers) { + bufRets[i].streamId = stream.id; + bufRets[i].val.set<StreamBuffersVal::Tag::error>( + StreamBufferRequestError::MAX_BUFFER_EXCEEDED); + allStreamOk = false; + continue; + } + + std::vector<StreamBuffer> tmpRetBuffers(bufReq.numBuffersRequested); + for (size_t j = 0; j < bufReq.numBuffersRequested; j++) { + buffer_handle_t handle; + uint32_t w = stream.width; + uint32_t h = stream.height; + if (stream.format == PixelFormat::BLOB) { + w = stream.bufferSize; + h = 1; + } + + 
CameraAidlTest::allocateGraphicBuffer( + w, h, + android_convertGralloc1To0Usage(static_cast<uint64_t>(halStream.producerUsage), + static_cast<uint64_t>(halStream.consumerUsage)), + halStream.overrideFormat, &handle); + + StreamBuffer streamBuffer = StreamBuffer(); + StreamBuffer& sb = tmpRetBuffers[j]; + sb = { + stream.id, mNextBufferId, ::android::dupToAidl(handle), + BufferStatus::OK, NativeHandle(), NativeHandle(), + }; + + mOutstandingBufferIds[idx][mNextBufferId++] = ::android::dupToAidl(handle); + } + atLeastOneStreamOk = true; + bufRets[i].streamId = stream.id; + bufRets[i].val.set<StreamBuffersVal::Tag::buffers>(std::move(tmpRetBuffers)); + } + + if (allStreamOk) { + *_aidl_return = BufferRequestStatus::OK; + } else if (atLeastOneStreamOk) { + *_aidl_return = BufferRequestStatus::FAILED_PARTIAL; + } else { + *_aidl_return = BufferRequestStatus::FAILED_UNKNOWN; + } + + if (!hasOutstandingBuffersLocked()) { + l.unlock(); + mFlushedCondition.notify_one(); + } + + return ndk::ScopedAStatus::ok(); +} + +ScopedAStatus DeviceCb::returnStreamBuffers(const std::vector<StreamBuffer>& buffers) { + if (!mUseHalBufManager) { + ALOGE("%s: Camera does not support HAL buffer management", __FUNCTION__); + ADD_FAILURE(); + } + + std::unique_lock<std::mutex> l(mLock); + for (const auto& buf : buffers) { + if (buf.bufferId == 0) { + // Don't return buffers of bufId 0 (empty buffer) + continue; + } + bool found = false; + for (size_t idx = 0; idx < mOutstandingBufferIds.size(); idx++) { + if (mStreams[idx].id == buf.streamId && + mOutstandingBufferIds[idx].count(buf.bufferId) == 1) { + mOutstandingBufferIds[idx].erase(buf.bufferId); + // TODO: check do we need to close/delete native handle or assume we have enough + // memory to run till the test finish? 
since we do not capture much requests (and + // most of time one buffer is sufficient) + found = true; + break; + } + } + if (found) { + continue; + } + ALOGE("%s: unknown buffer ID %" PRIu64, __FUNCTION__, buf.bufferId); + ADD_FAILURE(); + } + if (!hasOutstandingBuffersLocked()) { + l.unlock(); + mFlushedCondition.notify_one(); + } + + return ndk::ScopedAStatus::ok(); +} + +void DeviceCb::setCurrentStreamConfig(const std::vector<Stream>& streams, + const std::vector<HalStream>& halStreams) { + ASSERT_EQ(streams.size(), halStreams.size()); + ASSERT_NE(streams.size(), 0); + for (size_t i = 0; i < streams.size(); i++) { + ASSERT_EQ(streams[i].id, halStreams[i].id); + } + std::lock_guard<std::mutex> l(mLock); + mUseHalBufManager = true; + mStreams = streams; + mHalStreams = halStreams; + mOutstandingBufferIds.clear(); + for (size_t i = 0; i < streams.size(); i++) { + mOutstandingBufferIds.emplace_back(); + } +} + +void DeviceCb::waitForBuffersReturned() { + std::unique_lock<std::mutex> lk(mLock); + if (hasOutstandingBuffersLocked()) { + auto timeout = std::chrono::seconds(kBufferReturnTimeoutSec); + auto st = mFlushedCondition.wait_for(lk, timeout); + ASSERT_NE(std::cv_status::timeout, st); + } +} + +bool DeviceCb::processCaptureResultLocked( + const CaptureResult& results, std::vector<PhysicalCameraMetadata> physicalCameraMetadata) { + bool notify = false; + uint32_t frameNumber = results.frameNumber; + + if ((results.result.metadata.empty()) && (results.outputBuffers.empty()) && + (results.inputBuffer.buffer.fds.empty()) && (results.fmqResultSize == 0)) { + ALOGE("%s: No result data provided by HAL for frame %d result count: %d", __func__, + frameNumber, (int)results.fmqResultSize); + ADD_FAILURE(); + return notify; + } + + auto requestEntry = mParent->mInflightMap.find(frameNumber); + if (requestEntry == mParent->mInflightMap.end()) { + ALOGE("%s: Unexpected frame number! 
received: %u", __func__, frameNumber); + ADD_FAILURE(); + return notify; + } + + bool isPartialResult = false; + bool hasInputBufferInRequest = false; + auto& request = requestEntry->second; + + CameraMetadata resultMetadata; + size_t resultSize = 0; + if (results.fmqResultSize > 0) { + resultMetadata.metadata.resize(results.fmqResultSize); + if (request->resultQueue == nullptr) { + ADD_FAILURE(); + return notify; + } + + if (!request->resultQueue->read(reinterpret_cast<int8_t*>(resultMetadata.metadata.data()), + results.fmqResultSize)) { + ALOGE("%s: Frame %d: Cannot read camera metadata from fmq," + "size = %" PRIu64, + __func__, frameNumber, results.fmqResultSize); + ADD_FAILURE(); + return notify; + } + + // Physical device results are only expected in the last/final + // partial result notification. + bool expectPhysicalResults = !(request->usePartialResult && + (results.partialResult < request->numPartialResults)); + if (expectPhysicalResults && + (physicalCameraMetadata.size() != request->expectedPhysicalResults.size())) { + ALOGE("%s: Frame %d: Returned physical metadata count %zu " + "must be equal to expected count %zu", + __func__, frameNumber, physicalCameraMetadata.size(), + request->expectedPhysicalResults.size()); + ADD_FAILURE(); + return notify; + } + std::vector<std::vector<uint8_t>> physResultMetadata; + physResultMetadata.resize(physicalCameraMetadata.size()); + for (size_t i = 0; i < physicalCameraMetadata.size(); i++) { + physResultMetadata[i].resize(physicalCameraMetadata[i].fmqMetadataSize); + if (!request->resultQueue->read(reinterpret_cast<int8_t*>(physResultMetadata[i].data()), + physicalCameraMetadata[i].fmqMetadataSize)) { + ALOGE("%s: Frame %d: Cannot read physical camera metadata from fmq," + "size = %" PRIu64, + __func__, frameNumber, physicalCameraMetadata[i].fmqMetadataSize); + ADD_FAILURE(); + return notify; + } + } + resultSize = resultMetadata.metadata.size(); + } else if (!results.result.metadata.empty()) { + resultMetadata = 
results.result; + resultSize = resultMetadata.metadata.size(); + } + + if (!request->usePartialResult && (resultSize > 0) && (results.partialResult != 1)) { + ALOGE("%s: Result is malformed for frame %d: partial_result %u " + "must be 1 if partial result is not supported", + __func__, frameNumber, results.partialResult); + ADD_FAILURE(); + return notify; + } + + if (results.partialResult != 0) { + request->partialResultCount = results.partialResult; + } + + // Check if this result carries only partial metadata + if (request->usePartialResult && (resultSize > 0)) { + if ((results.partialResult > request->numPartialResults) || (results.partialResult < 1)) { + ALOGE("%s: Result is malformed for frame %d: partial_result %u" + " must be in the range of [1, %d] when metadata is " + "included in the result", + __func__, frameNumber, results.partialResult, request->numPartialResults); + ADD_FAILURE(); + return notify; + } + + // Verify no duplicate tags between partial results + const camera_metadata_t* partialMetadata = + reinterpret_cast<const camera_metadata_t*>(resultMetadata.metadata.data()); + const camera_metadata_t* collectedMetadata = request->collectedResult.getAndLock(); + camera_metadata_ro_entry_t searchEntry, foundEntry; + for (size_t i = 0; i < get_camera_metadata_entry_count(partialMetadata); i++) { + if (0 != get_camera_metadata_ro_entry(partialMetadata, i, &searchEntry)) { + ADD_FAILURE(); + request->collectedResult.unlock(collectedMetadata); + return notify; + } + if (-ENOENT != + find_camera_metadata_ro_entry(collectedMetadata, searchEntry.tag, &foundEntry)) { + ADD_FAILURE(); + request->collectedResult.unlock(collectedMetadata); + return notify; + } + } + request->collectedResult.unlock(collectedMetadata); + request->collectedResult.append(partialMetadata); + + isPartialResult = (results.partialResult < request->numPartialResults); + } else if (resultSize > 0) { + request->collectedResult.append( + reinterpret_cast<const 
camera_metadata_t*>(resultMetadata.metadata.data())); + isPartialResult = false; + } + + hasInputBufferInRequest = request->hasInputBuffer; + + // Did we get the (final) result metadata for this capture? + if ((resultSize > 0) && !isPartialResult) { + if (request->haveResultMetadata) { + ALOGE("%s: Called multiple times with metadata for frame %d", __func__, frameNumber); + ADD_FAILURE(); + return notify; + } + request->haveResultMetadata = true; + request->collectedResult.sort(); + + // Verify final result metadata + auto staticMetadataBuffer = mStaticMetadata; + bool isMonochrome = Status::OK == CameraAidlTest::isMonochromeCamera(staticMetadataBuffer); + if (isMonochrome) { + CameraAidlTest::verifyMonochromeCameraResult(request->collectedResult); + } + + // Verify logical camera result metadata + bool isLogicalCamera = + Status::OK == CameraAidlTest::isLogicalMultiCamera(staticMetadataBuffer); + if (isLogicalCamera) { + camera_metadata_t* collectedMetadata = + const_cast<camera_metadata_t*>(request->collectedResult.getAndLock()); + uint8_t* rawMetadata = reinterpret_cast<uint8_t*>(collectedMetadata); + std::vector metadata = std::vector( + rawMetadata, rawMetadata + get_camera_metadata_size(collectedMetadata)); + CameraAidlTest::verifyLogicalCameraResult(staticMetadataBuffer, metadata); + request->collectedResult.unlock(collectedMetadata); + } + } + + uint32_t numBuffersReturned = results.outputBuffers.size(); + auto& inputBuffer = results.inputBuffer.buffer; + if (!inputBuffer.fds.empty() && !inputBuffer.ints.empty()) { + if (hasInputBufferInRequest) { + numBuffersReturned += 1; + } else { + ALOGW("%s: Input buffer should be NULL if there is no input" + " buffer sent in the request", + __func__); + } + } + request->numBuffersLeft -= numBuffersReturned; + if (request->numBuffersLeft < 0) { + ALOGE("%s: Too many buffers returned for frame %d", __func__, frameNumber); + ADD_FAILURE(); + return notify; + } + + for (const auto& buffer : results.outputBuffers) { + // 
wait for the fence timestamp and store it along with the buffer + // TODO: Check if we really need the dup here + android::sp<android::Fence> releaseFence = nullptr; + if (buffer.releaseFence.fds.size() == 1 && buffer.releaseFence.fds[0].get() >= 0) { + releaseFence = new android::Fence(dup(buffer.releaseFence.fds[0].get())); + } + + CameraAidlTest::InFlightRequest::StreamBufferAndTimestamp streamBufferAndTimestamp; + streamBufferAndTimestamp.buffer = {buffer.streamId, + buffer.bufferId, + ::android::makeFromAidl(buffer.buffer), + buffer.status, + ::android::makeFromAidl(buffer.acquireFence), + ::android::makeFromAidl(buffer.releaseFence)}; + streamBufferAndTimestamp.timeStamp = systemTime(); + if (releaseFence && releaseFence->isValid()) { + releaseFence->wait(/*ms*/ 300); + nsecs_t releaseTime = releaseFence->getSignalTime(); + if (streamBufferAndTimestamp.timeStamp < releaseTime) + streamBufferAndTimestamp.timeStamp = releaseTime; + } + request->resultOutputBuffers.push_back(streamBufferAndTimestamp); + } + // If shutter event is received notify the pending threads. + if (request->shutterTimestamp != 0) { + notify = true; + } + + if (mUseHalBufManager) { + returnStreamBuffers(results.outputBuffers); + } + return notify; +} + +ScopedAStatus DeviceCb::notifyHelper( + const std::vector<NotifyMsg>& msgs, + const std::vector<std::pair<bool, nsecs_t>>& readoutTimestamps) { + std::lock_guard<std::mutex> l(mParent->mLock); + + for (size_t i = 0; i < msgs.size(); i++) { + const NotifyMsg& msg = msgs[i]; + NotifyMsg::Tag msgTag = msgs[i].getTag(); + switch (msgTag) { + case NotifyMsg::Tag::error: + if (ErrorCode::ERROR_DEVICE == msg.get<NotifyMsg::Tag::error>().errorCode) { + ALOGE("%s: Camera reported serious device error", __func__); + ADD_FAILURE(); + } else { + auto itr = mParent->mInflightMap.find( + msg.get<NotifyMsg::Tag::error>().frameNumber); + if (itr == mParent->mInflightMap.end()) { + ALOGE("%s: Unexpected error frame number! 
received: %u", __func__, + msg.get<NotifyMsg::Tag::error>().frameNumber); + ADD_FAILURE(); + break; + } + + auto r = itr->second; + if (ErrorCode::ERROR_RESULT == msg.get<NotifyMsg::Tag::error>().errorCode && + msg.get<NotifyMsg::Tag::error>().errorStreamId != -1) { + if (r->haveResultMetadata) { + ALOGE("%s: Camera must report physical camera result error before " + "the final capture result!", + __func__); + ADD_FAILURE(); + } else { + for (auto& mStream : mStreams) { + if (mStream.id == msg.get<NotifyMsg::Tag::error>().errorStreamId) { + std::string physicalCameraId = mStream.physicalCameraId; + bool idExpected = + r->expectedPhysicalResults.find(physicalCameraId) != + r->expectedPhysicalResults.end(); + if (!idExpected) { + ALOGE("%s: ERROR_RESULT's error stream's physicalCameraId " + "%s must be expected", + __func__, physicalCameraId.c_str()); + ADD_FAILURE(); + } else { + r->expectedPhysicalResults.erase(physicalCameraId); + } + break; + } + } + } + } else { + r->errorCodeValid = true; + r->errorCode = msg.get<NotifyMsg::Tag::error>().errorCode; + r->errorStreamId = msg.get<NotifyMsg::Tag::error>().errorStreamId; + } + } + break; + case NotifyMsg::Tag::shutter: + auto itr = + mParent->mInflightMap.find(msg.get<NotifyMsg::Tag::shutter>().frameNumber); + if (itr == mParent->mInflightMap.end()) { + ALOGE("%s: Unexpected shutter frame number! 
received: %u", __func__, + msg.get<NotifyMsg::Tag::shutter>().frameNumber); + ADD_FAILURE(); + break; + } + auto& r = itr->second; + r->shutterTimestamp = msg.get<NotifyMsg::Tag::shutter>().timestamp; + r->shutterReadoutTimestampValid = readoutTimestamps[i].first; + r->shutterReadoutTimestamp = readoutTimestamps[i].second; + break; + } + } + + mParent->mResultCondition.notify_one(); + return ScopedAStatus::ok(); +} + +bool DeviceCb::hasOutstandingBuffersLocked() { + if (!mUseHalBufManager) { + return false; + } + for (const auto& outstandingBuffers : mOutstandingBufferIds) { + if (!outstandingBuffers.empty()) { + return true; + } + } + return false; +} diff --git a/camera/provider/aidl/vts/device_cb.h b/camera/provider/aidl/vts/device_cb.h new file mode 100644 index 0000000000..82ca10ddcb --- /dev/null +++ b/camera/provider/aidl/vts/device_cb.h @@ -0,0 +1,82 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
// Full-featured camera device callback used by the AIDL VTS tests: routes HAL
// notifications and capture results back into the owning CameraAidlTest and,
// when the HAL buffer manager is enabled, hands out and tracks stream buffers.
class DeviceCb : public BnCameraDeviceCallback {
  public:
    // |parent| is the owning test fixture (not owned); |staticMeta| is the
    // camera's static characteristics used when interpreting results.
    DeviceCb(CameraAidlTest* parent, camera_metadata_t* staticMeta);
    // ICameraDeviceCallback overrides (invoked by the HAL over binder).
    ScopedAStatus notify(const std::vector<NotifyMsg>& msgs) override;
    ScopedAStatus processCaptureResult(const std::vector<CaptureResult>& results) override;
    ScopedAStatus requestStreamBuffers(const std::vector<BufferRequest>& bufReqs,
                                       std::vector<StreamBufferRet>* buffers,
                                       BufferRequestStatus* _aidl_return) override;
    ScopedAStatus returnStreamBuffers(const std::vector<StreamBuffer>& buffers) override;

    // Records the streams of the active session so buffer requests/returns can
    // be validated against them.
    void setCurrentStreamConfig(const std::vector<Stream>& streams,
                                const std::vector<HalStream>& halStreams);

    // Blocks until all buffers handed out to the HAL have been returned.
    void waitForBuffersReturned();

  private:
    // Handles one capture result; returns whether waiters should be notified.
    // Caller is expected to hold the relevant lock (see the .cpp).
    bool processCaptureResultLocked(const CaptureResult& results,
                                    std::vector<PhysicalCameraMetadata> physicalCameraMetadata);
    // Applies a batch of notify messages plus their parallel readout timestamps
    // ((valid, timestamp) per message) to the parent's in-flight map.
    ScopedAStatus notifyHelper(const std::vector<NotifyMsg>& msgs,
                               const std::vector<std::pair<bool, nsecs_t>>& readoutTimestamps);

    CameraAidlTest* mParent;  // Parent object

    // Static characteristics of the camera under test; not owned here.
    camera_metadata_t* mStaticMetadata;
    // True iff any stream still has outstanding buffers; caller must hold mLock.
    bool hasOutstandingBuffersLocked();

    /* members for requestStreamBuffers() and returnStreamBuffers()*/
    std::mutex mLock;  // protecting members below
    bool mUseHalBufManager = false;
    std::vector<Stream> mStreams;
    std::vector<HalStream> mHalStreams;
    int64_t mNextBufferId = 1;
    using OutstandingBuffers = std::unordered_map<uint64_t, NativeHandle>;
    // size == mStreams.size(). Tracks each stream's outstanding buffers.
    std::vector<OutstandingBuffers> mOutstandingBufferIds;
    std::condition_variable mFlushedCondition;
};
#include "empty_device_cb.h"
#include <log/log.h>

// EmptyDeviceCb is registered by tests that open a device but never send
// capture requests (see requestStreamBuffers below), so any notify/result
// callback reaching it is a test failure.

ScopedAStatus EmptyDeviceCb::notify(const std::vector<NotifyMsg>&) {
    ALOGI("notify callback");
    ADD_FAILURE();  // Empty callback should not reach here
    return ndk::ScopedAStatus::ok();
}
ScopedAStatus EmptyDeviceCb::processCaptureResult(const std::vector<CaptureResult>&) {
    ALOGI("processCaptureResult callback");
    ADD_FAILURE();  // Empty callback should not reach here
    return ndk::ScopedAStatus::ok();
}
ScopedAStatus EmptyDeviceCb::requestStreamBuffers(const std::vector<BufferRequest>&,
                                                  std::vector<StreamBufferRet>*,
                                                  BufferRequestStatus* _aidl_return) {
    ALOGI("requestStreamBuffers callback");
    // HAL might want to request buffer after configureStreams, but tests with EmptyDeviceCb
    // doesn't actually need to send capture requests, so just return an error.
    *_aidl_return = BufferRequestStatus::FAILED_UNKNOWN;
    return ndk::ScopedAStatus::ok();
}
ScopedAStatus EmptyDeviceCb::returnStreamBuffers(const std::vector<StreamBuffer>&) {
    ALOGI("returnStreamBuffers");
    ADD_FAILURE();  // Empty callback should not reach here
    return ndk::ScopedAStatus::ok();
}
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_EMPTY_DEVICE_CB_H_ +#define HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_EMPTY_DEVICE_CB_H_ + +#include <aidl/android/hardware/camera/device/BnCameraDeviceCallback.h> +#include <gtest/gtest.h> + +using namespace ::aidl::android::hardware::camera::device; + +using ::aidl::android::hardware::camera::device::BnCameraDeviceCallback; +using ::ndk::ScopedAStatus; + +class EmptyDeviceCb : public BnCameraDeviceCallback { + public: + ScopedAStatus notify(const std::vector<NotifyMsg>& msgs) override; + ScopedAStatus processCaptureResult(const std::vector<::CaptureResult>& results) override; + ScopedAStatus requestStreamBuffers(const std::vector<BufferRequest>& bufReqs, + std::vector<StreamBufferRet>* buffers, + BufferRequestStatus* _aidl_return) override; + ScopedAStatus returnStreamBuffers(const std::vector<StreamBuffer>& buffers) override; +}; + +#endif // HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_EMPTY_DEVICE_CB_H_ diff --git a/camera/provider/aidl/vts/simple_device_cb.cpp b/camera/provider/aidl/vts/simple_device_cb.cpp new file mode 100644 index 0000000000..e9a2c73ad7 --- /dev/null +++ b/camera/provider/aidl/vts/simple_device_cb.cpp @@ -0,0 +1,36 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
#include "simple_device_cb.h"

// Forwards raw notify() messages to the parent test and wakes its result
// condition; every other callback is unimplemented and reports
// STATUS_UNKNOWN_TRANSACTION to the binder runtime.
// NOTE(review): confirm mParent is initialized before the HAL can invoke
// notify() -- it is dereferenced unconditionally here.
ScopedAStatus SimpleDeviceCb::notify(const std::vector<NotifyMsg>& msgs) {
    std::unique_lock<std::mutex> l(mParent->mLock);
    // Hand the messages to the test verbatim; it inspects them after waking.
    mParent->mNotifyMessages = msgs;
    mParent->mResultCondition.notify_one();

    return ndk::ScopedAStatus::ok();
}
// Not used by tests relying on SimpleDeviceCb.
ScopedAStatus SimpleDeviceCb::processCaptureResult(const std::vector<CaptureResult>&) {
    return ndk::ScopedAStatus::fromStatus(STATUS_UNKNOWN_TRANSACTION);
}
// Not used by tests relying on SimpleDeviceCb.
ScopedAStatus SimpleDeviceCb::requestStreamBuffers(const std::vector<BufferRequest>&,
                                                   std::vector<StreamBufferRet>*,
                                                   BufferRequestStatus*) {
    return ndk::ScopedAStatus::fromStatus(STATUS_UNKNOWN_TRANSACTION);
}
// Not used by tests relying on SimpleDeviceCb.
ScopedAStatus SimpleDeviceCb::returnStreamBuffers(const std::vector<StreamBuffer>&) {
    return ndk::ScopedAStatus::fromStatus(STATUS_UNKNOWN_TRANSACTION);
}
+ */ + +#ifndef HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_SIMPLE_DEVICE_CB_H_ +#define HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_SIMPLE_DEVICE_CB_H_ + +#include "camera_aidl_test.h" + +#include <unordered_map> + +#include <aidl/android/hardware/camera/device/BnCameraDeviceCallback.h> + +using namespace ::aidl::android::hardware::camera::device; + +class SimpleDeviceCb : public BnCameraDeviceCallback { + public: + ScopedAStatus notify(const std::vector<NotifyMsg>& msgs) override; + ScopedAStatus processCaptureResult(const std::vector<CaptureResult>& results) override; + ScopedAStatus requestStreamBuffers(const std::vector<BufferRequest>& bufReqs, + std::vector<StreamBufferRet>* buffers, + BufferRequestStatus* _aidl_return) override; + virtual ScopedAStatus returnStreamBuffers(const std::vector<StreamBuffer>& buffers) override; + + private: + CameraAidlTest* mParent; +}; + +#endif // HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_SIMPLE_DEVICE_CB_H_ diff --git a/camera/provider/aidl/vts/torch_provider_cb.cpp b/camera/provider/aidl/vts/torch_provider_cb.cpp new file mode 100644 index 0000000000..52240dfed3 --- /dev/null +++ b/camera/provider/aidl/vts/torch_provider_cb.cpp @@ -0,0 +1,40 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "torch_provider_cb.h" + +TorchProviderCb::TorchProviderCb(CameraAidlTest* parent) { + mParent = parent; +} + +ndk::ScopedAStatus TorchProviderCb::torchModeStatusChange(const std::string&, + TorchModeStatus newStatus) { + std::lock_guard<std::mutex> l(mParent->mTorchLock); + mParent->mTorchStatus = newStatus; + mParent->mTorchCond.notify_one(); + return ndk::ScopedAStatus::ok(); +} +ScopedAStatus TorchProviderCb::cameraDeviceStatusChange( + const std::string&, ::aidl::android::hardware::camera::common::CameraDeviceStatus) { + // Should not be called + return ndk::ScopedAStatus::fromStatus(STATUS_UNKNOWN_TRANSACTION); +} +ScopedAStatus TorchProviderCb::physicalCameraDeviceStatusChange( + const std::string&, const std::string&, + ::aidl::android::hardware::camera::common::CameraDeviceStatus) { + // Should not be called + return ndk::ScopedAStatus::fromStatus(STATUS_UNKNOWN_TRANSACTION); +} diff --git a/camera/provider/aidl/vts/torch_provider_cb.h b/camera/provider/aidl/vts/torch_provider_cb.h new file mode 100644 index 0000000000..03532541b3 --- /dev/null +++ b/camera/provider/aidl/vts/torch_provider_cb.h @@ -0,0 +1,44 @@ +/* + * Copyright (C) 2022 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_TORCH_PROVIDER_CB_H_ +#define HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_TORCH_PROVIDER_CB_H_ + +#import <aidl/android/hardware/camera/provider/BnCameraProviderCallback.h> +#import <camera_aidl_test.h> + +using ::aidl::android::hardware::camera::common::TorchModeStatus; +using ::aidl::android::hardware::camera::provider::BnCameraProviderCallback; + +class TorchProviderCb : public BnCameraProviderCallback { + public: + TorchProviderCb(CameraAidlTest* parent); + ndk::ScopedAStatus torchModeStatusChange(const std::string& cameraDeviceName, + TorchModeStatus newStatus) override; + + ScopedAStatus cameraDeviceStatusChange( + const std::string& in_cameraDeviceName, + ::aidl::android::hardware::camera::common::CameraDeviceStatus in_newStatus) override; + + ScopedAStatus physicalCameraDeviceStatusChange( + const std::string& in_cameraDeviceName, const std::string& in_physicalCameraDeviceName, + ::aidl::android::hardware::camera::common::CameraDeviceStatus in_newStatus) override; + + private: + CameraAidlTest* mParent; +}; + +#endif // HARDWARE_INTERFACES_CAMERA_PROVIDER_AIDL_VTS_TORCH_PROVIDER_CB_H_ |