author     Xin Li <delphij@google.com>  2020-08-31 21:21:38 -0700
committer  Xin Li <delphij@google.com>  2020-08-31 21:21:38 -0700
commit     628590d7ec80e10a3fc24b1c18a1afb55cca10a8 (patch)
tree       4b1c3f52d86d7fb53afbe9e9438468588fa489f8 /media/jni
parent     b11b8ec3aec8bb42f2c07e1c5ac7942da293baa8 (diff)
parent     d2d3a20624d968199353ccf6ddbae6f3ac39c9af (diff)
Merge Android R (rvc-dev-plus-aosp-without-vendor@6692709)
Bug: 166295507
Merged-In: I3d92a6de21a938f6b352ec26dc23420c0fe02b27
Change-Id: Ifdb80563ef042738778ebb8a7581a97c4e3d96e2
Diffstat (limited to 'media/jni')
-rw-r--r--  media/jni/Android.bp                                    44
-rw-r--r--  media/jni/android_media_DataSourceCallback.h            70
-rw-r--r--  media/jni/android_media_ImageReader.cpp                 27
-rw-r--r--  media/jni/android_media_ImageWriter.cpp                 62
-rw-r--r--  media/jni/android_media_MediaCodec.cpp                1226
-rw-r--r--  media/jni/android_media_MediaCodec.h                    50
-rw-r--r--  media/jni/android_media_MediaCodecLinearBlock.h         70
-rw-r--r--  media/jni/android_media_MediaCodecList.cpp               1
-rw-r--r--  media/jni/android_media_MediaCrypto.cpp                 18
-rw-r--r--  media/jni/android_media_MediaDataSource.cpp              3
-rw-r--r--  media/jni/android_media_MediaDataSource.h                2
-rw-r--r--  media/jni/android_media_MediaDescrambler.cpp             5
-rw-r--r--  media/jni/android_media_MediaDrm.cpp                   180
-rw-r--r--  media/jni/android_media_MediaDrm.h                      38
-rw-r--r--  media/jni/android_media_MediaExtractor.cpp               2
-rw-r--r--  media/jni/android_media_MediaHTTPConnection.cpp          2
-rw-r--r--  media/jni/android_media_MediaMetadataRetriever.cpp     158
-rw-r--r--  media/jni/android_media_MediaMetricsJNI.cpp            386
-rw-r--r--  media/jni/android_media_MediaMetricsJNI.h                7
-rw-r--r--  media/jni/android_media_MediaMuxer.cpp                   3
-rw-r--r--  media/jni/android_media_MediaPlayer.cpp                 16
-rw-r--r--  media/jni/android_media_MediaRecorder.cpp               36
-rw-r--r--  media/jni/android_media_MediaScanner.cpp               468
-rw-r--r--  media/jni/android_media_MediaSync.h                      2
-rw-r--r--  media/jni/android_media_MediaTranscodeManager.cpp      102
-rw-r--r--  media/jni/android_media_Streams.cpp                     61
-rw-r--r--  media/jni/android_media_Streams.h                       44
-rw-r--r--  media/jni/android_media_tv_Tuner.cpp                  3857
-rw-r--r--  media/jni/android_media_tv_Tuner.h                     259
-rw-r--r--  media/jni/android_mtp_MtpDatabase.cpp                   10
-rw-r--r--  media/jni/audioeffect/android_media_AudioEffect.cpp     15
-rw-r--r--  media/jni/soundpool/Android.bp                          95
-rw-r--r--  media/jni/soundpool/Sound.cpp                          241
-rw-r--r--  media/jni/soundpool/Sound.h                             93
-rw-r--r--  media/jni/soundpool/SoundDecoder.cpp                   116
-rw-r--r--  media/jni/soundpool/SoundDecoder.h                      52
-rw-r--r--  media/jni/soundpool/SoundManager.cpp                   104
-rw-r--r--  media/jni/soundpool/SoundManager.h                     113
-rw-r--r--  media/jni/soundpool/SoundPool.cpp                     1160
-rw-r--r--  media/jni/soundpool/SoundPool.h                        248
-rw-r--r--  media/jni/soundpool/SoundPoolThread.cpp                114
-rw-r--r--  media/jni/soundpool/SoundPoolThread.h                   66
-rw-r--r--  media/jni/soundpool/Stream.cpp                         458
-rw-r--r--  media/jni/soundpool/Stream.h                           159
-rw-r--r--  media/jni/soundpool/StreamManager.cpp                  427
-rw-r--r--  media/jni/soundpool/StreamManager.h                    478
-rw-r--r--  media/jni/soundpool/android_media_SoundPool.cpp         75
-rw-r--r--  media/jni/soundpool/tests/Android.bp                    32
-rwxr-xr-x  media/jni/soundpool/tests/build_and_run.sh              33
-rw-r--r--  media/jni/soundpool/tests/soundpool_stress.cpp         319
50 files changed, 8978 insertions, 2629 deletions
diff --git a/media/jni/Android.bp b/media/jni/Android.bp
index 27660db1f4a9..d2294b38d992 100644
--- a/media/jni/Android.bp
+++ b/media/jni/Android.bp
@@ -1,6 +1,8 @@
cc_library_shared {
name: "libmedia_jni",
+ defaults: ["libcodec2-internal-defaults"],
+
srcs: [
"android_media_ImageWriter.cpp",
"android_media_ImageReader.cpp",
@@ -18,8 +20,8 @@ cc_library_shared {
"android_media_MediaPlayer.cpp",
"android_media_MediaProfiles.cpp",
"android_media_MediaRecorder.cpp",
- "android_media_MediaScanner.cpp",
"android_media_MediaSync.cpp",
+ "android_media_MediaTranscodeManager.cpp",
"android_media_ResampleInputStream.cpp",
"android_media_Streams.cpp",
"android_media_SyncParams.cpp",
@@ -42,6 +44,7 @@ cc_library_shared {
"libmedia_omx",
"libmediametrics",
"libmediadrm",
+ "libmediadrmmetrics_consumer",
"libhwui",
"libui",
"liblog",
@@ -59,6 +62,7 @@ cc_library_shared {
"libsonivox",
"android.hardware.cas@1.0",
"android.hardware.cas.native@1.0",
+ "android.hardware.drm@1.3",
"android.hidl.memory@1.0",
"android.hidl.token@1.0-utils",
],
@@ -124,3 +128,41 @@ cc_library_shared {
"-Wunreachable-code",
],
}
+
+cc_library_shared {
+ name: "libmedia_tv_tuner",
+ srcs: [
+ "android_media_tv_Tuner.cpp",
+ ],
+
+ shared_libs: [
+ "android.hardware.graphics.bufferqueue@2.0",
+ "android.hardware.tv.tuner@1.0",
+ "libandroid_runtime",
+ "libcutils",
+ "libfmq",
+ "libhidlbase",
+ "liblog",
+ "libmedia",
+ "libnativehelper",
+ "libutils",
+ ],
+ defaults: [
+ "libcodec2-impl-defaults",
+ ],
+
+ header_libs: [
+ "libcodec2_internal",
+ "libstagefright_foundation_headers",
+ ],
+
+ export_include_dirs: ["."],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wno-error=deprecated-declarations",
+ "-Wunused",
+ "-Wunreachable-code",
+ ],
+}
diff --git a/media/jni/android_media_DataSourceCallback.h b/media/jni/android_media_DataSourceCallback.h
deleted file mode 100644
index 5bde682754f3..000000000000
--- a/media/jni/android_media_DataSourceCallback.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2017, The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _ANDROID_MEDIA_DATASOURCECALLBACK_H_
-#define _ANDROID_MEDIA_DATASOURCECALLBACK_H_
-
-#include "jni.h"
-
-#include <media/DataSource.h>
-#include <media/stagefright/foundation/ABase.h>
-#include <utils/Errors.h>
-#include <utils/Mutex.h>
-
-namespace android {
-
-// The native counterpart to a Java android.media.DataSourceCallback. It inherits from
-// DataSource.
-//
-// If the java DataSource returns an error or throws an exception it
-// will be considered to be in a broken state, and the only further call this
-// will make is to close().
-class JDataSourceCallback : public DataSource {
-public:
- JDataSourceCallback(JNIEnv *env, jobject source);
- virtual ~JDataSourceCallback();
-
- virtual status_t initCheck() const override;
- virtual ssize_t readAt(off64_t offset, void *data, size_t size) override;
- virtual status_t getSize(off64_t *size) override;
-
- virtual String8 toString() override;
- virtual String8 getMIMEType() const override;
- virtual void close() override;
-private:
- // Protect all member variables with mLock because this object will be
- // accessed on different threads.
- Mutex mLock;
-
- // The status of the java DataSource. Set to OK unless an error occurred or
- // close() was called.
- status_t mJavaObjStatus;
- // Only call the java getSize() once so the app can't change the size on us.
- bool mSizeIsCached;
- off64_t mCachedSize;
-
- jobject mDataSourceCallbackObj;
- jmethodID mReadAtMethod;
- jmethodID mGetSizeMethod;
- jmethodID mCloseMethod;
- jbyteArray mByteArrayObj;
-
- DISALLOW_EVIL_CONSTRUCTORS(JDataSourceCallback);
-};
-
-} // namespace android
-
-#endif // _ANDROID_MEDIA_DATASOURCECALLBACK_H_
diff --git a/media/jni/android_media_ImageReader.cpp b/media/jni/android_media_ImageReader.cpp
index 7168b2dadf92..0a02156ccf76 100644
--- a/media/jni/android_media_ImageReader.cpp
+++ b/media/jni/android_media_ImageReader.cpp
@@ -360,10 +360,8 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz, jint w
__FUNCTION__, width, height, format, maxImages);
PublicFormat publicFormat = static_cast<PublicFormat>(format);
- nativeFormat = android_view_Surface_mapPublicFormatToHalFormat(
- publicFormat);
- nativeDataspace = android_view_Surface_mapPublicFormatToHalDataspace(
- publicFormat);
+ nativeFormat = mapPublicFormatToHalFormat(publicFormat);
+ nativeDataspace = mapPublicFormatToHalDataspace(publicFormat);
jclass clazz = env->GetObjectClass(thiz);
if (clazz == NULL) {
@@ -418,11 +416,13 @@ static void ImageReader_init(JNIEnv* env, jobject thiz, jobject weakThiz, jint w
if (res != OK) {
jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
"Failed to set buffer consumer default format 0x%x", nativeFormat);
+ return;
}
res = bufferConsumer->setDefaultBufferDataSpace(nativeDataspace);
if (res != OK) {
jniThrowExceptionFmt(env, "java/lang/IllegalStateException",
"Failed to set buffer consumer default dataSpace 0x%x", nativeDataspace);
+ return;
}
}
@@ -706,7 +706,7 @@ static void Image_getLockedImage(JNIEnv* env, jobject thiz, LockedImage *image)
// and we don't set them here.
}
-static void Image_getLockedImageInfo(JNIEnv* env, LockedImage* buffer, int idx,
+static bool Image_getLockedImageInfo(JNIEnv* env, LockedImage* buffer, int idx,
int32_t writerFormat, uint8_t **base, uint32_t *size, int *pixelStride, int *rowStride) {
ALOGV("%s", __FUNCTION__);
@@ -715,7 +715,9 @@ static void Image_getLockedImageInfo(JNIEnv* env, LockedImage* buffer, int idx,
if (res != OK) {
jniThrowExceptionFmt(env, "java/lang/UnsupportedOperationException",
"Pixel format: 0x%x is unsupported", buffer->flexFormat);
+ return false;
}
+ return true;
}
static jobjectArray Image_createSurfacePlanes(JNIEnv* env, jobject thiz,
@@ -729,8 +731,7 @@ static jobjectArray Image_createSurfacePlanes(JNIEnv* env, jobject thiz,
jobject byteBuffer = NULL;
PublicFormat publicReaderFormat = static_cast<PublicFormat>(readerFormat);
- int halReaderFormat = android_view_Surface_mapPublicFormatToHalFormat(
- publicReaderFormat);
+ int halReaderFormat = mapPublicFormatToHalFormat(publicReaderFormat);
if (isFormatOpaque(halReaderFormat) && numPlanes > 0) {
String8 msg;
@@ -759,8 +760,10 @@ static jobjectArray Image_createSurfacePlanes(JNIEnv* env, jobject thiz,
}
// Create all SurfacePlanes
for (int i = 0; i < numPlanes; i++) {
- Image_getLockedImageInfo(env, &lockedImg, i, halReaderFormat,
- &pData, &dataSize, &pixelStride, &rowStride);
+ if (!Image_getLockedImageInfo(env, &lockedImg, i, halReaderFormat,
+ &pData, &dataSize, &pixelStride, &rowStride)) {
+ return NULL;
+ }
byteBuffer = env->NewDirectByteBuffer(pData, dataSize);
if ((byteBuffer == NULL) && (env->ExceptionCheck() == false)) {
jniThrowException(env, "java/lang/IllegalStateException",
@@ -796,8 +799,7 @@ static jint Image_getFormat(JNIEnv* env, jobject thiz, jint readerFormat)
return static_cast<jint>(PublicFormat::PRIVATE);
} else {
BufferItem* buffer = Image_getBufferItem(env, thiz);
- int readerHalFormat = android_view_Surface_mapPublicFormatToHalFormat(
- static_cast<PublicFormat>(readerFormat));
+ int readerHalFormat = mapPublicFormatToHalFormat(static_cast<PublicFormat>(readerFormat));
int32_t fmt = applyFormatOverrides(
buffer->mGraphicBuffer->getPixelFormat(), readerHalFormat);
// Override the image format to HAL_PIXEL_FORMAT_YCbCr_420_888 if the actual format is
@@ -808,8 +810,7 @@ static jint Image_getFormat(JNIEnv* env, jobject thiz, jint readerFormat)
if (isPossiblyYUV(fmt)) {
fmt = HAL_PIXEL_FORMAT_YCbCr_420_888;
}
- PublicFormat publicFmt = android_view_Surface_mapHalFormatDataspaceToPublicFormat(
- fmt, buffer->mDataSpace);
+ PublicFormat publicFmt = mapHalFormatDataspaceToPublicFormat(fmt, buffer->mDataSpace);
return static_cast<jint>(publicFmt);
}
}
diff --git a/media/jni/android_media_ImageWriter.cpp b/media/jni/android_media_ImageWriter.cpp
index 7a0eeee17b0d..936edb3fb005 100644
--- a/media/jni/android_media_ImageWriter.cpp
+++ b/media/jni/android_media_ImageWriter.cpp
@@ -86,6 +86,14 @@ public:
void setBufferHeight(int height) { mHeight = height; }
int getBufferHeight() { return mHeight; }
+ void queueAttachedFlag(bool isAttached) {
+ Mutex::Autolock l(mAttachedFlagQueueLock);
+ mAttachedFlagQueue.push_back(isAttached);
+ }
+ void dequeueAttachedFlag() {
+ Mutex::Autolock l(mAttachedFlagQueueLock);
+ mAttachedFlagQueue.pop_back();
+ }
private:
static JNIEnv* getJNIEnv(bool* needsDetach);
static void detachJNI();
@@ -136,6 +144,11 @@ private:
};
static BufferDetacher sBufferDetacher;
+
+ // Buffer queue guarantees both producer and consumer side buffer flows are
+ // in order. See b/19977520. As a result, we can use a queue here.
+ Mutex mAttachedFlagQueueLock;
+ std::deque<bool> mAttachedFlagQueue;
};
JNIImageWriterContext::BufferDetacher JNIImageWriterContext::sBufferDetacher;
@@ -265,11 +278,23 @@ void JNIImageWriterContext::onBufferReleased() {
ALOGV("%s: buffer released", __FUNCTION__);
bool needsDetach = false;
JNIEnv* env = getJNIEnv(&needsDetach);
+
+ bool bufferIsAttached = false;
+ {
+ Mutex::Autolock l(mAttachedFlagQueueLock);
+ if (!mAttachedFlagQueue.empty()) {
+ bufferIsAttached = mAttachedFlagQueue.front();
+ mAttachedFlagQueue.pop_front();
+ } else {
+ ALOGW("onBufferReleased called with no attached flag queued");
+ }
+ }
+
if (env != NULL) {
// Detach the buffer every time when a buffer consumption is done,
// need let this callback give a BufferItem, then only detach if it was attached to this
- // Writer. Do the detach unconditionally for opaque format now. see b/19977520
- if (mFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+ // Writer. see b/19977520
+ if (mFormat == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || bufferIsAttached) {
sBufferDetacher.detach(mProducer);
}
@@ -622,10 +647,16 @@ static void ImageWriter_queueImage(JNIEnv* env, jobject thiz, jlong nativeCtx, j
return;
}
- // Finally, queue input buffer
+ // Finally, queue input buffer.
+ //
+ // Because onBufferReleased may be called before queueBuffer() returns,
+ // queue the "attached" flag before calling queueBuffer. In case
+ // queueBuffer() fails, remove it from the queue.
+ ctx->queueAttachedFlag(false);
res = anw->queueBuffer(anw.get(), buffer, fenceFd);
if (res != OK) {
ALOGE("%s: Queue buffer failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+ ctx->dequeueAttachedFlag();
switch (res) {
case NO_INIT:
jniThrowException(env, "java/lang/IllegalStateException",
@@ -720,10 +751,16 @@ static jint ImageWriter_attachAndQueueImage(JNIEnv* env, jobject thiz, jlong nat
}
// Step 3. Queue Image.
+ //
+ // Because onBufferReleased may be called before queueBuffer() returns,
+ // queue the "attached" flag before calling queueBuffer. In case
+ // queueBuffer() fails, remove it from the queue.
+ ctx->queueAttachedFlag(true);
res = anw->queueBuffer(anw.get(), buffer->mGraphicBuffer.get(), /*fenceFd*/
-1);
if (res != OK) {
ALOGE("%s: Queue buffer failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+ ctx->dequeueAttachedFlag();
switch (res) {
case NO_INIT:
jniThrowException(env, "java/lang/IllegalStateException",
@@ -798,6 +835,7 @@ static void Image_unlockIfLocked(JNIEnv* env, jobject thiz) {
status_t res = buffer->unlock();
if (res != OK) {
jniThrowRuntimeException(env, "unlock buffer failed");
+ return;
}
ALOGV("Successfully unlocked the image");
}
@@ -840,8 +878,8 @@ static jint Image_getFormat(JNIEnv* env, jobject thiz) {
}
// ImageWriter doesn't support data space yet, assuming it is unknown.
- PublicFormat publicFmt = android_view_Surface_mapHalFormatDataspaceToPublicFormat(
- buffer->getPixelFormat(), HAL_DATASPACE_UNKNOWN);
+ PublicFormat publicFmt = mapHalFormatDataspaceToPublicFormat(buffer->getPixelFormat(),
+ HAL_DATASPACE_UNKNOWN);
return static_cast<jint>(publicFmt);
}
@@ -893,7 +931,7 @@ static void Image_getLockedImage(JNIEnv* env, jobject thiz, LockedImage *image)
// and we don't set them here.
}
-static void Image_getLockedImageInfo(JNIEnv* env, LockedImage* buffer, int idx,
+static bool Image_getLockedImageInfo(JNIEnv* env, LockedImage* buffer, int idx,
int32_t writerFormat, uint8_t **base, uint32_t *size, int *pixelStride, int *rowStride) {
ALOGV("%s", __FUNCTION__);
@@ -901,8 +939,10 @@ static void Image_getLockedImageInfo(JNIEnv* env, LockedImage* buffer, int idx,
pixelStride, rowStride);
if (res != OK) {
jniThrowExceptionFmt(env, "java/lang/UnsupportedOperationException",
- "Pixel format: 0x%x is unsupported", buffer->flexFormat);
+ "Pixel format: 0x%x is unsupported", writerFormat);
+ return false;
}
+ return true;
}
static jobjectArray Image_createSurfacePlanes(JNIEnv* env, jobject thiz,
@@ -939,10 +979,12 @@ static jobjectArray Image_createSurfacePlanes(JNIEnv* env, jobject thiz,
// Create all SurfacePlanes
PublicFormat publicWriterFormat = static_cast<PublicFormat>(writerFormat);
- writerFormat = android_view_Surface_mapPublicFormatToHalFormat(publicWriterFormat);
+ writerFormat = mapPublicFormatToHalFormat(publicWriterFormat);
for (int i = 0; i < numPlanes; i++) {
- Image_getLockedImageInfo(env, &lockedImg, i, writerFormat,
- &pData, &dataSize, &pixelStride, &rowStride);
+ if (!Image_getLockedImageInfo(env, &lockedImg, i, writerFormat,
+ &pData, &dataSize, &pixelStride, &rowStride)) {
+ return NULL;
+ }
byteBuffer = env->NewDirectByteBuffer(pData, dataSize);
if ((byteBuffer == NULL) && (env->ExceptionCheck() == false)) {
jniThrowException(env, "java/lang/IllegalStateException",
diff --git a/media/jni/android_media_MediaCodec.cpp b/media/jni/android_media_MediaCodec.cpp
index 05aaa82f8ac8..0b0e162d4faf 100644
--- a/media/jni/android_media_MediaCodec.cpp
+++ b/media/jni/android_media_MediaCodec.cpp
@@ -18,8 +18,11 @@
#define LOG_TAG "MediaCodec-JNI"
#include <utils/Log.h>
+#include <type_traits>
+
#include "android_media_MediaCodec.h"
+#include "android_media_MediaCodecLinearBlock.h"
#include "android_media_MediaCrypto.h"
#include "android_media_MediaDescrambler.h"
#include "android_media_MediaMetricsJNI.h"
@@ -31,13 +34,25 @@
#include <nativehelper/JNIHelp.h>
#include <nativehelper/ScopedLocalRef.h>
+#include <C2AllocatorGralloc.h>
+#include <C2BlockInternal.h>
+#include <C2Buffer.h>
+#include <C2PlatformSupport.h>
+
#include <android/hardware/cas/native/1.0/IDescrambler.h>
+#include <android_runtime/android_hardware_HardwareBuffer.h>
+
+#include <binder/MemoryDealer.h>
+
#include <cutils/compiler.h>
#include <gui/Surface.h>
+#include <hidlmemory/FrameworkUtils.h>
+
#include <media/MediaCodecBuffer.h>
+#include <media/hardware/VideoAPI.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
@@ -47,7 +62,8 @@
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/PersistentSurface.h>
#include <mediadrm/ICrypto.h>
-#include <nativehelper/ScopedLocalRef.h>
+
+#include <private/android/AHardwareBufferHelpers.h>
#include <system/window.h>
@@ -110,6 +126,32 @@ static struct {
jfieldID levelField;
} gCodecInfo;
+static struct {
+ jclass clazz;
+ jobject nativeByteOrder;
+ jmethodID orderId;
+ jmethodID asReadOnlyBufferId;
+ jmethodID positionId;
+ jmethodID limitId;
+ jmethodID getPositionId;
+ jmethodID getLimitId;
+} gByteBufferInfo;
+
+static struct {
+ jmethodID sizeId;
+ jmethodID getId;
+ jmethodID addId;
+} gArrayListInfo;
+
+static struct {
+ jclass clazz;
+ jmethodID ctorId;
+ jmethodID setInternalStateId;
+ jfieldID contextId;
+ jfieldID validId;
+ jfieldID lockId;
+} gLinearBlockInfo;
+
struct fields_t {
jmethodID postEventFromNativeID;
jmethodID lockAndGetContextID;
@@ -123,11 +165,17 @@ struct fields_t {
jfieldID cryptoInfoPatternID;
jfieldID patternEncryptBlocksID;
jfieldID patternSkipBlocksID;
+ jfieldID queueRequestIndexID;
+ jfieldID outputFrameLinearBlockID;
+ jfieldID outputFrameHardwareBufferID;
+ jfieldID outputFrameChangedKeysID;
+ jfieldID outputFrameFormatID;
};
static fields_t gFields;
static const void *sRefBaseOwner;
+
////////////////////////////////////////////////////////////////////////////////
JMediaCodec::JMediaCodec(
@@ -141,8 +189,6 @@ JMediaCodec::JMediaCodec(
mClass = (jclass)env->NewGlobalRef(clazz);
mObject = env->NewWeakGlobalRef(thiz);
- cacheJavaObjects(env);
-
mLooper = new ALooper;
mLooper->setName("MediaCodec_looper");
@@ -163,45 +209,6 @@ JMediaCodec::JMediaCodec(
CHECK((mCodec != NULL) != (mInitStatus != OK));
}
-void JMediaCodec::cacheJavaObjects(JNIEnv *env) {
- jclass clazz = (jclass)env->FindClass("java/nio/ByteBuffer");
- mByteBufferClass = (jclass)env->NewGlobalRef(clazz);
- CHECK(mByteBufferClass != NULL);
-
- ScopedLocalRef<jclass> byteOrderClass(
- env, env->FindClass("java/nio/ByteOrder"));
- CHECK(byteOrderClass.get() != NULL);
-
- jmethodID nativeOrderID = env->GetStaticMethodID(
- byteOrderClass.get(), "nativeOrder", "()Ljava/nio/ByteOrder;");
- CHECK(nativeOrderID != NULL);
-
- jobject nativeByteOrderObj =
- env->CallStaticObjectMethod(byteOrderClass.get(), nativeOrderID);
- mNativeByteOrderObj = env->NewGlobalRef(nativeByteOrderObj);
- CHECK(mNativeByteOrderObj != NULL);
- env->DeleteLocalRef(nativeByteOrderObj);
- nativeByteOrderObj = NULL;
-
- mByteBufferOrderMethodID = env->GetMethodID(
- mByteBufferClass,
- "order",
- "(Ljava/nio/ByteOrder;)Ljava/nio/ByteBuffer;");
- CHECK(mByteBufferOrderMethodID != NULL);
-
- mByteBufferAsReadOnlyBufferMethodID = env->GetMethodID(
- mByteBufferClass, "asReadOnlyBuffer", "()Ljava/nio/ByteBuffer;");
- CHECK(mByteBufferAsReadOnlyBufferMethodID != NULL);
-
- mByteBufferPositionMethodID = env->GetMethodID(
- mByteBufferClass, "position", "(I)Ljava/nio/Buffer;");
- CHECK(mByteBufferPositionMethodID != NULL);
-
- mByteBufferLimitMethodID = env->GetMethodID(
- mByteBufferClass, "limit", "(I)Ljava/nio/Buffer;");
- CHECK(mByteBufferLimitMethodID != NULL);
-}
-
status_t JMediaCodec::initCheck() const {
return mInitStatus;
}
@@ -225,6 +232,18 @@ void JMediaCodec::release() {
});
}
+void JMediaCodec::releaseAsync() {
+ std::call_once(mAsyncReleaseFlag, [this] {
+ if (mCodec != NULL) {
+ sp<AMessage> notify = new AMessage(kWhatAsyncReleaseComplete, this);
+ // Hold strong reference to this until async release is complete
+ notify->setObject("this", this);
+ mCodec->releaseAsync(notify);
+ }
+ mInitStatus = NO_INIT;
+ });
+}
+
JMediaCodec::~JMediaCodec() {
if (mLooper != NULL) {
/* MediaCodec and looper should have been released explicitly already
@@ -247,19 +266,6 @@ JMediaCodec::~JMediaCodec() {
mObject = NULL;
env->DeleteGlobalRef(mClass);
mClass = NULL;
- deleteJavaObjects(env);
-}
-
-void JMediaCodec::deleteJavaObjects(JNIEnv *env) {
- env->DeleteGlobalRef(mByteBufferClass);
- mByteBufferClass = NULL;
- env->DeleteGlobalRef(mNativeByteOrderObj);
- mNativeByteOrderObj = NULL;
-
- mByteBufferOrderMethodID = NULL;
- mByteBufferAsReadOnlyBufferMethodID = NULL;
- mByteBufferPositionMethodID = NULL;
- mByteBufferLimitMethodID = NULL;
}
status_t JMediaCodec::enableOnFrameRenderedListener(jboolean enable) {
@@ -300,6 +306,13 @@ status_t JMediaCodec::configure(
mSurfaceTextureClient.clear();
}
+ constexpr int32_t CONFIGURE_FLAG_ENCODE = 1;
+ AString mime;
+ CHECK(format->findString("mime", &mime));
+ mGraphicOutput = (mime.startsWithIgnoreCase("video/") || mime.startsWithIgnoreCase("image/"))
+ && !(flags & CONFIGURE_FLAG_ENCODE);
+ mHasCryptoOrDescrambler = (crypto != nullptr) || (descrambler != nullptr);
+
return mCodec->configure(
format, mSurfaceTextureClient, crypto, descrambler, flags);
}
@@ -370,6 +383,32 @@ status_t JMediaCodec::queueSecureInputBuffer(
presentationTimeUs, flags, errorDetailMsg);
}
+status_t JMediaCodec::queueBuffer(
+ size_t index, const std::shared_ptr<C2Buffer> &buffer, int64_t timeUs,
+ uint32_t flags, const sp<AMessage> &tunings, AString *errorDetailMsg) {
+ return mCodec->queueBuffer(
+ index, buffer, timeUs, flags, tunings, errorDetailMsg);
+}
+
+status_t JMediaCodec::queueEncryptedLinearBlock(
+ size_t index,
+ const sp<hardware::HidlMemory> &buffer,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const uint8_t key[16],
+ const uint8_t iv[16],
+ CryptoPlugin::Mode mode,
+ const CryptoPlugin::Pattern &pattern,
+ int64_t presentationTimeUs,
+ uint32_t flags,
+ const sp<AMessage> &tunings,
+ AString *errorDetailMsg) {
+ return mCodec->queueEncryptedBuffer(
+ index, buffer, offset, subSamples, numSubSamples, key, iv, mode, pattern,
+ presentationTimeUs, flags, tunings, errorDetailMsg);
+}
+
status_t JMediaCodec::dequeueInputBuffer(size_t *index, int64_t timeoutUs) {
return mCodec->dequeueInputBuffer(index, timeoutUs);
}
@@ -444,7 +483,7 @@ status_t JMediaCodec::getBuffers(
}
*bufArray = (jobjectArray)env->NewObjectArray(
- buffers.size(), mByteBufferClass, NULL);
+ buffers.size(), gByteBufferInfo.clazz, NULL);
if (*bufArray == NULL) {
return NO_MEMORY;
}
@@ -470,6 +509,39 @@ status_t JMediaCodec::getBuffers(
return OK;
}
+template <typename T>
+static jobject CreateByteBuffer(
+ JNIEnv *env, T *base, size_t capacity, size_t offset, size_t size,
+ bool readOnly, bool clearBuffer) {
+ jobject byteBuffer =
+ env->NewDirectByteBuffer(
+ const_cast<typename std::remove_const<T>::type *>(base),
+ capacity);
+ if (readOnly && byteBuffer != NULL) {
+ jobject readOnlyBuffer = env->CallObjectMethod(
+ byteBuffer, gByteBufferInfo.asReadOnlyBufferId);
+ env->DeleteLocalRef(byteBuffer);
+ byteBuffer = readOnlyBuffer;
+ }
+ if (byteBuffer == NULL) {
+ return nullptr;
+ }
+ jobject me = env->CallObjectMethod(
+ byteBuffer, gByteBufferInfo.orderId, gByteBufferInfo.nativeByteOrder);
+ env->DeleteLocalRef(me);
+ me = env->CallObjectMethod(
+ byteBuffer, gByteBufferInfo.limitId,
+ clearBuffer ? capacity : offset + size);
+ env->DeleteLocalRef(me);
+ me = env->CallObjectMethod(
+ byteBuffer, gByteBufferInfo.positionId,
+ clearBuffer ? 0 : offset);
+ env->DeleteLocalRef(me);
+ me = NULL;
+ return byteBuffer;
+}
+
+
// static
template <typename T>
status_t JMediaCodec::createByteBufferFromABuffer(
@@ -488,29 +560,9 @@ status_t JMediaCodec::createByteBufferFromABuffer(
return OK;
}
- jobject byteBuffer =
- env->NewDirectByteBuffer(buffer->base(), buffer->capacity());
- if (readOnly && byteBuffer != NULL) {
- jobject readOnlyBuffer = env->CallObjectMethod(
- byteBuffer, mByteBufferAsReadOnlyBufferMethodID);
- env->DeleteLocalRef(byteBuffer);
- byteBuffer = readOnlyBuffer;
- }
- if (byteBuffer == NULL) {
- return NO_MEMORY;
- }
- jobject me = env->CallObjectMethod(
- byteBuffer, mByteBufferOrderMethodID, mNativeByteOrderObj);
- env->DeleteLocalRef(me);
- me = env->CallObjectMethod(
- byteBuffer, mByteBufferLimitMethodID,
- clearBuffer ? buffer->capacity() : (buffer->offset() + buffer->size()));
- env->DeleteLocalRef(me);
- me = env->CallObjectMethod(
- byteBuffer, mByteBufferPositionMethodID,
- clearBuffer ? 0 : buffer->offset());
- env->DeleteLocalRef(me);
- me = NULL;
+ jobject byteBuffer = CreateByteBuffer(
+ env, buffer->base(), buffer->capacity(), buffer->offset(), buffer->size(),
+ readOnly, clearBuffer);
*buf = byteBuffer;
return OK;
@@ -628,6 +680,104 @@ status_t JMediaCodec::getImage(
return OK;
}
+status_t JMediaCodec::getOutputFrame(
+ JNIEnv *env, jobject frame, size_t index) const {
+ sp<MediaCodecBuffer> buffer;
+
+ status_t err = mCodec->getOutputBuffer(index, &buffer);
+ if (err != OK) {
+ return err;
+ }
+
+ if (buffer->size() > 0) {
+ std::shared_ptr<C2Buffer> c2Buffer = buffer->asC2Buffer();
+ if (c2Buffer) {
+ switch (c2Buffer->data().type()) {
+ case C2BufferData::LINEAR: {
+ std::unique_ptr<JMediaCodecLinearBlock> context{new JMediaCodecLinearBlock};
+ context->mBuffer = c2Buffer;
+ ScopedLocalRef<jobject> linearBlock{env, env->NewObject(
+ gLinearBlockInfo.clazz, gLinearBlockInfo.ctorId)};
+ env->CallVoidMethod(
+ linearBlock.get(),
+ gLinearBlockInfo.setInternalStateId,
+ (jlong)context.release(),
+ true);
+ env->SetObjectField(frame, gFields.outputFrameLinearBlockID, linearBlock.get());
+ break;
+ }
+ case C2BufferData::GRAPHIC: {
+ const C2Handle *c2Handle = c2Buffer->data().graphicBlocks().front().handle();
+ uint32_t width, height, format, stride, igbp_slot, generation;
+ uint64_t usage, igbp_id;
+ _UnwrapNativeCodec2GrallocMetadata(
+ c2Handle, &width, &height, &format, &usage, &stride, &generation,
+ &igbp_id, &igbp_slot);
+ native_handle_t *grallocHandle = UnwrapNativeCodec2GrallocHandle(c2Handle);
+ GraphicBuffer* graphicBuffer = new GraphicBuffer(
+ grallocHandle, GraphicBuffer::CLONE_HANDLE,
+ width, height, format, 1, usage, stride);
+ ScopedLocalRef<jobject> hardwareBuffer{
+ env,
+ android_hardware_HardwareBuffer_createFromAHardwareBuffer(
+ env, AHardwareBuffer_from_GraphicBuffer(graphicBuffer))};
+ env->SetObjectField(
+ frame, gFields.outputFrameHardwareBufferID, hardwareBuffer.get());
+ break;
+ }
+ case C2BufferData::LINEAR_CHUNKS: [[fallthrough]];
+ case C2BufferData::GRAPHIC_CHUNKS: [[fallthrough]];
+ case C2BufferData::INVALID: [[fallthrough]];
+ default:
+ return INVALID_OPERATION;
+ }
+ } else {
+ if (!mGraphicOutput) {
+ std::unique_ptr<JMediaCodecLinearBlock> context{new JMediaCodecLinearBlock};
+ context->mLegacyBuffer = buffer;
+ ScopedLocalRef<jobject> linearBlock{env, env->NewObject(
+ gLinearBlockInfo.clazz, gLinearBlockInfo.ctorId)};
+ env->CallVoidMethod(
+ linearBlock.get(),
+ gLinearBlockInfo.setInternalStateId,
+ (jlong)context.release(),
+ true);
+ env->SetObjectField(frame, gFields.outputFrameLinearBlockID, linearBlock.get());
+ } else {
+ // No-op.
+ }
+ }
+ }
+
+ jobject formatMap;
+ err = getOutputFormat(env, index, &formatMap);
+ if (err != OK) {
+ return err;
+ }
+ ScopedLocalRef<jclass> mediaFormatClass{env, env->FindClass("android/media/MediaFormat")};
+ ScopedLocalRef<jobject> format{env, env->NewObject(
+ mediaFormatClass.get(),
+ env->GetMethodID(mediaFormatClass.get(), "<init>", "(Ljava/util/Map;)V"),
+ formatMap)};
+ env->SetObjectField(frame, gFields.outputFrameFormatID, format.get());
+ env->DeleteLocalRef(formatMap);
+ formatMap = nullptr;
+
+ sp<RefBase> obj;
+ if (buffer->meta()->findObject("changedKeys", &obj) && obj) {
+ sp<MediaCodec::WrapperObject<std::set<std::string>>> changedKeys{
+ (decltype(changedKeys.get()))obj.get()};
+ ScopedLocalRef<jobject> changedKeysObj{env, env->GetObjectField(
+ frame, gFields.outputFrameChangedKeysID)};
+ for (const std::string &key : changedKeys->value) {
+ ScopedLocalRef<jstring> keyStr{env, env->NewStringUTF(key.c_str())};
+ (void)env->CallBooleanMethod(changedKeysObj.get(), gArrayListInfo.addId, keyStr.get());
+ }
+ }
+ return OK;
+}
+
+
status_t JMediaCodec::getName(JNIEnv *env, jstring *nameStr) const {
AString name;
@@ -742,11 +892,11 @@ status_t JMediaCodec::getCodecInfo(JNIEnv *env, jobject *codecInfoObject) const
return OK;
}
-status_t JMediaCodec::getMetrics(JNIEnv *, MediaAnalyticsItem * &reply) const {
- mediametrics_handle_t reply2 = MediaAnalyticsItem::convert(reply);
+status_t JMediaCodec::getMetrics(JNIEnv *, mediametrics::Item * &reply) const {
+ mediametrics_handle_t reply2 = mediametrics::Item::convert(reply);
status_t status = mCodec->getMetrics(reply2);
// getMetrics() updates reply2, pass the converted update along to our caller.
- reply = MediaAnalyticsItem::convert(reply2);
+ reply = mediametrics::Item::convert(reply2);
return status;
}
@@ -939,6 +1089,15 @@ void JMediaCodec::onMessageReceived(const sp<AMessage> &msg) {
handleFrameRenderedNotification(msg);
break;
}
+ case kWhatAsyncReleaseComplete:
+ {
+ if (mLooper != NULL) {
+ mLooper->unregisterHandler(id());
+ mLooper->stop();
+ mLooper.clear();
+ }
+ break;
+ }
default:
TRESPASS();
}
@@ -951,7 +1110,7 @@ void JMediaCodec::onMessageReceived(const sp<AMessage> &msg) {
using namespace android;
static sp<JMediaCodec> setMediaCodec(
- JNIEnv *env, jobject thiz, const sp<JMediaCodec> &codec) {
+ JNIEnv *env, jobject thiz, const sp<JMediaCodec> &codec, bool release = true) {
sp<JMediaCodec> old = (JMediaCodec *)env->CallLongMethod(thiz, gFields.lockAndGetContextID);
if (codec != NULL) {
codec->incStrong(thiz);
@@ -962,7 +1121,9 @@ static sp<JMediaCodec> setMediaCodec(
* its message handler, doing release() from there will deadlock
* (as MediaCodec::release() post synchronous message to the same looper)
*/
- old->release();
+ if (release) {
+ old->release();
+ }
old->decStrong(thiz);
}
env->CallVoidMethod(thiz, gFields.setAndUnlockContextID, (jlong)codec.get());
@@ -977,7 +1138,11 @@ static sp<JMediaCodec> getMediaCodec(JNIEnv *env, jobject thiz) {
}
static void android_media_MediaCodec_release(JNIEnv *env, jobject thiz) {
- setMediaCodec(env, thiz, NULL);
+ // Clear Java native reference.
+ sp<JMediaCodec> codec = setMediaCodec(env, thiz, nullptr, false /* release */);
+ if (codec != NULL) {
+ codec->releaseAsync();
+ }
}
static void throwCodecException(JNIEnv *env, status_t err, int32_t actionCode, const char *msg) {
@@ -1086,7 +1251,7 @@ static void android_media_MediaCodec_native_enableOnFrameRenderedListener(
jboolean enabled) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1102,7 +1267,7 @@ static void android_media_MediaCodec_native_setCallback(
jobject cb) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1122,7 +1287,7 @@ static void android_media_MediaCodec_native_configure(
jint flags) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1170,7 +1335,7 @@ static void android_media_MediaCodec_native_setSurface(
jobject jsurface) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1293,7 +1458,7 @@ static void android_media_MediaCodec_setInputSurface(
ALOGV("android_media_MediaCodec_setInputSurface");
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1317,7 +1482,7 @@ static jobject android_media_MediaCodec_createInputSurface(JNIEnv* env,
ALOGV("android_media_MediaCodec_createInputSurface");
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return NULL;
}
@@ -1340,7 +1505,7 @@ static void android_media_MediaCodec_start(JNIEnv *env, jobject thiz) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1355,7 +1520,7 @@ static void android_media_MediaCodec_stop(JNIEnv *env, jobject thiz) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1370,7 +1535,7 @@ static void android_media_MediaCodec_reset(JNIEnv *env, jobject thiz) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1392,7 +1557,7 @@ static void android_media_MediaCodec_flush(JNIEnv *env, jobject thiz) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1414,7 +1579,7 @@ static void android_media_MediaCodec_queueInputBuffer(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1428,6 +1593,150 @@ static void android_media_MediaCodec_queueInputBuffer(
env, err, ACTION_CODE_FATAL, errorDetailMsg.empty() ? NULL : errorDetailMsg.c_str());
}
+struct NativeCryptoInfo {
+ NativeCryptoInfo(JNIEnv *env, jobject cryptoInfoObj)
+ : mEnv{env},
+ mIvObj{env, (jbyteArray)env->GetObjectField(cryptoInfoObj, gFields.cryptoInfoIVID)},
+ mKeyObj{env, (jbyteArray)env->GetObjectField(cryptoInfoObj, gFields.cryptoInfoKeyID)} {
+ mNumSubSamples = env->GetIntField(cryptoInfoObj, gFields.cryptoInfoNumSubSamplesID);
+
+ ScopedLocalRef<jintArray> numBytesOfClearDataObj{env, (jintArray)env->GetObjectField(
+ cryptoInfoObj, gFields.cryptoInfoNumBytesOfClearDataID)};
+
+ ScopedLocalRef<jintArray> numBytesOfEncryptedDataObj{env, (jintArray)env->GetObjectField(
+ cryptoInfoObj, gFields.cryptoInfoNumBytesOfEncryptedDataID)};
+
+ jint jmode = env->GetIntField(cryptoInfoObj, gFields.cryptoInfoModeID);
+ if (jmode == gCryptoModes.Unencrypted) {
+ mMode = CryptoPlugin::kMode_Unencrypted;
+ } else if (jmode == gCryptoModes.AesCtr) {
+ mMode = CryptoPlugin::kMode_AES_CTR;
+ } else if (jmode == gCryptoModes.AesCbc) {
+ mMode = CryptoPlugin::kMode_AES_CBC;
+ } else {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ ScopedLocalRef<jobject> patternObj{
+ env, env->GetObjectField(cryptoInfoObj, gFields.cryptoInfoPatternID)};
+
+ if (patternObj.get() == nullptr) {
+ mPattern.mEncryptBlocks = 0;
+ mPattern.mSkipBlocks = 0;
+ } else {
+ mPattern.mEncryptBlocks = env->GetIntField(
+ patternObj.get(), gFields.patternEncryptBlocksID);
+ mPattern.mSkipBlocks = env->GetIntField(
+ patternObj.get(), gFields.patternSkipBlocksID);
+ }
+
+ mErr = OK;
+ if (mNumSubSamples <= 0) {
+ mErr = -EINVAL;
+ } else if (numBytesOfClearDataObj == nullptr
+ && numBytesOfEncryptedDataObj == nullptr) {
+ mErr = -EINVAL;
+ } else if (numBytesOfEncryptedDataObj != nullptr
+ && env->GetArrayLength(numBytesOfEncryptedDataObj.get()) < mNumSubSamples) {
+ mErr = -ERANGE;
+ } else if (numBytesOfClearDataObj != nullptr
+ && env->GetArrayLength(numBytesOfClearDataObj.get()) < mNumSubSamples) {
+ mErr = -ERANGE;
+ // subSamples array may silently overflow if number of samples are too large. Use
+ // INT32_MAX as maximum allocation size may be less than SIZE_MAX on some platforms
+ } else if (CC_UNLIKELY(mNumSubSamples >= (signed)(INT32_MAX / sizeof(*mSubSamples))) ) {
+ mErr = -EINVAL;
+ } else {
+ jint *numBytesOfClearData =
+ (numBytesOfClearDataObj == nullptr)
+ ? nullptr
+ : env->GetIntArrayElements(numBytesOfClearDataObj.get(), nullptr);
+
+ jint *numBytesOfEncryptedData =
+ (numBytesOfEncryptedDataObj == nullptr)
+ ? nullptr
+ : env->GetIntArrayElements(numBytesOfEncryptedDataObj.get(), nullptr);
+
+ mSubSamples = new CryptoPlugin::SubSample[mNumSubSamples];
+
+ for (jint i = 0; i < mNumSubSamples; ++i) {
+ mSubSamples[i].mNumBytesOfClearData =
+ (numBytesOfClearData == nullptr) ? 0 : numBytesOfClearData[i];
+
+ mSubSamples[i].mNumBytesOfEncryptedData =
+ (numBytesOfEncryptedData == nullptr) ? 0 : numBytesOfEncryptedData[i];
+ }
+
+ if (numBytesOfEncryptedData != nullptr) {
+ env->ReleaseIntArrayElements(
+ numBytesOfEncryptedDataObj.get(), numBytesOfEncryptedData, 0);
+ numBytesOfEncryptedData = nullptr;
+ }
+
+ if (numBytesOfClearData != nullptr) {
+ env->ReleaseIntArrayElements(
+ numBytesOfClearDataObj.get(), numBytesOfClearData, 0);
+ numBytesOfClearData = nullptr;
+ }
+ }
+
+ if (mErr == OK && mKeyObj.get() != nullptr) {
+ if (env->GetArrayLength(mKeyObj.get()) != 16) {
+ mErr = -EINVAL;
+ } else {
+ mKey = env->GetByteArrayElements(mKeyObj.get(), nullptr);
+ }
+ }
+
+ if (mErr == OK && mIvObj.get() != nullptr) {
+ if (env->GetArrayLength(mIvObj.get()) != 16) {
+ mErr = -EINVAL;
+ } else {
+ mIv = env->GetByteArrayElements(mIvObj.get(), nullptr);
+ }
+ }
+
+ }
+
+ explicit NativeCryptoInfo(jint size)
+ : mIvObj{nullptr, nullptr},
+ mKeyObj{nullptr, nullptr},
+ mMode{CryptoPlugin::kMode_Unencrypted},
+ mPattern{0, 0} {
+ mSubSamples = new CryptoPlugin::SubSample[1];
+ mNumSubSamples = 1;
+ mSubSamples[0].mNumBytesOfClearData = size;
+ mSubSamples[0].mNumBytesOfEncryptedData = 0;
+ }
+
+ ~NativeCryptoInfo() {
+ if (mIv != nullptr) {
+ mEnv->ReleaseByteArrayElements(mIvObj.get(), mIv, 0);
+ }
+
+ if (mKey != nullptr) {
+ mEnv->ReleaseByteArrayElements(mKeyObj.get(), mKey, 0);
+ }
+
+ if (mSubSamples != nullptr) {
+ delete[] mSubSamples;
+ }
+ }
+
+ JNIEnv *mEnv{nullptr};
+ ScopedLocalRef<jbyteArray> mIvObj;
+ ScopedLocalRef<jbyteArray> mKeyObj;
+ status_t mErr{OK};
+
+ CryptoPlugin::SubSample *mSubSamples{nullptr};
+ int32_t mNumSubSamples{0};
+ jbyte *mIv{nullptr};
+ jbyte *mKey{nullptr};
+ enum CryptoPlugin::Mode mMode;
+ CryptoPlugin::Pattern mPattern;
+};
+
static void android_media_MediaCodec_queueSecureInputBuffer(
JNIEnv *env,
jobject thiz,
@@ -1440,7 +1749,7 @@ static void android_media_MediaCodec_queueSecureInputBuffer(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1593,13 +1902,388 @@ static void android_media_MediaCodec_queueSecureInputBuffer(
env, err, ACTION_CODE_FATAL, errorDetailMsg.empty() ? NULL : errorDetailMsg.c_str());
}
+static jobject android_media_MediaCodec_mapHardwareBuffer(JNIEnv *env, jclass, jobject bufferObj) {
+ ALOGV("android_media_MediaCodec_mapHardwareBuffer");
+ AHardwareBuffer *hardwareBuffer = android_hardware_HardwareBuffer_getNativeHardwareBuffer(
+ env, bufferObj);
+ AHardwareBuffer_Desc desc;
+ AHardwareBuffer_describe(hardwareBuffer, &desc);
+ if (desc.format != AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420) {
+ ALOGI("mapHardwareBuffer: unmappable format: %d", desc.format);
+ return nullptr;
+ }
+ if ((desc.usage & AHARDWAREBUFFER_USAGE_CPU_READ_MASK) == 0) {
+ ALOGI("mapHardwareBuffer: buffer not CPU readable");
+ return nullptr;
+ }
+ bool readOnly = ((desc.usage & AHARDWAREBUFFER_USAGE_CPU_WRITE_MASK) == 0);
+
+ uint64_t cpuUsage = 0;
+ cpuUsage |= (desc.usage & AHARDWAREBUFFER_USAGE_CPU_READ_MASK);
+ cpuUsage |= (desc.usage & AHARDWAREBUFFER_USAGE_CPU_WRITE_MASK);
+
+ AHardwareBuffer_Planes planes;
+ int err = AHardwareBuffer_lockPlanes(
+ hardwareBuffer, cpuUsage, -1 /* fence */, nullptr /* rect */, &planes);
+ if (err != 0) {
+ ALOGI("mapHardwareBuffer: Failed to lock planes (err=%d)", err);
+ return nullptr;
+ }
+
+ if (planes.planeCount != 3) {
+ ALOGI("mapHardwareBuffer: planeCount expected 3, actual %u", planes.planeCount);
+ return nullptr;
+ }
+
+ ScopedLocalRef<jobjectArray> buffersArray{
+ env, env->NewObjectArray(3, gByteBufferInfo.clazz, NULL)};
+ ScopedLocalRef<jintArray> rowStridesArray{env, env->NewIntArray(3)};
+ ScopedLocalRef<jintArray> pixelStridesArray{env, env->NewIntArray(3)};
+
+ jboolean isCopy = JNI_FALSE;
+ jint *rowStrides = env->GetIntArrayElements(rowStridesArray.get(), &isCopy);
+ jint *pixelStrides = env->GetIntArrayElements(rowStridesArray.get(), &isCopy);
+
+ // For Y plane
+ int rowSampling = 1;
+ int colSampling = 1;
+ // plane indices are Y-U-V.
+ for (uint32_t i = 0; i < 3; ++i) {
+ const AHardwareBuffer_Plane &plane = planes.planes[i];
+ int maxRowOffset = plane.rowStride * (desc.height / rowSampling - 1);
+ int maxColOffset = plane.pixelStride * (desc.width / colSampling - 1);
+ int maxOffset = maxRowOffset + maxColOffset;
+ ScopedLocalRef<jobject> byteBuffer{env, CreateByteBuffer(
+ env,
+ plane.data,
+ maxOffset + 1,
+ 0,
+ maxOffset + 1,
+ readOnly,
+ true)};
+
+ env->SetObjectArrayElement(buffersArray.get(), i, byteBuffer.get());
+ rowStrides[i] = plane.rowStride;
+ pixelStrides[i] = plane.pixelStride;
+ // For U-V planes
+ rowSampling = 2;
+ colSampling = 2;
+ }
+
+ env->ReleaseIntArrayElements(rowStridesArray.get(), rowStrides, 0);
+ env->ReleaseIntArrayElements(pixelStridesArray.get(), pixelStrides, 0);
+ rowStrides = pixelStrides = nullptr;
+
+ ScopedLocalRef<jclass> imageClazz(
+ env, env->FindClass("android/media/MediaCodec$MediaImage"));
+ CHECK(imageClazz.get() != NULL);
+
+ jmethodID imageConstructID = env->GetMethodID(imageClazz.get(), "<init>",
+ "([Ljava/nio/ByteBuffer;[I[IIIIZJIILandroid/graphics/Rect;J)V");
+
+ jobject img = env->NewObject(imageClazz.get(), imageConstructID,
+ buffersArray.get(),
+ rowStridesArray.get(),
+ pixelStridesArray.get(),
+ desc.width,
+ desc.height,
+ desc.format, // ???
+ (jboolean)readOnly /* readOnly */,
+ (jlong)0 /* timestamp */,
+ (jint)0 /* xOffset */, (jint)0 /* yOffset */, nullptr /* cropRect */,
+ (jlong)hardwareBuffer);
+
+ // if MediaImage creation fails, return null
+ if (env->ExceptionCheck()) {
+ env->ExceptionDescribe();
+ env->ExceptionClear();
+ return nullptr;
+ }
+
+ AHardwareBuffer_acquire(hardwareBuffer);
+
+ return img;
+}
+
+static void android_media_MediaCodec_closeMediaImage(JNIEnv *, jclass, jlong context) {
+ ALOGV("android_media_MediaCodec_closeMediaImage");
+ if (context == 0) {
+ return;
+ }
+ AHardwareBuffer *hardwareBuffer = (AHardwareBuffer *)context;
+
+ int err = AHardwareBuffer_unlock(hardwareBuffer, nullptr);
+ if (err != 0) {
+ ALOGI("closeMediaImage: failed to unlock (err=%d)", err);
+ // Continue to release the hardwareBuffer
+ }
+
+ AHardwareBuffer_release(hardwareBuffer);
+}
+
+static status_t ConvertKeyValueListsToAMessage(
+ JNIEnv *env, jobject keys, jobject values, sp<AMessage> *msg) {
+ static struct Fields {
+ explicit Fields(JNIEnv *env) {
+ ScopedLocalRef<jclass> clazz{env, env->FindClass("java/lang/String")};
+ CHECK(clazz.get() != NULL);
+ mStringClass = (jclass)env->NewGlobalRef(clazz.get());
+
+ clazz.reset(env->FindClass("java/lang/Integer"));
+ CHECK(clazz.get() != NULL);
+ mIntegerClass = (jclass)env->NewGlobalRef(clazz.get());
+
+ mIntegerValueId = env->GetMethodID(clazz.get(), "intValue", "()I");
+ CHECK(mIntegerValueId != NULL);
+
+ clazz.reset(env->FindClass("java/lang/Long"));
+ CHECK(clazz.get() != NULL);
+ mLongClass = (jclass)env->NewGlobalRef(clazz.get());
+
+ mLongValueId = env->GetMethodID(clazz.get(), "longValue", "()J");
+ CHECK(mLongValueId != NULL);
+
+ clazz.reset(env->FindClass("java/lang/Float"));
+ CHECK(clazz.get() != NULL);
+ mFloatClass = (jclass)env->NewGlobalRef(clazz.get());
+
+ mFloatValueId = env->GetMethodID(clazz.get(), "floatValue", "()F");
+ CHECK(mFloatValueId != NULL);
+
+ clazz.reset(env->FindClass("java/util/ArrayList"));
+ CHECK(clazz.get() != NULL);
+
+ mByteBufferArrayId = env->GetMethodID(gByteBufferInfo.clazz, "array", "()[B");
+ CHECK(mByteBufferArrayId != NULL);
+ }
+
+ jclass mStringClass;
+ jclass mIntegerClass;
+ jmethodID mIntegerValueId;
+ jclass mLongClass;
+ jmethodID mLongValueId;
+ jclass mFloatClass;
+ jmethodID mFloatValueId;
+ jmethodID mByteBufferArrayId;
+ } sFields{env};
+
+ jint size = env->CallIntMethod(keys, gArrayListInfo.sizeId);
+ if (size != env->CallIntMethod(values, gArrayListInfo.sizeId)) {
+ return BAD_VALUE;
+ }
+
+ sp<AMessage> result{new AMessage};
+ for (jint i = 0; i < size; ++i) {
+ ScopedLocalRef<jstring> jkey{
+ env, (jstring)env->CallObjectMethod(keys, gArrayListInfo.getId, i)};
+ const char *tmp = env->GetStringUTFChars(jkey.get(), nullptr);
+ AString key;
+ if (tmp) {
+ key.setTo(tmp);
+ }
+ env->ReleaseStringUTFChars(jkey.get(), tmp);
+ if (key.empty()) {
+ return NO_MEMORY;
+ }
+
+ ScopedLocalRef<jobject> jvalue{
+ env, env->CallObjectMethod(values, gArrayListInfo.getId, i)};
+
+ if (env->IsInstanceOf(jvalue.get(), sFields.mStringClass)) {
+ const char *tmp = env->GetStringUTFChars((jstring)jvalue.get(), nullptr);
+ AString value;
+ if (!tmp) {
+ return NO_MEMORY;
+ }
+ value.setTo(tmp);
+ env->ReleaseStringUTFChars((jstring)jvalue.get(), tmp);
+ result->setString(key.c_str(), value);
+ } else if (env->IsInstanceOf(jvalue.get(), sFields.mIntegerClass)) {
+ jint value = env->CallIntMethod(jvalue.get(), sFields.mIntegerValueId);
+ result->setInt32(key.c_str(), value);
+ } else if (env->IsInstanceOf(jvalue.get(), sFields.mLongClass)) {
+ jlong value = env->CallLongMethod(jvalue.get(), sFields.mLongValueId);
+ result->setInt64(key.c_str(), value);
+ } else if (env->IsInstanceOf(jvalue.get(), sFields.mFloatClass)) {
+ jfloat value = env->CallFloatMethod(jvalue.get(), sFields.mFloatValueId);
+ result->setFloat(key.c_str(), value);
+ } else if (env->IsInstanceOf(jvalue.get(), gByteBufferInfo.clazz)) {
+ jint position = env->CallIntMethod(jvalue.get(), gByteBufferInfo.getPositionId);
+ jint limit = env->CallIntMethod(jvalue.get(), gByteBufferInfo.getLimitId);
+ sp<ABuffer> buffer{new ABuffer(limit - position)};
+ void *data = env->GetDirectBufferAddress(jvalue.get());
+ if (data != nullptr) {
+ memcpy(buffer->data(),
+ static_cast<const uint8_t *>(data) + position,
+ buffer->size());
+ } else {
+ ScopedLocalRef<jbyteArray> byteArray{env, (jbyteArray)env->CallObjectMethod(
+ jvalue.get(), sFields.mByteBufferArrayId)};
+ env->GetByteArrayRegion(byteArray.get(), position, buffer->size(),
+ reinterpret_cast<jbyte *>(buffer->data()));
+ }
+ result->setBuffer(key.c_str(), buffer);
+ }
+ }
+
+ *msg = result;
+ return OK;
+}
+
+static void android_media_MediaCodec_native_queueLinearBlock(
+ JNIEnv *env, jobject thiz, jint index, jobject bufferObj,
+ jint offset, jint size, jobject cryptoInfoObj,
+ jlong presentationTimeUs, jint flags, jobject keys, jobject values) {
+ ALOGV("android_media_MediaCodec_native_queueLinearBlock");
+
+ sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+ if (codec == nullptr || codec->initCheck() != OK) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ sp<AMessage> tunings;
+ status_t err = ConvertKeyValueListsToAMessage(env, keys, values, &tunings);
+ if (err != OK) {
+ throwExceptionAsNecessary(env, err);
+ return;
+ }
+
+ std::shared_ptr<C2Buffer> buffer;
+ sp<hardware::HidlMemory> memory;
+ ScopedLocalRef<jobject> lock{env, env->GetObjectField(bufferObj, gLinearBlockInfo.lockId)};
+ if (env->MonitorEnter(lock.get()) == JNI_OK) {
+ if (env->GetBooleanField(bufferObj, gLinearBlockInfo.validId)) {
+ JMediaCodecLinearBlock *context =
+ (JMediaCodecLinearBlock *)env->GetLongField(bufferObj, gLinearBlockInfo.contextId);
+ if (codec->hasCryptoOrDescrambler()) {
+ memory = context->toHidlMemory();
+ // TODO: copy if memory is null
+ offset += context->mHidlMemoryOffset;
+ } else {
+ buffer = context->toC2Buffer(offset, size);
+ // TODO: copy if buffer is null
+ }
+ }
+ env->MonitorExit(lock.get());
+ } else {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ AString errorDetailMsg;
+ if (codec->hasCryptoOrDescrambler()) {
+ if (!memory) {
+ ALOGI("queueLinearBlock: no ashmem memory for encrypted content");
+ throwExceptionAsNecessary(env, BAD_VALUE);
+ return;
+ }
+ NativeCryptoInfo cryptoInfo = [env, cryptoInfoObj, size]{
+ if (cryptoInfoObj == nullptr) {
+ return NativeCryptoInfo{size};
+ } else {
+ return NativeCryptoInfo{env, cryptoInfoObj};
+ }
+ }();
+ err = codec->queueEncryptedLinearBlock(
+ index,
+ memory,
+ offset,
+ cryptoInfo.mSubSamples, cryptoInfo.mNumSubSamples,
+ (const uint8_t *)cryptoInfo.mKey, (const uint8_t *)cryptoInfo.mIv,
+ cryptoInfo.mMode,
+ cryptoInfo.mPattern,
+ presentationTimeUs,
+ flags,
+ tunings,
+ &errorDetailMsg);
+ } else {
+ if (!buffer) {
+ ALOGI("queueLinearBlock: no C2Buffer found");
+ throwExceptionAsNecessary(env, BAD_VALUE);
+ return;
+ }
+ err = codec->queueBuffer(
+ index, buffer, presentationTimeUs, flags, tunings, &errorDetailMsg);
+ }
+ throwExceptionAsNecessary(env, err, ACTION_CODE_FATAL, errorDetailMsg.c_str());
+}
+
+static void android_media_MediaCodec_native_queueHardwareBuffer(
+ JNIEnv *env, jobject thiz, jint index, jobject bufferObj,
+ jlong presentationTimeUs, jint flags, jobject keys, jobject values) {
+ ALOGV("android_media_MediaCodec_native_queueHardwareBuffer");
+
+ sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+ if (codec == NULL || codec->initCheck() != OK) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ sp<AMessage> tunings;
+ status_t err = ConvertKeyValueListsToAMessage(env, keys, values, &tunings);
+ if (err != OK) {
+ throwExceptionAsNecessary(env, err);
+ return;
+ }
+
+ AHardwareBuffer *hardwareBuffer = android_hardware_HardwareBuffer_getNativeHardwareBuffer(
+ env, bufferObj);
+ sp<GraphicBuffer> graphicBuffer{AHardwareBuffer_to_GraphicBuffer(hardwareBuffer)};
+ C2Handle *handle = WrapNativeCodec2GrallocHandle(
+ graphicBuffer->handle, graphicBuffer->width, graphicBuffer->height,
+ graphicBuffer->format, graphicBuffer->usage, graphicBuffer->stride);
+ static std::shared_ptr<C2Allocator> sGrallocAlloc = []() -> std::shared_ptr<C2Allocator> {
+ std::shared_ptr<C2Allocator> alloc;
+ c2_status_t err = GetCodec2PlatformAllocatorStore()->fetchAllocator(
+ C2PlatformAllocatorStore::GRALLOC, &alloc);
+ if (err == C2_OK) {
+ return alloc;
+ }
+ return nullptr;
+ }();
+ std::shared_ptr<C2GraphicAllocation> alloc;
+ c2_status_t c2err = sGrallocAlloc->priorGraphicAllocation(handle, &alloc);
+ if (c2err != C2_OK) {
+ ALOGW("Failed to wrap AHardwareBuffer into C2GraphicAllocation");
+ throwExceptionAsNecessary(env, BAD_VALUE);
+ return;
+ }
+ std::shared_ptr<C2GraphicBlock> block = _C2BlockFactory::CreateGraphicBlock(alloc);
+ std::shared_ptr<C2Buffer> buffer = C2Buffer::CreateGraphicBuffer(block->share(
+ block->crop(), C2Fence{}));
+ AString errorDetailMsg;
+ err = codec->queueBuffer(
+ index, buffer, presentationTimeUs, flags, tunings, &errorDetailMsg);
+ throwExceptionAsNecessary(env, err, ACTION_CODE_FATAL, errorDetailMsg.c_str());
+}
+
+static void android_media_MediaCodec_native_getOutputFrame(
+ JNIEnv *env, jobject thiz, jobject frame, jint index) {
+ ALOGV("android_media_MediaCodec_native_getOutputFrame");
+
+ sp<JMediaCodec> codec = getMediaCodec(env, thiz);
+
+ if (codec == NULL || codec->initCheck() != OK) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return;
+ }
+
+ status_t err = codec->getOutputFrame(env, frame, index);
+ if (err != OK) {
+ throwExceptionAsNecessary(env, err);
+ }
+}
+
static jint android_media_MediaCodec_dequeueInputBuffer(
JNIEnv *env, jobject thiz, jlong timeoutUs) {
ALOGV("android_media_MediaCodec_dequeueInputBuffer");
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return -1;
}
@@ -1620,7 +2304,7 @@ static jint android_media_MediaCodec_dequeueOutputBuffer(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return 0;
}
@@ -1643,7 +2327,7 @@ static void android_media_MediaCodec_releaseOutputBuffer(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1658,7 +2342,7 @@ static void android_media_MediaCodec_signalEndOfInputStream(JNIEnv* env,
ALOGV("android_media_MediaCodec_signalEndOfInputStream");
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1674,7 +2358,7 @@ static jobject android_media_MediaCodec_getFormatNative(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return NULL;
}
@@ -1697,7 +2381,7 @@ static jobject android_media_MediaCodec_getOutputFormatForIndexNative(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return NULL;
}
@@ -1720,7 +2404,7 @@ static jobjectArray android_media_MediaCodec_getBuffers(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return NULL;
}
@@ -1746,7 +2430,7 @@ static jobject android_media_MediaCodec_getBuffer(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return NULL;
}
@@ -1772,7 +2456,7 @@ static jobject android_media_MediaCodec_getImage(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return NULL;
}
@@ -1798,7 +2482,7 @@ static jobject android_media_MediaCodec_getName(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return NULL;
}
@@ -1821,7 +2505,7 @@ static jobject android_media_MediaCodec_getOwnCodecInfo(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return NULL;
}
@@ -1844,13 +2528,13 @@ android_media_MediaCodec_native_getMetrics(JNIEnv *env, jobject thiz)
ALOGV("android_media_MediaCodec_native_getMetrics");
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL ) {
+ if (codec == NULL || codec->initCheck() != OK) {
jniThrowException(env, "java/lang/IllegalStateException", NULL);
return 0;
}
// get what we have for the metrics from the codec
- MediaAnalyticsItem *item = 0;
+ mediametrics::Item *item = 0;
status_t err = codec->getMetrics(env, item);
if (err != OK) {
@@ -1873,7 +2557,7 @@ static void android_media_MediaCodec_setParameters(
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1892,7 +2576,7 @@ static void android_media_MediaCodec_setVideoScalingMode(
JNIEnv *env, jobject thiz, jint mode) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1910,7 +2594,7 @@ static void android_media_MediaCodec_setAudioPresentation(
JNIEnv *env, jobject thiz, jint presentationId, jint programId) {
sp<JMediaCodec> codec = getMediaCodec(env, thiz);
- if (codec == NULL) {
+ if (codec == NULL || codec->initCheck() != OK) {
throwExceptionAsNecessary(env, INVALID_OPERATION);
return;
}
@@ -1918,7 +2602,7 @@ static void android_media_MediaCodec_setAudioPresentation(
codec->selectAudioPresentation((int32_t)presentationId, (int32_t)programId);
}
-static void android_media_MediaCodec_native_init(JNIEnv *env) {
+static void android_media_MediaCodec_native_init(JNIEnv *env, jclass) {
ScopedLocalRef<jclass> clazz(
env, env->FindClass("android/media/MediaCodec"));
CHECK(clazz.get() != NULL);
@@ -1991,6 +2675,31 @@ static void android_media_MediaCodec_native_init(JNIEnv *env) {
gFields.patternSkipBlocksID = env->GetFieldID(clazz.get(), "mSkipBlocks", "I");
CHECK(gFields.patternSkipBlocksID != NULL);
+ clazz.reset(env->FindClass("android/media/MediaCodec$QueueRequest"));
+ CHECK(clazz.get() != NULL);
+
+ gFields.queueRequestIndexID = env->GetFieldID(clazz.get(), "mIndex", "I");
+ CHECK(gFields.queueRequestIndexID != NULL);
+
+ clazz.reset(env->FindClass("android/media/MediaCodec$OutputFrame"));
+ CHECK(clazz.get() != NULL);
+
+ gFields.outputFrameLinearBlockID =
+ env->GetFieldID(clazz.get(), "mLinearBlock", "Landroid/media/MediaCodec$LinearBlock;");
+ CHECK(gFields.outputFrameLinearBlockID != NULL);
+
+ gFields.outputFrameHardwareBufferID =
+ env->GetFieldID(clazz.get(), "mHardwareBuffer", "Landroid/hardware/HardwareBuffer;");
+ CHECK(gFields.outputFrameHardwareBufferID != NULL);
+
+ gFields.outputFrameChangedKeysID =
+ env->GetFieldID(clazz.get(), "mChangedKeys", "Ljava/util/ArrayList;");
+ CHECK(gFields.outputFrameChangedKeysID != NULL);
+
+ gFields.outputFrameFormatID =
+ env->GetFieldID(clazz.get(), "mFormat", "Landroid/media/MediaFormat;");
+ CHECK(gFields.outputFrameFormatID != NULL);
+
clazz.reset(env->FindClass("android/media/MediaCodec$CryptoException"));
CHECK(clazz.get() != NULL);
@@ -2105,6 +2814,83 @@ static void android_media_MediaCodec_native_init(JNIEnv *env) {
field = env->GetFieldID(clazz.get(), "level", "I");
CHECK(field != NULL);
gCodecInfo.levelField = field;
+
+ clazz.reset(env->FindClass("java/nio/ByteBuffer"));
+ CHECK(clazz.get() != NULL);
+ gByteBufferInfo.clazz = (jclass)env->NewGlobalRef(clazz.get());
+
+ ScopedLocalRef<jclass> byteOrderClass(
+ env, env->FindClass("java/nio/ByteOrder"));
+ CHECK(byteOrderClass.get() != NULL);
+
+ jmethodID nativeOrderID = env->GetStaticMethodID(
+ byteOrderClass.get(), "nativeOrder", "()Ljava/nio/ByteOrder;");
+ CHECK(nativeOrderID != NULL);
+
+ ScopedLocalRef<jobject> nativeByteOrderObj{
+ env, env->CallStaticObjectMethod(byteOrderClass.get(), nativeOrderID)};
+ gByteBufferInfo.nativeByteOrder = env->NewGlobalRef(nativeByteOrderObj.get());
+ CHECK(gByteBufferInfo.nativeByteOrder != NULL);
+ nativeByteOrderObj.reset();
+
+ gByteBufferInfo.orderId = env->GetMethodID(
+ clazz.get(),
+ "order",
+ "(Ljava/nio/ByteOrder;)Ljava/nio/ByteBuffer;");
+ CHECK(gByteBufferInfo.orderId != NULL);
+
+ gByteBufferInfo.asReadOnlyBufferId = env->GetMethodID(
+ clazz.get(), "asReadOnlyBuffer", "()Ljava/nio/ByteBuffer;");
+ CHECK(gByteBufferInfo.asReadOnlyBufferId != NULL);
+
+ gByteBufferInfo.positionId = env->GetMethodID(
+ clazz.get(), "position", "(I)Ljava/nio/Buffer;");
+ CHECK(gByteBufferInfo.positionId != NULL);
+
+ gByteBufferInfo.limitId = env->GetMethodID(
+ clazz.get(), "limit", "(I)Ljava/nio/Buffer;");
+ CHECK(gByteBufferInfo.limitId != NULL);
+
+ gByteBufferInfo.getPositionId = env->GetMethodID(
+ clazz.get(), "position", "()I");
+ CHECK(gByteBufferInfo.getPositionId != NULL);
+
+ gByteBufferInfo.getLimitId = env->GetMethodID(
+ clazz.get(), "limit", "()I");
+ CHECK(gByteBufferInfo.getLimitId != NULL);
+
+ clazz.reset(env->FindClass("java/util/ArrayList"));
+ CHECK(clazz.get() != NULL);
+
+ gArrayListInfo.sizeId = env->GetMethodID(clazz.get(), "size", "()I");
+ CHECK(gArrayListInfo.sizeId != NULL);
+
+ gArrayListInfo.getId = env->GetMethodID(clazz.get(), "get", "(I)Ljava/lang/Object;");
+ CHECK(gArrayListInfo.getId != NULL);
+
+ gArrayListInfo.addId = env->GetMethodID(clazz.get(), "add", "(Ljava/lang/Object;)Z");
+ CHECK(gArrayListInfo.addId != NULL);
+
+ clazz.reset(env->FindClass("android/media/MediaCodec$LinearBlock"));
+ CHECK(clazz.get() != NULL);
+
+ gLinearBlockInfo.clazz = (jclass)env->NewGlobalRef(clazz.get());
+
+ gLinearBlockInfo.ctorId = env->GetMethodID(clazz.get(), "<init>", "()V");
+ CHECK(gLinearBlockInfo.ctorId != NULL);
+
+ gLinearBlockInfo.setInternalStateId = env->GetMethodID(
+ clazz.get(), "setInternalStateLocked", "(JZ)V");
+ CHECK(gLinearBlockInfo.setInternalStateId != NULL);
+
+ gLinearBlockInfo.contextId = env->GetFieldID(clazz.get(), "mNativeContext", "J");
+ CHECK(gLinearBlockInfo.contextId != NULL);
+
+ gLinearBlockInfo.validId = env->GetFieldID(clazz.get(), "mValid", "Z");
+ CHECK(gLinearBlockInfo.validId != NULL);
+
+ gLinearBlockInfo.lockId = env->GetFieldID(clazz.get(), "mLock", "Ljava/lang/Object;");
+ CHECK(gLinearBlockInfo.lockId != NULL);
}
static void android_media_MediaCodec_native_setup(
@@ -2152,7 +2938,174 @@ static void android_media_MediaCodec_native_setup(
static void android_media_MediaCodec_native_finalize(
JNIEnv *env, jobject thiz) {
- android_media_MediaCodec_release(env, thiz);
+ setMediaCodec(env, thiz, NULL);
+}
+
+// MediaCodec.LinearBlock
+
+static jobject android_media_MediaCodec_LinearBlock_native_map(
+ JNIEnv *env, jobject thiz) {
+ JMediaCodecLinearBlock *context =
+ (JMediaCodecLinearBlock *)env->GetLongField(thiz, gLinearBlockInfo.contextId);
+ if (context->mBuffer) {
+ std::shared_ptr<C2Buffer> buffer = context->mBuffer;
+ if (!context->mReadonlyMapping) {
+ const C2BufferData data = buffer->data();
+ if (data.type() != C2BufferData::LINEAR) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return nullptr;
+ }
+ if (data.linearBlocks().size() != 1u) {
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return nullptr;
+ }
+ C2ConstLinearBlock block = data.linearBlocks().front();
+ context->mReadonlyMapping =
+ std::make_shared<C2ReadView>(block.map().get());
+ }
+ return CreateByteBuffer(
+ env,
+ context->mReadonlyMapping->data(), // base
+ context->mReadonlyMapping->capacity(), // capacity
+ 0u, // offset
+ context->mReadonlyMapping->capacity(), // size
+ true, // readOnly
+ true /* clearBuffer */);
+ } else if (context->mBlock) {
+ std::shared_ptr<C2LinearBlock> block = context->mBlock;
+ if (!context->mReadWriteMapping) {
+ context->mReadWriteMapping =
+ std::make_shared<C2WriteView>(block->map().get());
+ }
+ return CreateByteBuffer(
+ env,
+ context->mReadWriteMapping->base(),
+ context->mReadWriteMapping->capacity(),
+ context->mReadWriteMapping->offset(),
+ context->mReadWriteMapping->size(),
+ false, // readOnly
+ true /* clearBuffer */);
+ } else if (context->mLegacyBuffer) {
+ return CreateByteBuffer(
+ env,
+ context->mLegacyBuffer->base(),
+ context->mLegacyBuffer->capacity(),
+ context->mLegacyBuffer->offset(),
+ context->mLegacyBuffer->size(),
+ true, // readOnly
+ true /* clearBuffer */);
+ } else if (context->mMemory) {
+ return CreateByteBuffer(
+ env,
+ context->mMemory->unsecurePointer(),
+ context->mMemory->size(),
+ 0,
+ context->mMemory->size(),
+ false, // readOnly
+ true /* clearBuffer */);
+ }
+ throwExceptionAsNecessary(env, INVALID_OPERATION);
+ return nullptr;
+}
+
+static void android_media_MediaCodec_LinearBlock_native_recycle(
+ JNIEnv *env, jobject thiz) {
+ JMediaCodecLinearBlock *context =
+ (JMediaCodecLinearBlock *)env->GetLongField(thiz, gLinearBlockInfo.contextId);
+ env->CallVoidMethod(thiz, gLinearBlockInfo.setInternalStateId, jlong(0), false);
+ delete context;
+}
+
+static void PopulateNamesVector(
+ JNIEnv *env, jobjectArray codecNames, std::vector<std::string> *names) {
+ jsize length = env->GetArrayLength(codecNames);
+ for (jsize i = 0; i < length; ++i) {
+ jstring jstr = static_cast<jstring>(env->GetObjectArrayElement(codecNames, i));
+ if (jstr == nullptr) {
+ // null entries are ignored
+ continue;
+ }
+ const char *cstr = env->GetStringUTFChars(jstr, nullptr);
+ if (cstr == nullptr) {
+ throwExceptionAsNecessary(env, BAD_VALUE);
+ return;
+ }
+ names->emplace_back(cstr);
+ env->ReleaseStringUTFChars(jstr, cstr);
+ }
+}
+
+static void android_media_MediaCodec_LinearBlock_native_obtain(
+ JNIEnv *env, jobject thiz, jint capacity, jobjectArray codecNames) {
+ std::unique_ptr<JMediaCodecLinearBlock> context{new JMediaCodecLinearBlock};
+ std::vector<std::string> names;
+ PopulateNamesVector(env, codecNames, &names);
+ bool hasSecure = false;
+ bool hasNonSecure = false;
+ for (const std::string &name : names) {
+ if (name.length() >= 7 && name.substr(name.length() - 7) == ".secure") {
+ hasSecure = true;
+ } else {
+ hasNonSecure = true;
+ }
+ }
+ if (hasSecure && !hasNonSecure) {
+ constexpr size_t kInitialDealerCapacity = 1048576; // 1MB
+ thread_local sp<MemoryDealer> sDealer = new MemoryDealer(
+ kInitialDealerCapacity, "JNI(1MB)");
+ context->mMemory = sDealer->allocate(capacity);
+ if (context->mMemory == nullptr) {
+ size_t newDealerCapacity = sDealer->getMemoryHeap()->getSize() * 2;
+ while (capacity * 2 > newDealerCapacity) {
+ newDealerCapacity *= 2;
+ }
+ ALOGI("LinearBlock.native_obtain: "
+ "Dealer capacity increasing from %zuMB to %zuMB",
+ sDealer->getMemoryHeap()->getSize() / 1048576,
+ newDealerCapacity / 1048576);
+ sDealer = new MemoryDealer(
+ newDealerCapacity,
+ AStringPrintf("JNI(%zuMB)", newDealerCapacity).c_str());
+ context->mMemory = sDealer->allocate(capacity);
+ }
+ context->mHidlMemory = hardware::fromHeap(context->mMemory->getMemory(
+ &context->mHidlMemoryOffset, &context->mHidlMemorySize));
+ } else {
+ context->mBlock = MediaCodec::FetchLinearBlock(capacity, names);
+ if (!context->mBlock) {
+ jniThrowException(env, "java/io/IOException", nullptr);
+ return;
+ }
+ }
+ env->CallVoidMethod(
+ thiz,
+ gLinearBlockInfo.setInternalStateId,
+ (jlong)context.release(),
+ true /* isMappable */);
+}
+
+static jboolean android_media_MediaCodec_LinearBlock_checkCompatible(
+ JNIEnv *env, jclass, jobjectArray codecNames) {
+ std::vector<std::string> names;
+ PopulateNamesVector(env, codecNames, &names);
+ bool isCompatible = false;
+ bool hasSecure = false;
+ bool hasNonSecure = false;
+ for (const std::string &name : names) {
+ if (name.length() >= 7 && name.substr(name.length() - 7) == ".secure") {
+ hasSecure = true;
+ } else {
+ hasNonSecure = true;
+ }
+ }
+ if (hasSecure && hasNonSecure) {
+ return false;
+ }
+ status_t err = MediaCodec::CanFetchLinearBlock(names, &isCompatible);
+ if (err != OK) {
+ throwExceptionAsNecessary(env, err);
+ }
+ return isCompatible;
}
static const JNINativeMethod gMethods[] = {
@@ -2200,6 +3153,25 @@ static const JNINativeMethod gMethods[] = {
{ "native_queueSecureInputBuffer", "(IILandroid/media/MediaCodec$CryptoInfo;JI)V",
(void *)android_media_MediaCodec_queueSecureInputBuffer },
+ { "native_mapHardwareBuffer",
+ "(Landroid/hardware/HardwareBuffer;)Landroid/media/Image;",
+ (void *)android_media_MediaCodec_mapHardwareBuffer },
+
+ { "native_closeMediaImage", "(J)V", (void *)android_media_MediaCodec_closeMediaImage },
+
+ { "native_queueLinearBlock",
+ "(ILandroid/media/MediaCodec$LinearBlock;IILandroid/media/MediaCodec$CryptoInfo;JI"
+ "Ljava/util/ArrayList;Ljava/util/ArrayList;)V",
+ (void *)android_media_MediaCodec_native_queueLinearBlock },
+
+ { "native_queueHardwareBuffer",
+ "(ILandroid/hardware/HardwareBuffer;JILjava/util/ArrayList;Ljava/util/ArrayList;)V",
+ (void *)android_media_MediaCodec_native_queueHardwareBuffer },
+
+ { "native_getOutputFrame",
+ "(Landroid/media/MediaCodec$OutputFrame;I)V",
+ (void *)android_media_MediaCodec_native_getOutputFrame },
+
{ "native_dequeueInputBuffer", "(J)I",
(void *)android_media_MediaCodec_dequeueInputBuffer },
@@ -2254,7 +3226,29 @@ static const JNINativeMethod gMethods[] = {
(void *)android_media_MediaCodec_native_finalize },
};
+static const JNINativeMethod gLinearBlockMethods[] = {
+ { "native_map", "()Ljava/nio/ByteBuffer;",
+ (void *)android_media_MediaCodec_LinearBlock_native_map },
+
+ { "native_recycle", "()V",
+ (void *)android_media_MediaCodec_LinearBlock_native_recycle },
+
+ { "native_obtain", "(I[Ljava/lang/String;)V",
+ (void *)android_media_MediaCodec_LinearBlock_native_obtain },
+
+ { "native_checkCompatible", "([Ljava/lang/String;)Z",
+ (void *)android_media_MediaCodec_LinearBlock_checkCompatible },
+};
+
int register_android_media_MediaCodec(JNIEnv *env) {
- return AndroidRuntime::registerNativeMethods(env,
+ int result = AndroidRuntime::registerNativeMethods(env,
"android/media/MediaCodec", gMethods, NELEM(gMethods));
+ if (result != JNI_OK) {
+ return result;
+ }
+ result = AndroidRuntime::registerNativeMethods(env,
+ "android/media/MediaCodec$LinearBlock",
+ gLinearBlockMethods,
+ NELEM(gLinearBlockMethods));
+ return result;
}
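
Editorial note: the LinearBlock obtain/checkCompatible paths above both hinge on whether every requested codec name is secure. A minimal standalone sketch of that classification follows; the helper and struct names are assumptions for illustration and are not part of the patch.

    #include <string>
    #include <vector>

    struct NameClassification {
        bool hasSecure = false;     // at least one name ends in ".secure"
        bool hasNonSecure = false;  // at least one name does not
    };

    static NameClassification classifyCodecNames(const std::vector<std::string> &names) {
        NameClassification result;
        for (const std::string &name : names) {
            // A codec is treated as secure when its name ends with ".secure".
            if (name.size() >= 7 && name.compare(name.size() - 7, 7, ".secure") == 0) {
                result.hasSecure = true;
            } else {
                result.hasNonSecure = true;
            }
        }
        return result;
    }
    // In the patch above, native_obtain takes the MemoryDealer-backed path only when
    // hasSecure && !hasNonSecure, and checkCompatible reports incompatibility when
    // both flags end up set.
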
diff --git a/media/jni/android_media_MediaCodec.h b/media/jni/android_media_MediaCodec.h
index dfe30a3f5909..a58f9a74b563 100644
--- a/media/jni/android_media_MediaCodec.h
+++ b/media/jni/android_media_MediaCodec.h
@@ -21,12 +21,17 @@
#include "jni.h"
-#include <media/MediaAnalyticsItem.h>
+#include <C2Buffer.h>
+#include <binder/MemoryHeapBase.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/MediaMetricsItem.h>
#include <media/hardware/CryptoAPI.h>
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/foundation/AHandler.h>
#include <utils/Errors.h>
+class C2Buffer;
+
namespace android {
struct ABuffer;
@@ -39,6 +44,7 @@ struct MediaCodec;
struct PersistentSurface;
class Surface;
namespace hardware {
+class HidlMemory;
namespace cas {
namespace native {
namespace V1_0 {
@@ -55,6 +61,7 @@ struct JMediaCodec : public AHandler {
void registerSelf();
void release();
+ void releaseAsync();
status_t enableOnFrameRenderedListener(jboolean enable);
@@ -97,6 +104,26 @@ struct JMediaCodec : public AHandler {
uint32_t flags,
AString *errorDetailMsg);
+ status_t queueBuffer(
+ size_t index, const std::shared_ptr<C2Buffer> &buffer,
+ int64_t timeUs, uint32_t flags, const sp<AMessage> &tunings,
+ AString *errorDetailMsg);
+
+ status_t queueEncryptedLinearBlock(
+ size_t index,
+ const sp<hardware::HidlMemory> &buffer,
+ size_t offset,
+ const CryptoPlugin::SubSample *subSamples,
+ size_t numSubSamples,
+ const uint8_t key[16],
+ const uint8_t iv[16],
+ CryptoPlugin::Mode mode,
+ const CryptoPlugin::Pattern &pattern,
+ int64_t presentationTimeUs,
+ uint32_t flags,
+ const sp<AMessage> &tunings,
+ AString *errorDetailMsg);
+
status_t dequeueInputBuffer(size_t *index, int64_t timeoutUs);
status_t dequeueOutputBuffer(
@@ -120,11 +147,14 @@ struct JMediaCodec : public AHandler {
status_t getImage(
JNIEnv *env, bool input, size_t index, jobject *image) const;
+ status_t getOutputFrame(
+ JNIEnv *env, jobject frame, size_t index) const;
+
status_t getName(JNIEnv *env, jstring *name) const;
status_t getCodecInfo(JNIEnv *env, jobject *codecInfo) const;
- status_t getMetrics(JNIEnv *env, MediaAnalyticsItem * &reply) const;
+ status_t getMetrics(JNIEnv *env, mediametrics::Item * &reply) const;
status_t setParameters(const sp<AMessage> &params);
@@ -132,6 +162,8 @@ struct JMediaCodec : public AHandler {
void selectAudioPresentation(const int32_t presentationId, const int32_t programId);
+ bool hasCryptoOrDescrambler() { return mHasCryptoOrDescrambler; }
+
protected:
virtual ~JMediaCodec();
@@ -141,24 +173,20 @@ private:
enum {
kWhatCallbackNotify,
kWhatFrameRendered,
+ kWhatAsyncReleaseComplete,
};
jclass mClass;
jweak mObject;
sp<Surface> mSurfaceTextureClient;
- // java objects cached
- jclass mByteBufferClass;
- jobject mNativeByteOrderObj;
- jmethodID mByteBufferOrderMethodID;
- jmethodID mByteBufferPositionMethodID;
- jmethodID mByteBufferLimitMethodID;
- jmethodID mByteBufferAsReadOnlyBufferMethodID;
-
sp<ALooper> mLooper;
sp<MediaCodec> mCodec;
AString mNameAtCreation;
+ bool mGraphicOutput{false};
+ bool mHasCryptoOrDescrambler{false};
std::once_flag mReleaseFlag;
+ std::once_flag mAsyncReleaseFlag;
sp<AMessage> mCallbackNotification;
sp<AMessage> mOnFrameRenderedNotification;
@@ -170,8 +198,6 @@ private:
JNIEnv *env, bool readOnly, bool clearBuffer, const sp<T> &buffer,
jobject *buf) const;
- void cacheJavaObjects(JNIEnv *env);
- void deleteJavaObjects(JNIEnv *env);
void handleCallback(const sp<AMessage> &msg);
void handleFrameRenderedNotification(const sp<AMessage> &msg);
diff --git a/media/jni/android_media_MediaCodecLinearBlock.h b/media/jni/android_media_MediaCodecLinearBlock.h
new file mode 100644
index 000000000000..8f1d2fa35d70
--- /dev/null
+++ b/media/jni/android_media_MediaCodecLinearBlock.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_MEDIACODECLINEARBLOCK_H_
+#define _ANDROID_MEDIA_MEDIACODECLINEARBLOCK_H_
+
+#include <C2Buffer.h>
+#include <binder/MemoryHeapBase.h>
+#include <hidl/HidlSupport.h>
+#include <media/MediaCodecBuffer.h>
+
+namespace android {
+
+struct JMediaCodecLinearBlock {
+ std::shared_ptr<C2Buffer> mBuffer;
+ std::shared_ptr<C2ReadView> mReadonlyMapping;
+
+ std::shared_ptr<C2LinearBlock> mBlock;
+ std::shared_ptr<C2WriteView> mReadWriteMapping;
+
+ sp<IMemory> mMemory;
+ sp<hardware::HidlMemory> mHidlMemory;
+ ssize_t mHidlMemoryOffset;
+ size_t mHidlMemorySize;
+
+ sp<MediaCodecBuffer> mLegacyBuffer;
+
+ std::once_flag mCopyWarningFlag;
+
+ std::shared_ptr<C2Buffer> toC2Buffer(size_t offset, size_t size) {
+ if (mBuffer) {
+ if (mBuffer->data().type() != C2BufferData::LINEAR) {
+ return nullptr;
+ }
+ C2ConstLinearBlock block = mBuffer->data().linearBlocks().front();
+ if (offset == 0 && size == block.capacity()) {
+ return mBuffer;
+ }
+ return C2Buffer::CreateLinearBuffer(block.subBlock(offset, size));
+ }
+ if (mBlock) {
+ return C2Buffer::CreateLinearBuffer(mBlock->share(offset, size, C2Fence{}));
+ }
+ return nullptr;
+ }
+
+ sp<hardware::HidlMemory> toHidlMemory() {
+ if (mHidlMemory) {
+ return mHidlMemory;
+ }
+ return nullptr;
+ }
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_MEDIACODECLINEARBLOCK_H_
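
Editorial note: a hedged sketch of how the toC2Buffer() helper above is expected to be used on the queueing path; the wrapper function and parameter names are assumptions for illustration only.

    // Minimal sketch, assuming the JMediaCodecLinearBlock definition above is in scope.
    #include <cstddef>
    #include <memory>

    static std::shared_ptr<C2Buffer> bufferForQueueing(
            android::JMediaCodecLinearBlock *context, size_t offset, size_t size) {
        // Whole-block requests return the original C2Buffer; partial requests are
        // wrapped as a new linear C2Buffer over the sub-block; nullptr means the
        // context holds neither a linear C2Buffer nor a writable C2LinearBlock,
        // so the caller must fall back (for example to the HidlMemory path) or fail.
        return context->toC2Buffer(offset, size);
    }
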
diff --git a/media/jni/android_media_MediaCodecList.cpp b/media/jni/android_media_MediaCodecList.cpp
index 923d1d253c6e..307d80dc15c0 100644
--- a/media/jni/android_media_MediaCodecList.cpp
+++ b/media/jni/android_media_MediaCodecList.cpp
@@ -26,6 +26,7 @@
#include <utils/Vector.h>
+#include <mutex>
#include <vector>
#include "android_runtime/AndroidRuntime.h"
diff --git a/media/jni/android_media_MediaCrypto.cpp b/media/jni/android_media_MediaCrypto.cpp
index 2d9051f5230d..517672ee6127 100644
--- a/media/jni/android_media_MediaCrypto.cpp
+++ b/media/jni/android_media_MediaCrypto.cpp
@@ -24,11 +24,10 @@
#include "jni.h"
#include <nativehelper/JNIHelp.h>
-#include <binder/IServiceManager.h>
#include <cutils/properties.h>
#include <media/stagefright/foundation/ADebug.h>
+#include <mediadrm/DrmUtils.h>
#include <mediadrm/ICrypto.h>
-#include <mediadrm/IMediaDrmService.h>
namespace android {
@@ -64,20 +63,7 @@ JCrypto::~JCrypto() {
// static
sp<ICrypto> JCrypto::MakeCrypto() {
- sp<IServiceManager> sm = defaultServiceManager();
-
- sp<IBinder> binder = sm->getService(String16("media.drm"));
- sp<IMediaDrmService> service = interface_cast<IMediaDrmService>(binder);
- if (service == NULL) {
- return NULL;
- }
-
- sp<ICrypto> crypto = service->makeCrypto();
- if (crypto == NULL || (crypto->initCheck() != OK && crypto->initCheck() != NO_INIT)) {
- return NULL;
- }
-
- return crypto;
+ return DrmUtils::MakeCrypto();
}
// static
diff --git a/media/jni/android_media_MediaDataSource.cpp b/media/jni/android_media_MediaDataSource.cpp
index 84a0e0d032d9..0cb4b2dc52f0 100644
--- a/media/jni/android_media_MediaDataSource.cpp
+++ b/media/jni/android_media_MediaDataSource.cpp
@@ -105,7 +105,8 @@ ssize_t JMediaDataSource::readAt(off64_t offset, size_t size) {
}
ALOGV("readAt %lld / %zu => %d.", (long long)offset, size, numread);
- env->GetByteArrayRegion(mByteArrayObj, 0, numread, (jbyte*)mMemory->pointer());
+ env->GetByteArrayRegion(mByteArrayObj, 0, numread,
+ (jbyte*)mMemory->unsecurePointer());
return numread;
}
diff --git a/media/jni/android_media_MediaDataSource.h b/media/jni/android_media_MediaDataSource.h
index 378baf433fed..b65039d37139 100644
--- a/media/jni/android_media_MediaDataSource.h
+++ b/media/jni/android_media_MediaDataSource.h
@@ -19,7 +19,7 @@
#include "jni.h"
-#include <media/IDataSource.h>
+#include <android/IDataSource.h>
#include <media/stagefright/foundation/ABase.h>
#include <utils/Errors.h>
#include <utils/Mutex.h>
diff --git a/media/jni/android_media_MediaDescrambler.cpp b/media/jni/android_media_MediaDescrambler.cpp
index aa79ce0a44ab..c61365a448d3 100644
--- a/media/jni/android_media_MediaDescrambler.cpp
+++ b/media/jni/android_media_MediaDescrambler.cpp
@@ -220,7 +220,7 @@ status_t JDescrambler::descramble(
return NO_MEMORY;
}
- memcpy(mMem->pointer(),
+ memcpy(mMem->unsecurePointer(),
(const void*)((const uint8_t*)srcPtr + srcOffset), totalLength);
DestinationBuffer dstBuffer;
@@ -248,7 +248,8 @@ status_t JDescrambler::descramble(
if (*status == Status::OK) {
if (*bytesWritten > 0 && (ssize_t) *bytesWritten <= totalLength) {
- memcpy((void*)((uint8_t*)dstPtr + dstOffset), mMem->pointer(), *bytesWritten);
+ memcpy((void*)((uint8_t*)dstPtr + dstOffset), mMem->unsecurePointer(),
+ *bytesWritten);
} else {
// status seems OK but bytesWritten is invalid, we really
// have no idea what is wrong.
diff --git a/media/jni/android_media_MediaDrm.cpp b/media/jni/android_media_MediaDrm.cpp
index f412161f418a..f38a29c69a3e 100644
--- a/media/jni/android_media_MediaDrm.cpp
+++ b/media/jni/android_media_MediaDrm.cpp
@@ -27,17 +27,19 @@
#include "jni.h"
#include <nativehelper/JNIHelp.h>
-#include <binder/IServiceManager.h>
+#include <android/hardware/drm/1.3/IDrmFactory.h>
#include <binder/Parcel.h>
#include <binder/PersistableBundle.h>
#include <cutils/properties.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaErrors.h>
+#include <mediadrm/DrmMetricsConsumer.h>
+#include <mediadrm/DrmUtils.h>
+#include <mediadrm/IDrmMetricsConsumer.h>
#include <mediadrm/IDrm.h>
-#include <mediadrm/IMediaDrmService.h>
using ::android::os::PersistableBundle;
-
+namespace drm = ::android::hardware::drm;
namespace android {
@@ -180,6 +182,10 @@ struct OfflineLicenseState {
jint kOfflineLicenseStateUnknown;
} gOfflineLicenseStates;
+struct KeyStatusFields {
+ jmethodID init;
+ jclass classId;
+};
struct fields_t {
jfieldID context;
@@ -201,51 +207,21 @@ struct fields_t {
jobject bundleCreator;
jmethodID createFromParcelId;
jclass parcelCreatorClassId;
+ KeyStatusFields keyStatus;
};
static fields_t gFields;
namespace {
-// Helper function to convert a native PersistableBundle to a Java
-// PersistableBundle.
-jobject nativeToJavaPersistableBundle(JNIEnv *env, jobject thiz,
- PersistableBundle* nativeBundle) {
- if (env == NULL || thiz == NULL || nativeBundle == NULL) {
- ALOGE("Unexpected NULL parmeter");
- return NULL;
- }
-
- // Create a Java parcel with the native parcel data.
- // Then create a new PersistableBundle with that parcel as a parameter.
- jobject jParcel = android::createJavaParcelObject(env);
- if (jParcel == NULL) {
- ALOGE("Failed to create a Java Parcel.");
- return NULL;
- }
-
- android::Parcel* nativeParcel = android::parcelForJavaObject(env, jParcel);
- if (nativeParcel == NULL) {
- ALOGE("Failed to get the native Parcel.");
- return NULL;
- }
-
- android::status_t result = nativeBundle->writeToParcel(nativeParcel);
- nativeParcel->setDataPosition(0);
- if (result != android::OK) {
- ALOGE("Failed to write nativeBundle to Parcel: %d.", result);
- return NULL;
- }
-
- jobject newBundle = env->CallObjectMethod(gFields.bundleCreator,
- gFields.createFromParcelId,
- jParcel);
- if (newBundle == NULL) {
- ALOGE("Failed to create a new PersistableBundle "
- "from the createFromParcel call.");
+jbyteArray hidlVectorToJByteArray(const hardware::hidl_vec<uint8_t> &vector) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ size_t length = vector.size();
+ jbyteArray result = env->NewByteArray(length);
+ if (result != NULL) {
+ env->SetByteArrayRegion(result, 0, length, reinterpret_cast<const jbyte *>(vector.data()));
}
-
- return newBundle;
+ return result;
}
} // namespace anonymous
@@ -257,7 +233,7 @@ class JNIDrmListener: public DrmListener
public:
JNIDrmListener(JNIEnv* env, jobject thiz, jobject weak_thiz);
~JNIDrmListener();
- virtual void notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj = NULL);
+ virtual void notify(DrmPlugin::EventType eventType, int extra, const ListenerArgs *arg = NULL);
private:
JNIDrmListener();
jclass mClass; // Reference to MediaDrm class
@@ -291,7 +267,7 @@ JNIDrmListener::~JNIDrmListener()
}
void JNIDrmListener::notify(DrmPlugin::EventType eventType, int extra,
- const Parcel *obj)
+ const ListenerArgs *args)
{
jint jwhat;
jint jeventType = 0;
@@ -333,15 +309,11 @@ void JNIDrmListener::notify(DrmPlugin::EventType eventType, int extra,
}
JNIEnv *env = AndroidRuntime::getJNIEnv();
- if (obj && obj->dataSize() > 0) {
- jobject jParcel = createJavaParcelObject(env);
- if (jParcel != NULL) {
- Parcel* nativeParcel = parcelForJavaObject(env, jParcel);
- nativeParcel->setData(obj->data(), obj->dataSize());
- env->CallStaticVoidMethod(mClass, gFields.post_event, mObject,
- jwhat, jeventType, extra, jParcel);
- env->DeleteLocalRef(jParcel);
- }
+ if (args) {
+ env->CallStaticVoidMethod(mClass, gFields.post_event, mObject,
+ jwhat, jeventType, extra,
+ args->jSessionId, args->jData, args->jExpirationTime,
+ args->jKeyStatusList, args->jHasNewUsableKey);
}
if (env->ExceptionCheck()) {
@@ -486,20 +458,7 @@ JDrm::~JDrm() {
// static
sp<IDrm> JDrm::MakeDrm() {
- sp<IServiceManager> sm = defaultServiceManager();
-
- sp<IBinder> binder = sm->getService(String16("media.drm"));
- sp<IMediaDrmService> service = interface_cast<IMediaDrmService>(binder);
- if (service == NULL) {
- return NULL;
- }
-
- sp<IDrm> drm = service->makeDrm();
- if (drm == NULL || (drm->initCheck() != OK && drm->initCheck() != NO_INIT)) {
- return NULL;
- }
-
- return drm;
+ return DrmUtils::MakeDrm();
}
// static
@@ -525,7 +484,7 @@ status_t JDrm::setListener(const sp<DrmListener>& listener) {
return OK;
}
-void JDrm::notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj) {
+void JDrm::notify(DrmPlugin::EventType eventType, int extra, const ListenerArgs *args) {
sp<DrmListener> listener;
mLock.lock();
listener = mListener;
@@ -533,8 +492,59 @@ void JDrm::notify(DrmPlugin::EventType eventType, int extra, const Parcel *obj)
if (listener != NULL) {
Mutex::Autolock lock(mNotifyLock);
- listener->notify(eventType, extra, obj);
+ listener->notify(eventType, extra, args);
+ }
+}
+
+void JDrm::sendEvent(
+ DrmPlugin::EventType eventType,
+ const hardware::hidl_vec<uint8_t> &sessionId,
+ const hardware::hidl_vec<uint8_t> &data) {
+ ListenerArgs args{
+ .jSessionId = hidlVectorToJByteArray(sessionId),
+ .jData = hidlVectorToJByteArray(data),
+ };
+ notify(eventType, 0, &args);
+}
+
+void JDrm::sendExpirationUpdate(
+ const hardware::hidl_vec<uint8_t> &sessionId,
+ int64_t expiryTimeInMS) {
+ ListenerArgs args{
+ .jSessionId = hidlVectorToJByteArray(sessionId),
+ .jExpirationTime = expiryTimeInMS,
+ };
+ notify(DrmPlugin::kDrmPluginEventExpirationUpdate, 0, &args);
+}
+
+void JDrm::sendKeysChange(
+ const hardware::hidl_vec<uint8_t> &sessionId,
+ const std::vector<DrmKeyStatus> &keyStatusList,
+ bool hasNewUsableKey) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass clazz = gFields.arraylistClassId;
+ jobject arrayList = env->NewObject(clazz, gFields.arraylist.init);
+ clazz = gFields.keyStatus.classId;
+ for (const auto &keyStatus : keyStatusList) {
+ jbyteArray jKeyId(hidlVectorToJByteArray(keyStatus.keyId));
+ jint jStatusCode(keyStatus.type);
+ jobject jKeyStatus = env->NewObject(clazz, gFields.keyStatus.init, jKeyId, jStatusCode);
+ env->CallBooleanMethod(arrayList, gFields.arraylist.add, jKeyStatus);
}
+ ListenerArgs args{
+ .jSessionId = hidlVectorToJByteArray(sessionId),
+ .jKeyStatusList = arrayList,
+ .jHasNewUsableKey = hasNewUsableKey,
+ };
+ notify(DrmPlugin::kDrmPluginEventKeysChange, 0, &args);
+}
+
+void JDrm::sendSessionLostState(
+ const hardware::hidl_vec<uint8_t> &sessionId) {
+ ListenerArgs args{
+ .jSessionId = hidlVectorToJByteArray(sessionId),
+ };
+ notify(DrmPlugin::kDrmPluginEventSessionLostState, 0, &args);
}
void JDrm::disconnect() {
@@ -733,7 +743,7 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
FIND_CLASS(clazz, "android/media/MediaDrm");
GET_FIELD_ID(gFields.context, clazz, "mNativeContext", "J");
GET_STATIC_METHOD_ID(gFields.post_event, clazz, "postEventFromNative",
- "(Ljava/lang/Object;IIILjava/lang/Object;)V");
+ "(Ljava/lang/Object;III[B[BJLjava/util/List;Z)V");
jfieldID field;
GET_STATIC_FIELD_ID(field, clazz, "EVENT_PROVISION_REQUIRED", "I");
@@ -893,6 +903,10 @@ static void android_media_MediaDrm_native_init(JNIEnv *env) {
gSessionExceptionErrorCodes.kErrorUnknown = env->GetStaticIntField(clazz, field);
GET_STATIC_FIELD_ID(field, clazz, "ERROR_RESOURCE_CONTENTION", "I");
gSessionExceptionErrorCodes.kResourceContention = env->GetStaticIntField(clazz, field);
+
+ FIND_CLASS(clazz, "android/media/MediaDrm$KeyStatus");
+ gFields.keyStatus.classId = static_cast<jclass>(env->NewGlobalRef(clazz));
+ GET_METHOD_ID(gFields.keyStatus.init, clazz, "<init>", "([BI)V");
}
static void android_media_MediaDrm_native_setup(
@@ -958,6 +972,26 @@ DrmPlugin::SecurityLevel jintToSecurityLevel(jint jlevel) {
return level;
}
+static jbyteArray android_media_MediaDrm_getSupportedCryptoSchemesNative(JNIEnv *env) {
+ std::vector<uint8_t> bv;
+ for (auto &factory : DrmUtils::MakeDrmFactories()) {
+ sp<drm::V1_3::IDrmFactory> factoryV1_3 = drm::V1_3::IDrmFactory::castFrom(factory);
+ if (factoryV1_3 == nullptr) {
+ continue;
+ }
+ factoryV1_3->getSupportedCryptoSchemes(
+ [&](const hardware::hidl_vec<hardware::hidl_array<uint8_t, 16>>& schemes) {
+ for (const auto &scheme : schemes) {
+ bv.insert(bv.end(), scheme.data(), scheme.data() + scheme.size());
+ }
+ });
+ }
+
+ jbyteArray jUuidBytes = env->NewByteArray(bv.size());
+ env->SetByteArrayRegion(jUuidBytes, 0, bv.size(), reinterpret_cast<const jbyte *>(bv.data()));
+ return jUuidBytes;
+}
+
static jboolean android_media_MediaDrm_isCryptoSchemeSupportedNative(
JNIEnv *env, jobject /* thiz */, jbyteArray uuidObj, jstring jmimeType,
jint jSecurityLevel) {
@@ -1878,13 +1912,14 @@ android_media_MediaDrm_native_getMetrics(JNIEnv *env, jobject thiz)
// Retrieve current metrics snapshot from drm.
PersistableBundle metrics;
- status_t err = drm->getMetrics(&metrics);
+ sp<IDrmMetricsConsumer> consumer(new DrmMetricsConsumer(&metrics));
+ status_t err = drm->getMetrics(consumer);
if (err != OK) {
ALOGE("getMetrics failed: %d", (int)err);
return (jobject) NULL;
}
- return nativeToJavaPersistableBundle(env, thiz, &metrics);
+ return MediaMetricsJNI::nativeToJavaPersistableBundle(env, &metrics);
}
static jbyteArray android_media_MediaDrm_signRSANative(
@@ -1927,6 +1962,9 @@ static const JNINativeMethod gMethods[] = {
{ "native_setup", "(Ljava/lang/Object;[BLjava/lang/String;)V",
(void *)android_media_MediaDrm_native_setup },
+ { "getSupportedCryptoSchemesNative", "()[B",
+ (void *)android_media_MediaDrm_getSupportedCryptoSchemesNative },
+
{ "isCryptoSchemeSupportedNative", "([BLjava/lang/String;I)Z",
(void *)android_media_MediaDrm_isCryptoSchemeSupportedNative },
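
Editorial note: getSupportedCryptoSchemesNative() above returns one flat byte array built from concatenated 16-byte scheme UUIDs. A hedged consumer-side sketch of recovering the individual schemes; the helper name is assumed and not part of the patch.

    #include <algorithm>
    #include <array>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    static std::vector<std::array<uint8_t, 16>> splitUuids(const std::vector<uint8_t> &flat) {
        std::vector<std::array<uint8_t, 16>> uuids;
        // Each supported scheme occupies exactly 16 bytes; any trailing partial data is ignored.
        for (size_t i = 0; i + 16 <= flat.size(); i += 16) {
            std::array<uint8_t, 16> uuid;
            std::copy(flat.begin() + i, flat.begin() + i + 16, uuid.begin());
            uuids.push_back(uuid);
        }
        return uuids;
    }
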
diff --git a/media/jni/android_media_MediaDrm.h b/media/jni/android_media_MediaDrm.h
index 684069b0120a..b1f544cb2dbe 100644
--- a/media/jni/android_media_MediaDrm.h
+++ b/media/jni/android_media_MediaDrm.h
@@ -21,19 +21,33 @@
#include <media/stagefright/foundation/ABase.h>
#include <mediadrm/IDrm.h>
+#include <mediadrm/IDrmClient.h>
+#include <hidl/HidlSupport.h>
#include <utils/Errors.h>
#include <utils/RefBase.h>
+namespace {
+
+struct ListenerArgs {
+ jbyteArray jSessionId;
+ jbyteArray jData;
+ jlong jExpirationTime;
+ jobject jKeyStatusList;
+ jboolean jHasNewUsableKey;
+};
+
+}
+
namespace android {
class DrmListener: virtual public RefBase
{
public:
virtual void notify(DrmPlugin::EventType eventType, int extra,
- const Parcel *obj) = 0;
+ const ListenerArgs *args) = 0;
};
-struct JDrm : public BnDrmClient {
+struct JDrm : public IDrmClient {
static status_t IsCryptoSchemeSupported(const uint8_t uuid[16],
const String8 &mimeType,
DrmPlugin::SecurityLevel level,
@@ -44,7 +58,23 @@ struct JDrm : public BnDrmClient {
status_t initCheck() const;
sp<IDrm> getDrm() { return mDrm; }
- void notify(DrmPlugin::EventType, int extra, const Parcel *obj);
+ void sendEvent(
+ DrmPlugin::EventType eventType,
+ const hardware::hidl_vec<uint8_t> &sessionId,
+ const hardware::hidl_vec<uint8_t> &data) override;
+
+ void sendExpirationUpdate(
+ const hardware::hidl_vec<uint8_t> &sessionId,
+ int64_t expiryTimeInMS) override;
+
+ void sendKeysChange(
+ const hardware::hidl_vec<uint8_t> &sessionId,
+ const std::vector<DrmKeyStatus> &keyStatusList,
+ bool hasNewUsableKey) override;
+
+ void sendSessionLostState(
+ const hardware::hidl_vec<uint8_t> &sessionId) override;
+
status_t setListener(const sp<DrmListener>& listener);
void disconnect();
@@ -63,6 +93,8 @@ private:
static sp<IDrm> MakeDrm();
static sp<IDrm> MakeDrm(const uint8_t uuid[16], const String8 &appPackageName);
+ void notify(DrmPlugin::EventType, int extra, const ListenerArgs *args);
+
DISALLOW_EVIL_CONSTRUCTORS(JDrm);
};
diff --git a/media/jni/android_media_MediaExtractor.cpp b/media/jni/android_media_MediaExtractor.cpp
index 0d395e58a3c5..948ebcd9fdd5 100644
--- a/media/jni/android_media_MediaExtractor.cpp
+++ b/media/jni/android_media_MediaExtractor.cpp
@@ -913,7 +913,7 @@ android_media_MediaExtractor_native_getMetrics(JNIEnv * env, jobject thiz)
}
// build and return the Bundle
- std::unique_ptr<MediaAnalyticsItem> item(MediaAnalyticsItem::create());
+ std::unique_ptr<mediametrics::Item> item(mediametrics::Item::create());
item->readFromParcel(reply);
jobject mybundle = MediaMetricsJNI::writeMetricsToBundle(env, item.get(), NULL);
diff --git a/media/jni/android_media_MediaHTTPConnection.cpp b/media/jni/android_media_MediaHTTPConnection.cpp
index 365e045689f0..53adff3e251e 100644
--- a/media/jni/android_media_MediaHTTPConnection.cpp
+++ b/media/jni/android_media_MediaHTTPConnection.cpp
@@ -148,7 +148,7 @@ static jint android_media_MediaHTTPConnection_native_readAt(
byteArrayObj,
0,
n,
- (jbyte *)conn->getIMemory()->pointer());
+ (jbyte *)conn->getIMemory()->unsecurePointer());
}
return n;
diff --git a/media/jni/android_media_MediaMetadataRetriever.cpp b/media/jni/android_media_MediaMetadataRetriever.cpp
index 91cd6d3826f6..6fbd29cb8623 100644
--- a/media/jni/android_media_MediaMetadataRetriever.cpp
+++ b/media/jni/android_media_MediaMetadataRetriever.cpp
@@ -22,7 +22,7 @@
#include <assert.h>
#include <utils/Log.h>
#include <utils/threads.h>
-#include <SkBitmap.h>
+#include <android/graphics/bitmap.h>
#include <media/IMediaHTTPService.h>
#include <media/mediametadataretriever.h>
#include <media/mediascanner.h>
@@ -36,8 +36,6 @@
#include "android_media_Streams.h"
#include "android_util_Binder.h"
-#include "android/graphics/GraphicsJNI.h"
-
using namespace android;
struct fields_t {
@@ -45,8 +43,6 @@ struct fields_t {
jclass bitmapClazz; // Must be a global ref
jmethodID createBitmapMethod;
jmethodID createScaledBitmapMethod;
- jclass configClazz; // Must be a global ref
- jmethodID createConfigMethod;
jclass bitmapParamsClazz; // Must be a global ref
jfieldID inPreferredConfig;
jfieldID outActualConfig;
@@ -263,7 +259,7 @@ static void rotate(T *dst, const T *src, size_t width, size_t height, int angle)
static jobject getBitmapFromVideoFrame(
JNIEnv *env, VideoFrame *videoFrame, jint dst_width, jint dst_height,
- SkColorType outColorType) {
+ AndroidBitmapFormat outColorType) {
ALOGV("getBitmapFromVideoFrame: dimension = %dx%d, displaySize = %dx%d, bytes = %d",
videoFrame->mWidth,
videoFrame->mHeight,
@@ -271,11 +267,7 @@ static jobject getBitmapFromVideoFrame(
videoFrame->mDisplayHeight,
videoFrame->mSize);
- ScopedLocalRef<jobject> config(env,
- env->CallStaticObjectMethod(
- fields.configClazz,
- fields.createConfigMethod,
- GraphicsJNI::colorTypeToLegacyBitmapConfig(outColorType)));
+ ScopedLocalRef<jobject> config(env, ABitmapConfig_getConfigFromFormat(env, outColorType));
uint32_t width, height, displayWidth, displayHeight;
bool swapWidthAndHeight = false;
@@ -306,10 +298,9 @@ static jobject getBitmapFromVideoFrame(
return NULL;
}
- SkBitmap bitmap;
- GraphicsJNI::getSkBitmap(env, jBitmap, &bitmap);
+ graphics::Bitmap bitmap(env, jBitmap);
- if (outColorType == kRGB_565_SkColorType) {
+ if (outColorType == ANDROID_BITMAP_FORMAT_RGB_565) {
rotate((uint16_t*)bitmap.getPixels(),
(uint16_t*)((char*)videoFrame + sizeof(VideoFrame)),
videoFrame->mWidth,
@@ -350,40 +341,31 @@ static jobject getBitmapFromVideoFrame(
return jBitmap;
}
-static int getColorFormat(JNIEnv *env, jobject options) {
+static AndroidBitmapFormat getColorFormat(JNIEnv *env, jobject options,
+ AndroidBitmapFormat defaultPreferred = ANDROID_BITMAP_FORMAT_RGBA_8888) {
if (options == NULL) {
- return HAL_PIXEL_FORMAT_RGBA_8888;
+ return defaultPreferred;
}
ScopedLocalRef<jobject> inConfig(env, env->GetObjectField(options, fields.inPreferredConfig));
- SkColorType prefColorType = GraphicsJNI::getNativeBitmapColorType(env, inConfig.get());
+ AndroidBitmapFormat format = ABitmapConfig_getFormatFromConfig(env, inConfig.get());
- if (prefColorType == kRGB_565_SkColorType) {
- return HAL_PIXEL_FORMAT_RGB_565;
+ if (format == ANDROID_BITMAP_FORMAT_RGB_565) {
+ return ANDROID_BITMAP_FORMAT_RGB_565;
}
- return HAL_PIXEL_FORMAT_RGBA_8888;
+ return ANDROID_BITMAP_FORMAT_RGBA_8888;
}
-static SkColorType setOutColorType(JNIEnv *env, int colorFormat, jobject options) {
- SkColorType outColorType = kN32_SkColorType;
- if (colorFormat == HAL_PIXEL_FORMAT_RGB_565) {
- outColorType = kRGB_565_SkColorType;
- }
-
+static void setOutConfig(JNIEnv *env, jobject options, AndroidBitmapFormat colorFormat) {
if (options != NULL) {
- ScopedLocalRef<jobject> config(env,
- env->CallStaticObjectMethod(
- fields.configClazz,
- fields.createConfigMethod,
- GraphicsJNI::colorTypeToLegacyBitmapConfig(outColorType)));
-
+ ScopedLocalRef<jobject> config(env, ABitmapConfig_getConfigFromFormat(env, colorFormat));
env->SetObjectField(options, fields.outActualConfig, config.get());
}
- return outColorType;
}
static jobject android_media_MediaMetadataRetriever_getFrameAtTime(
- JNIEnv *env, jobject thiz, jlong timeUs, jint option, jint dst_width, jint dst_height)
+ JNIEnv *env, jobject thiz, jlong timeUs, jint option,
+ jint dst_width, jint dst_height, jobject params)
{
ALOGV("getFrameAtTime: %lld us option: %d dst width: %d heigh: %d",
(long long)timeUs, option, dst_width, dst_height);
@@ -392,19 +374,27 @@ static jobject android_media_MediaMetadataRetriever_getFrameAtTime(
jniThrowException(env, "java/lang/IllegalStateException", "No retriever available");
return NULL;
}
+ // For getFrameAtTime family of calls, default to ANDROID_BITMAP_FORMAT_RGB_565
+ // to keep the behavior consistent with older releases
+ AndroidBitmapFormat colorFormat = getColorFormat(env, params, ANDROID_BITMAP_FORMAT_RGB_565);
// Call native method to retrieve a video frame
VideoFrame *videoFrame = NULL;
- sp<IMemory> frameMemory = retriever->getFrameAtTime(timeUs, option);
+ sp<IMemory> frameMemory = retriever->getFrameAtTime(timeUs, option, colorFormat);
+ // TODO: Using unsecurePointer() has some associated security pitfalls
+ // (see declaration for details).
+ // Either document why it is safe in this case or address the
+ // issue (e.g. by copying).
if (frameMemory != 0) { // cast the shared structure to a VideoFrame object
- videoFrame = static_cast<VideoFrame *>(frameMemory->pointer());
+ videoFrame = static_cast<VideoFrame *>(frameMemory->unsecurePointer());
}
if (videoFrame == NULL) {
ALOGE("getFrameAtTime: videoFrame is a NULL pointer");
return NULL;
}
- return getBitmapFromVideoFrame(env, videoFrame, dst_width, dst_height, kRGB_565_SkColorType);
+ setOutConfig(env, params, colorFormat);
+ return getBitmapFromVideoFrame(env, videoFrame, dst_width, dst_height, colorFormat);
}
static jobject android_media_MediaMetadataRetriever_getImageAtIndex(
@@ -417,22 +407,25 @@ static jobject android_media_MediaMetadataRetriever_getImageAtIndex(
return NULL;
}
- int colorFormat = getColorFormat(env, params);
+ AndroidBitmapFormat colorFormat = getColorFormat(env, params);
// Call native method to retrieve an image
VideoFrame *videoFrame = NULL;
sp<IMemory> frameMemory = retriever->getImageAtIndex(index, colorFormat);
if (frameMemory != 0) { // cast the shared structure to a VideoFrame object
- videoFrame = static_cast<VideoFrame *>(frameMemory->pointer());
+ // TODO: Using unsecurePointer() has some associated security pitfalls
+ // (see declaration for details).
+ // Either document why it is safe in this case or address the
+ // issue (e.g. by copying).
+ videoFrame = static_cast<VideoFrame *>(frameMemory->unsecurePointer());
}
if (videoFrame == NULL) {
ALOGE("getImageAtIndex: videoFrame is a NULL pointer");
return NULL;
}
- SkColorType outColorType = setOutColorType(env, colorFormat, params);
-
- return getBitmapFromVideoFrame(env, videoFrame, -1, -1, outColorType);
+ setOutConfig(env, params, colorFormat);
+ return getBitmapFromVideoFrame(env, videoFrame, -1, -1, colorFormat);
}
static jobject android_media_MediaMetadataRetriever_getThumbnailImageAtIndex(
@@ -446,7 +439,7 @@ static jobject android_media_MediaMetadataRetriever_getThumbnailImageAtIndex(
return NULL;
}
- int colorFormat = getColorFormat(env, params);
+ AndroidBitmapFormat colorFormat = getColorFormat(env, params);
jint dst_width = -1, dst_height = -1;
// Call native method to retrieve an image
@@ -454,7 +447,11 @@ static jobject android_media_MediaMetadataRetriever_getThumbnailImageAtIndex(
sp<IMemory> frameMemory = retriever->getImageAtIndex(
index, colorFormat, true /*metaOnly*/, true /*thumbnail*/);
if (frameMemory != 0) {
- videoFrame = static_cast<VideoFrame *>(frameMemory->pointer());
+ // TODO: Using unsecurePointer() has some associated security pitfalls
+ // (see declaration for details).
+ // Either document why it is safe in this case or address the
+ // issue (e.g. by copying).
+ videoFrame = static_cast<VideoFrame *>(frameMemory->unsecurePointer());
int32_t thumbWidth = videoFrame->mWidth;
int32_t thumbHeight = videoFrame->mHeight;
videoFrame = NULL;
@@ -467,7 +464,11 @@ static jobject android_media_MediaMetadataRetriever_getThumbnailImageAtIndex(
|| thumbPixels * 6 >= maxPixels) {
frameMemory = retriever->getImageAtIndex(
index, colorFormat, false /*metaOnly*/, true /*thumbnail*/);
- videoFrame = static_cast<VideoFrame *>(frameMemory->pointer());
+ // TODO: Using unsecurePointer() has some associated security pitfalls
+ // (see declaration for details).
+ // Either document why it is safe in this case or address the
+ // issue (e.g. by copying).
+ videoFrame = static_cast<VideoFrame *>(frameMemory->unsecurePointer());
if (thumbPixels > maxPixels) {
int downscale = ceil(sqrt(thumbPixels / (float)maxPixels));
@@ -485,9 +486,8 @@ static jobject android_media_MediaMetadataRetriever_getThumbnailImageAtIndex(
// thumbnails extracted by BitmapFactory APIs.
videoFrame->mRotationAngle = 0;
- SkColorType outColorType = setOutColorType(env, colorFormat, params);
-
- return getBitmapFromVideoFrame(env, videoFrame, dst_width, dst_height, outColorType);
+ setOutConfig(env, params, colorFormat);
+ return getBitmapFromVideoFrame(env, videoFrame, dst_width, dst_height, colorFormat);
}
static jobject android_media_MediaMetadataRetriever_getFrameAtIndex(
@@ -501,15 +501,7 @@ static jobject android_media_MediaMetadataRetriever_getFrameAtIndex(
return NULL;
}
- int colorFormat = getColorFormat(env, params);
- std::vector<sp<IMemory> > frames;
- status_t err = retriever->getFrameAtIndex(&frames, frameIndex, numFrames, colorFormat);
- if (err != OK || frames.size() == 0) {
- jniThrowException(env,
- "java/lang/IllegalStateException", "No frames from retriever");
- return NULL;
- }
jobject arrayList = env->NewObject(fields.arrayListClazz, fields.arrayListInit);
if (arrayList == NULL) {
jniThrowException(env,
@@ -517,18 +509,33 @@ static jobject android_media_MediaMetadataRetriever_getFrameAtIndex(
return NULL;
}
- SkColorType outColorType = setOutColorType(env, colorFormat, params);
-
- for (size_t i = 0; i < frames.size(); i++) {
- if (frames[i] == NULL || frames[i]->pointer() == NULL) {
+ AndroidBitmapFormat colorFormat = getColorFormat(env, params);
+ setOutConfig(env, params, colorFormat);
+ size_t i = 0;
+ for (; i < numFrames; i++) {
+ sp<IMemory> frame = retriever->getFrameAtIndex(frameIndex + i, colorFormat);
+ if (frame == NULL || frame->unsecurePointer() == NULL) {
ALOGE("video frame at index %zu is a NULL pointer", frameIndex + i);
- continue;
+ break;
}
- VideoFrame *videoFrame = static_cast<VideoFrame *>(frames[i]->pointer());
- jobject bitmapObj = getBitmapFromVideoFrame(env, videoFrame, -1, -1, outColorType);
+ // TODO: Using unsecurePointer() has some associated security pitfalls
+ // (see declaration for details).
+ // Either document why it is safe in this case or address the
+ // issue (e.g. by copying).
+ VideoFrame *videoFrame = static_cast<VideoFrame *>(frame->unsecurePointer());
+ jobject bitmapObj = getBitmapFromVideoFrame(env, videoFrame, -1, -1, colorFormat);
env->CallBooleanMethod(arrayList, fields.arrayListAdd, bitmapObj);
env->DeleteLocalRef(bitmapObj);
}
+
+ if (i == 0) {
+ env->DeleteLocalRef(arrayList);
+
+ jniThrowException(env,
+ "java/lang/IllegalStateException", "No frames from retriever");
+ return NULL;
+ }
+
return arrayList;
}
@@ -548,7 +555,11 @@ static jbyteArray android_media_MediaMetadataRetriever_getEmbeddedPicture(
// the method name to getEmbeddedPicture().
sp<IMemory> albumArtMemory = retriever->extractAlbumArt();
if (albumArtMemory != 0) { // cast the shared structure to a MediaAlbumArt object
- mediaAlbumArt = static_cast<MediaAlbumArt *>(albumArtMemory->pointer());
+ // TODO: Using unsecurePointer() has some associated security pitfalls
+ // (see declaration for details).
+ // Either document why it is safe in this case or address the
+ // issue (e.g. by copying).
+ mediaAlbumArt = static_cast<MediaAlbumArt *>(albumArtMemory->unsecurePointer());
}
if (mediaAlbumArt == NULL) {
ALOGE("getEmbeddedPicture: Call to getEmbeddedPicture failed.");
@@ -637,21 +648,6 @@ static void android_media_MediaMetadataRetriever_native_init(JNIEnv *env)
return;
}
- clazz.reset(env->FindClass("android/graphics/Bitmap$Config"));
- if (clazz.get() == NULL) {
- return;
- }
- fields.configClazz = (jclass) env->NewGlobalRef(clazz.get());
- if (fields.configClazz == NULL) {
- return;
- }
- fields.createConfigMethod =
- env->GetStaticMethodID(fields.configClazz, "nativeToConfig",
- "(I)Landroid/graphics/Bitmap$Config;");
- if (fields.createConfigMethod == NULL) {
- return;
- }
-
clazz.reset(env->FindClass("android/media/MediaMetadataRetriever$BitmapParams"));
if (clazz.get() == NULL) {
return;
@@ -712,7 +708,7 @@ static const JNINativeMethod nativeMethods[] = {
(void *)android_media_MediaMetadataRetriever_setDataSourceFD},
{"_setDataSource", "(Landroid/media/MediaDataSource;)V",
(void *)android_media_MediaMetadataRetriever_setDataSourceCallback},
- {"_getFrameAtTime", "(JIII)Landroid/graphics/Bitmap;",
+ {"_getFrameAtTime", "(JIIILandroid/media/MediaMetadataRetriever$BitmapParams;)Landroid/graphics/Bitmap;",
(void *)android_media_MediaMetadataRetriever_getFrameAtTime},
{
"_getImageAtIndex",
@@ -732,7 +728,7 @@ static const JNINativeMethod nativeMethods[] = {
(void *)android_media_MediaMetadataRetriever_getFrameAtIndex
},
- {"extractMetadata", "(I)Ljava/lang/String;",
+ {"nativeExtractMetadata", "(I)Ljava/lang/String;",
(void *)android_media_MediaMetadataRetriever_extractMetadata},
{"getEmbeddedPicture", "(I)[B",
(void *)android_media_MediaMetadataRetriever_getEmbeddedPicture},
diff --git a/media/jni/android_media_MediaMetricsJNI.cpp b/media/jni/android_media_MediaMetricsJNI.cpp
index e7487c3cbc67..c064de299b19 100644
--- a/media/jni/android_media_MediaMetricsJNI.cpp
+++ b/media/jni/android_media_MediaMetricsJNI.cpp
@@ -16,212 +16,228 @@
#define LOG_TAG "MediaMetricsJNI"
+#include <binder/Parcel.h>
#include <jni.h>
+#include <media/IMediaMetricsService.h>
+#include <media/MediaMetricsItem.h>
#include <nativehelper/JNIHelp.h>
+#include <variant>
#include "android_media_MediaMetricsJNI.h"
-#include <media/MediaAnalyticsItem.h>
-
+#include "android_os_Parcel.h"
+#include "android_runtime/AndroidRuntime.h"
// This source file is compiled and linked into:
// core/jni/ (libandroid_runtime.so)
namespace android {
+namespace {
+struct BundleHelper {
+ BundleHelper(JNIEnv* _env, jobject _bundle)
+ : env(_env)
+ , clazzBundle(env->FindClass("android/os/PersistableBundle"))
+ , putIntID(env->GetMethodID(clazzBundle, "putInt", "(Ljava/lang/String;I)V"))
+ , putLongID(env->GetMethodID(clazzBundle, "putLong", "(Ljava/lang/String;J)V"))
+ , putDoubleID(env->GetMethodID(clazzBundle, "putDouble", "(Ljava/lang/String;D)V"))
+ , putStringID(env->GetMethodID(clazzBundle,
+ "putString", "(Ljava/lang/String;Ljava/lang/String;)V"))
+ , constructID(env->GetMethodID(clazzBundle, "<init>", "()V"))
+ , bundle(_bundle == nullptr ? env->NewObject(clazzBundle, constructID) : _bundle)
+ { }
+
+ JNIEnv* const env;
+ const jclass clazzBundle;
+ const jmethodID putIntID;
+ const jmethodID putLongID;
+ const jmethodID putDoubleID;
+ const jmethodID putStringID;
+ const jmethodID constructID;
+ jobject const bundle;
+
+ // We use templated put to access mediametrics::Item based on data type not type enum.
+ // See std::variant and std::visit.
+ template<typename T>
+ void put(jstring keyName, const T& value) = delete;
+
+ template<>
+ void put(jstring keyName, const int32_t& value) {
+ env->CallVoidMethod(bundle, putIntID, keyName, (jint)value);
+ }
+
+ template<>
+ void put(jstring keyName, const int64_t& value) {
+ env->CallVoidMethod(bundle, putLongID, keyName, (jlong)value);
+ }
+
+ template<>
+ void put(jstring keyName, const double& value) {
+ env->CallVoidMethod(bundle, putDoubleID, keyName, (jdouble)value);
+ }
+
+ template<>
+ void put(jstring keyName, const std::string& value) {
+ env->CallVoidMethod(bundle, putStringID, keyName, env->NewStringUTF(value.c_str()));
+ }
+
+ template<>
+ void put(jstring keyName, const std::pair<int64_t, int64_t>& value) {
+ ; // rate is currently ignored
+ }
+
+ template<>
+ void put(jstring keyName, const std::monostate& value) {
+ ; // none is currently ignored
+ }
+
+ // string char * helpers
+
+ template<>
+ void put(jstring keyName, const char * const& value) {
+ env->CallVoidMethod(bundle, putStringID, keyName, env->NewStringUTF(value));
+ }
+
+ template<>
+ void put(jstring keyName, char * const& value) {
+ env->CallVoidMethod(bundle, putStringID, keyName, env->NewStringUTF(value));
+ }
+
+ // We allow both jstring and non-jstring variants.
+ template<typename T>
+ void put(const char *keyName, const T& value) {
+ put(env->NewStringUTF(keyName), value);
+ }
+};
+} // namespace
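
Editorial note: the BundleHelper above dispatches each mediametrics property through a visitor so that the typed put() overloads are selected by the value's actual C++ type. A hedged standalone sketch of that dispatch; the variant alternatives shown are assumptions for illustration.

    #include <cstdint>
    #include <string>
    #include <variant>

    // Illustrative value type; the real mediametrics property visit() covers the
    // same alternatives plus a rate pair (std::pair<int64_t, int64_t>).
    using PropValue = std::variant<std::monostate, int32_t, int64_t, double, std::string>;

    template <typename Bundle>
    void putProp(Bundle &bh, const char *name, const PropValue &value) {
        // std::visit calls the generic lambda with the concrete alternative held by
        // the variant; overload resolution then selects the matching bh.put().
        std::visit([&](const auto &v) { bh.put(name, v); }, value);
    }
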
+
// place the attributes into a java PersistableBundle object
-jobject MediaMetricsJNI::writeMetricsToBundle(JNIEnv* env, MediaAnalyticsItem *item, jobject mybundle) {
+jobject MediaMetricsJNI::writeMetricsToBundle(
+ JNIEnv* env, mediametrics::Item *item, jobject bundle)
+{
+ BundleHelper bh(env, bundle);
+
+ if (bh.bundle == nullptr) {
+ ALOGE("%s: unable to create Bundle", __func__);
+ return nullptr;
+ }
- jclass clazzBundle = env->FindClass("android/os/PersistableBundle");
- if (clazzBundle==NULL) {
- ALOGE("can't find android/os/PersistableBundle");
- return NULL;
+ bh.put(mediametrics::BUNDLE_KEY, item->getKey().c_str());
+ if (item->getPid() != -1) {
+ bh.put(mediametrics::BUNDLE_PID, (int32_t)item->getPid());
+ }
+ if (item->getTimestamp() > 0) {
+ bh.put(mediametrics::BUNDLE_TIMESTAMP, (int64_t)item->getTimestamp());
+ }
+ if (item->getUid() != -1) {
+ bh.put(mediametrics::BUNDLE_UID, (int32_t)item->getUid());
}
- // sometimes the caller provides one for us to fill
- if (mybundle == NULL) {
- // create the bundle
- jmethodID constructID = env->GetMethodID(clazzBundle, "<init>", "()V");
- mybundle = env->NewObject(clazzBundle, constructID);
- if (mybundle == NULL) {
- return NULL;
- }
- }
-
- // grab methods that we can invoke
- jmethodID setIntID = env->GetMethodID(clazzBundle, "putInt", "(Ljava/lang/String;I)V");
- jmethodID setLongID = env->GetMethodID(clazzBundle, "putLong", "(Ljava/lang/String;J)V");
- jmethodID setDoubleID = env->GetMethodID(clazzBundle, "putDouble", "(Ljava/lang/String;D)V");
- jmethodID setStringID = env->GetMethodID(clazzBundle, "putString", "(Ljava/lang/String;Ljava/lang/String;)V");
-
- // env, class, method, {parms}
- //env->CallVoidMethod(env, mybundle, setIntID, jstr, jint);
-
- // iterate through my attributes
- // -- get name, get type, get value
- // -- insert appropriately into the bundle
- for (size_t i = 0 ; i < item->mPropCount; i++ ) {
- MediaAnalyticsItem::Prop *prop = &item->mProps[i];
- // build the key parameter from prop->mName
- jstring keyName = env->NewStringUTF(prop->mName);
- // invoke the appropriate method to insert
- switch (prop->mType) {
- case MediaAnalyticsItem::kTypeInt32:
- env->CallVoidMethod(mybundle, setIntID,
- keyName, (jint) prop->u.int32Value);
- break;
- case MediaAnalyticsItem::kTypeInt64:
- env->CallVoidMethod(mybundle, setLongID,
- keyName, (jlong) prop->u.int64Value);
- break;
- case MediaAnalyticsItem::kTypeDouble:
- env->CallVoidMethod(mybundle, setDoubleID,
- keyName, (jdouble) prop->u.doubleValue);
- break;
- case MediaAnalyticsItem::kTypeCString:
- env->CallVoidMethod(mybundle, setStringID, keyName,
- env->NewStringUTF(prop->u.CStringValue));
- break;
- default:
- ALOGE("to_String bad item type: %d for %s",
- prop->mType, prop->mName);
- break;
- }
- }
-
- return mybundle;
+ for (const auto &prop : *item) {
+ const char *name = prop.getName();
+ if (name == nullptr) continue;
+ prop.visit([&] (auto &value) { bh.put(name, value); });
+ }
+ return bh.bundle;
}
-// convert the specified batch metrics attributes to a persistent bundle.
-// The encoding of the byte array is specified in
-// frameworks/av/media/libmediametrics/MediaAnalyticsItem.cpp
-//
-// type encodings; matches frameworks/av/media/libmediametrics/MediaAnalyticsItem.cpp
-enum { kInt32 = 0, kInt64, kDouble, kRate, kCString};
+// Implementation of MediaMetrics.native_submit_bytebuffer():
+// delivers the byte buffer to the mediametrics service.
+static jint android_media_MediaMetrics_submit_bytebuffer(
+ JNIEnv* env, jobject thiz, jobject byteBuffer, jint length)
+{
+ const jbyte* buffer =
+ reinterpret_cast<const jbyte*>(env->GetDirectBufferAddress(byteBuffer));
+ if (buffer == nullptr) {
+        ALOGE("Error retrieving direct buffer address for metrics, cannot submit");
+ return (jint)BAD_VALUE;
+ }
-jobject MediaMetricsJNI::writeAttributesToBundle(JNIEnv* env, jobject mybundle, char *buffer, size_t length) {
- ALOGV("writeAttributes()");
+ sp<IMediaMetricsService> service = mediametrics::BaseItem::getService();
+ if (service == nullptr) {
+ ALOGW("Cannot retrieve mediametrics service");
+ return (jint)NO_INIT;
+ }
+ return (jint)service->submitBuffer((char *)buffer, length);
+}
- if (buffer == NULL || length <= 0) {
- ALOGW("bad parameters to writeAttributesToBundle()");
+// Helper function to convert a native PersistableBundle to a Java
+// PersistableBundle.
+jobject MediaMetricsJNI::nativeToJavaPersistableBundle(JNIEnv *env,
+ os::PersistableBundle* nativeBundle) {
+ if (env == NULL || nativeBundle == NULL) {
+ ALOGE("Unexpected NULL parmeter");
return NULL;
}
- jclass clazzBundle = env->FindClass("android/os/PersistableBundle");
- if (clazzBundle==NULL) {
- ALOGE("can't find android/os/PersistableBundle");
- return NULL;
+ // Create a Java parcel with the native parcel data.
+ // Then create a new PersistableBundle with that parcel as a parameter.
+ jobject jParcel = android::createJavaParcelObject(env);
+ if (jParcel == NULL) {
+ ALOGE("Failed to create a Java Parcel.");
+ return NULL;
+ }
+
+ android::Parcel* nativeParcel = android::parcelForJavaObject(env, jParcel);
+ if (nativeParcel == NULL) {
+ ALOGE("Failed to get the native Parcel.");
+ return NULL;
}
- // sometimes the caller provides one for us to fill
- if (mybundle == NULL) {
- // create the bundle
- jmethodID constructID = env->GetMethodID(clazzBundle, "<init>", "()V");
- mybundle = env->NewObject(clazzBundle, constructID);
- if (mybundle == NULL) {
- ALOGD("unable to create mybundle");
- return NULL;
- }
- }
-
- int left = length;
- char *buf = buffer;
-
- // grab methods that we can invoke
- jmethodID setIntID = env->GetMethodID(clazzBundle, "putInt", "(Ljava/lang/String;I)V");
- jmethodID setLongID = env->GetMethodID(clazzBundle, "putLong", "(Ljava/lang/String;J)V");
- jmethodID setDoubleID = env->GetMethodID(clazzBundle, "putDouble", "(Ljava/lang/String;D)V");
- jmethodID setStringID = env->GetMethodID(clazzBundle, "putString", "(Ljava/lang/String;Ljava/lang/String;)V");
-
-
-#define _EXTRACT(size, val) \
- { if ((size) > left) goto badness; memcpy(&val, buf, (size)); buf += (size); left -= (size);}
-#define _SKIP(size) \
- { if ((size) > left) goto badness; buf += (size); left -= (size);}
-
- int32_t bufsize;
- _EXTRACT(sizeof(int32_t), bufsize);
- if (bufsize != length) {
- goto badness;
- }
- int32_t proto;
- _EXTRACT(sizeof(int32_t), proto);
- if (proto != 0) {
- ALOGE("unsupported wire protocol %d", proto);
- goto badness;
- }
-
- int32_t count;
- _EXTRACT(sizeof(int32_t), count);
-
- // iterate through my attributes
- // -- get name, get type, get value, insert into bundle appropriately.
- for (int i = 0 ; i < count; i++ ) {
- // prop name len (int16)
- int16_t keylen;
- _EXTRACT(sizeof(int16_t), keylen);
- if (keylen <= 0) goto badness;
- // prop name itself
- char *key = buf;
- jstring keyName = env->NewStringUTF(buf);
- _SKIP(keylen);
-
- // prop type (int8_t)
- int8_t attrType;
- _EXTRACT(sizeof(int8_t), attrType);
-
- int16_t attrSize;
- _EXTRACT(sizeof(int16_t), attrSize);
-
- switch (attrType) {
- case kInt32:
- {
- int32_t i32;
- _EXTRACT(sizeof(int32_t), i32);
- env->CallVoidMethod(mybundle, setIntID,
- keyName, (jint) i32);
- break;
- }
- case kInt64:
- {
- int64_t i64;
- _EXTRACT(sizeof(int64_t), i64);
- env->CallVoidMethod(mybundle, setLongID,
- keyName, (jlong) i64);
- break;
- }
- case kDouble:
- {
- double d64;
- _EXTRACT(sizeof(double), d64);
- env->CallVoidMethod(mybundle, setDoubleID,
- keyName, (jdouble) d64);
- break;
- }
- case kCString:
- {
- jstring value = env->NewStringUTF(buf);
- env->CallVoidMethod(mybundle, setStringID,
- keyName, value);
- _SKIP(attrSize);
- break;
- }
- default:
- ALOGW("ignoring Attribute '%s' unknown type: %d",
- key, attrType);
- _SKIP(attrSize);
- break;
- }
- }
-
- // should have consumed it all
- if (left != 0) {
- ALOGW("did not consume entire buffer; left(%d) != 0", left);
- goto badness;
- }
-
- return mybundle;
-
- badness:
- return NULL;
+
+ android::status_t result = nativeBundle->writeToParcel(nativeParcel);
+ nativeParcel->setDataPosition(0);
+ if (result != android::OK) {
+ ALOGE("Failed to write nativeBundle to Parcel: %d.", result);
+ return NULL;
+ }
+
+#define STATIC_INIT_JNI(T, obj, method, globalref, ...) \
+ static T obj{};\
+ if (obj == NULL) { \
+ obj = method(__VA_ARGS__); \
+ if (obj == NULL) { \
+ ALOGE("%s can't find " #obj, __func__); \
+ return NULL; \
+ } else { \
+ obj = globalref; \
+ }\
+ } \
+
+ STATIC_INIT_JNI(jclass, clazzBundle, env->FindClass,
+ static_cast<jclass>(env->NewGlobalRef(clazzBundle)),
+ "android/os/PersistableBundle");
+ STATIC_INIT_JNI(jfieldID, bundleCreatorId, env->GetStaticFieldID,
+ bundleCreatorId,
+ clazzBundle, "CREATOR", "Landroid/os/Parcelable$Creator;");
+ STATIC_INIT_JNI(jobject, bundleCreator, env->GetStaticObjectField,
+ env->NewGlobalRef(bundleCreator),
+ clazzBundle, bundleCreatorId);
+ STATIC_INIT_JNI(jclass, clazzCreator, env->FindClass,
+ static_cast<jclass>(env->NewGlobalRef(clazzCreator)),
+ "android/os/Parcelable$Creator");
+ STATIC_INIT_JNI(jmethodID, createFromParcelId, env->GetMethodID,
+ createFromParcelId,
+ clazzCreator, "createFromParcel", "(Landroid/os/Parcel;)Ljava/lang/Object;");
+
+ jobject newBundle = env->CallObjectMethod(bundleCreator, createFromParcelId, jParcel);
+ if (newBundle == NULL) {
+ ALOGE("Failed to create a new PersistableBundle "
+ "from the createFromParcel call.");
+ }
+
+ return newBundle;
}
-}; // namespace android
+// ----------------------------------------------------------------------------
+
+static constexpr JNINativeMethod gMethods[] = {
+ {"native_submit_bytebuffer", "(Ljava/nio/ByteBuffer;I)I",
+ (void *)android_media_MediaMetrics_submit_bytebuffer},
+};
+// Registers the native methods, called from core/jni/AndroidRuntime.cpp
+int register_android_media_MediaMetrics(JNIEnv *env)
+{
+ return AndroidRuntime::registerNativeMethods(
+ env, "android/media/MediaMetrics", gMethods, std::size(gMethods));
+}
+
+}; // namespace android
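The rewritten writeMetricsToBundle() and nativeToJavaPersistableBundle() above replace per-call FindClass/GetMethodID lookups with the STATIC_INIT_JNI macro, which caches each lookup in a function-local static and promotes JNI references to global refs. A minimal sketch of roughly what one expansion amounts to, with the macro written out by hand (illustrative only; the helper name lookupBundleCreator is not part of the patch):

    #include <jni.h>

    // Illustrative only: roughly what one STATIC_INIT_JNI expansion does for the
    // android/os/PersistableBundle CREATOR lookup.
    static jobject lookupBundleCreator(JNIEnv* env) {
        // Resolved once per process; later calls reuse the cached values.
        static jclass clazzBundle = nullptr;
        if (clazzBundle == nullptr) {
            jclass local = env->FindClass("android/os/PersistableBundle");
            if (local == nullptr) {
                return nullptr;  // a ClassNotFoundException is now pending
            }
            // Promote the local ref to a global ref so the cache outlives this call.
            clazzBundle = static_cast<jclass>(env->NewGlobalRef(local));
            env->DeleteLocalRef(local);
        }
        static jfieldID creatorId = nullptr;
        if (creatorId == nullptr) {
            creatorId = env->GetStaticFieldID(clazzBundle, "CREATOR",
                                              "Landroid/os/Parcelable$Creator;");
            if (creatorId == nullptr) {
                return nullptr;
            }
        }
        // Returned here as an ordinary local ref; the real code also keeps this
        // object behind a NewGlobalRef via the macro's globalref argument.
        return env->GetStaticObjectField(clazzBundle, creatorId);
    }

jfieldID and jmethodID values are plain handles rather than JNI references, which is why only the jclass (and the cached CREATOR object) needs NewGlobalRef in the macro expansions above.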
diff --git a/media/jni/android_media_MediaMetricsJNI.h b/media/jni/android_media_MediaMetricsJNI.h
index a10780f5c5c3..bcad5587d909 100644
--- a/media/jni/android_media_MediaMetricsJNI.h
+++ b/media/jni/android_media_MediaMetricsJNI.h
@@ -19,15 +19,16 @@
#include <jni.h>
#include <nativehelper/JNIHelp.h>
-#include <media/MediaAnalyticsItem.h>
+#include <media/MediaMetricsItem.h>
+#include <binder/PersistableBundle.h>
// Copied from core/jni/ (libandroid_runtime.so)
namespace android {
class MediaMetricsJNI {
public:
- static jobject writeMetricsToBundle(JNIEnv* env, MediaAnalyticsItem *item, jobject mybundle);
- static jobject writeAttributesToBundle(JNIEnv* env, jobject mybundle, char *buffer, size_t length);
+ static jobject writeMetricsToBundle(JNIEnv* env, mediametrics::Item *item, jobject mybundle);
+ static jobject nativeToJavaPersistableBundle(JNIEnv*, os::PersistableBundle*);
};
}; // namespace android
diff --git a/media/jni/android_media_MediaMuxer.cpp b/media/jni/android_media_MediaMuxer.cpp
index 243ee4f7ebba..267917653efb 100644
--- a/media/jni/android_media_MediaMuxer.cpp
+++ b/media/jni/android_media_MediaMuxer.cpp
@@ -230,8 +230,9 @@ static void android_media_MediaMuxer_stop(JNIEnv *env, jclass /* clazz */,
status_t err = muxer->stop();
if (err != OK) {
+ ALOGE("Error during stop:%d", err);
jniThrowException(env, "java/lang/IllegalStateException",
- "Failed to stop the muxer");
+                          "Error during stop(); the muxer has already stopped");
return;
}
}
diff --git a/media/jni/android_media_MediaPlayer.cpp b/media/jni/android_media_MediaPlayer.cpp
index 40d213fe418b..82b746f6a9e5 100644
--- a/media/jni/android_media_MediaPlayer.cpp
+++ b/media/jni/android_media_MediaPlayer.cpp
@@ -23,7 +23,7 @@
#include <media/AudioResamplerPublic.h>
#include <media/IMediaHTTPService.h>
#include <media/MediaPlayerInterface.h>
-#include <media/MediaAnalyticsItem.h>
+#include <media/MediaMetricsItem.h>
#include <media/stagefright/foundation/ByteUtils.h> // for FOURCC definition
#include <stdio.h>
#include <assert.h>
@@ -682,7 +682,7 @@ android_media_MediaPlayer_native_getMetrics(JNIEnv *env, jobject thiz)
return (jobject) NULL;
}
- std::unique_ptr<MediaAnalyticsItem> item(MediaAnalyticsItem::create());
+ std::unique_ptr<mediametrics::Item> item(mediametrics::Item::create());
item->readFromParcel(p);
jobject mybundle = MediaMetricsJNI::writeMetricsToBundle(env, item.get(), NULL);
@@ -1447,13 +1447,13 @@ extern int register_android_media_MediaHTTPConnection(JNIEnv *env);
extern int register_android_media_MediaMetadataRetriever(JNIEnv *env);
extern int register_android_media_MediaMuxer(JNIEnv *env);
extern int register_android_media_MediaRecorder(JNIEnv *env);
-extern int register_android_media_MediaScanner(JNIEnv *env);
extern int register_android_media_MediaSync(JNIEnv *env);
extern int register_android_media_ResampleInputStream(JNIEnv *env);
extern int register_android_media_MediaProfiles(JNIEnv *env);
extern int register_android_mtp_MtpDatabase(JNIEnv *env);
extern int register_android_mtp_MtpDevice(JNIEnv *env);
extern int register_android_mtp_MtpServer(JNIEnv *env);
+extern int register_android_media_MediaTranscodeManager(JNIEnv *env);
jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
{
@@ -1491,11 +1491,6 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
goto bail;
}
- if (register_android_media_MediaScanner(env) < 0) {
- ALOGE("ERROR: MediaScanner native registration failed\n");
- goto bail;
- }
-
if (register_android_media_MediaMetadataRetriever(env) < 0) {
ALOGE("ERROR: MediaMetadataRetriever native registration failed\n");
goto bail;
@@ -1571,6 +1566,11 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
goto bail;
}
+ if (register_android_media_MediaTranscodeManager(env) < 0) {
+ ALOGE("ERROR: MediaTranscodeManager native registration failed");
+ goto bail;
+ }
+
/* success -- return valid version number */
result = JNI_VERSION_1_4;
diff --git a/media/jni/android_media_MediaRecorder.cpp b/media/jni/android_media_MediaRecorder.cpp
index a70de6d6f5d3..f99dc012be95 100644
--- a/media/jni/android_media_MediaRecorder.cpp
+++ b/media/jni/android_media_MediaRecorder.cpp
@@ -29,7 +29,7 @@
#include <gui/Surface.h>
#include <camera/Camera.h>
#include <media/mediarecorder.h>
-#include <media/MediaAnalyticsItem.h>
+#include <media/MediaMetricsItem.h>
#include <media/MicrophoneInfo.h>
#include <media/stagefright/PersistentSurface.h>
#include <utils/threads.h>
@@ -227,6 +227,36 @@ android_media_MediaRecorder_setAudioSource(JNIEnv *env, jobject thiz, jint as)
}
static void
+android_media_MediaRecorder_setPrivacySensitive(JNIEnv *env, jobject thiz, jboolean privacySensitive)
+{
+ ALOGV("%s(%s)", __func__, privacySensitive ? "true" : "false");
+
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
+ if (mr == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return;
+ }
+ process_media_recorder_call(env, mr->setPrivacySensitive(privacySensitive),
+ "java/lang/RuntimeException", "setPrivacySensitive failed.");
+}
+
+static jboolean
+android_media_MediaRecorder_isPrivacySensitive(JNIEnv *env, jobject thiz)
+{
+ sp<MediaRecorder> mr = getMediaRecorder(env, thiz);
+ if (mr == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException", NULL);
+ return false;
+ }
+    bool privacySensitive = false;
+ process_media_recorder_call(env, mr->isPrivacySensitive(&privacySensitive),
+ "java/lang/RuntimeException", "isPrivacySensitive failed.");
+
+ ALOGV("%s() -> %s", __func__, privacySensitive ? "true" : "false");
+ return privacySensitive;
+}
+
+static void
android_media_MediaRecorder_setOutputFormat(JNIEnv *env, jobject thiz, jint of)
{
ALOGV("setOutputFormat(%d)", of);
@@ -664,7 +694,7 @@ android_media_MediaRecorder_native_getMetrics(JNIEnv *env, jobject thiz)
}
// build and return the Bundle
- std::unique_ptr<MediaAnalyticsItem> item(MediaAnalyticsItem::create());
+ std::unique_ptr<mediametrics::Item> item(mediametrics::Item::create());
item->readFromParcel(reply);
jobject mybundle = MediaMetricsJNI::writeMetricsToBundle(env, item.get(), NULL);
@@ -817,6 +847,8 @@ static const JNINativeMethod gMethods[] = {
{"setCamera", "(Landroid/hardware/Camera;)V", (void *)android_media_MediaRecorder_setCamera},
{"setVideoSource", "(I)V", (void *)android_media_MediaRecorder_setVideoSource},
{"setAudioSource", "(I)V", (void *)android_media_MediaRecorder_setAudioSource},
+ {"setPrivacySensitive", "(Z)V", (void *)android_media_MediaRecorder_setPrivacySensitive},
+ {"isPrivacySensitive", "()Z", (void *)android_media_MediaRecorder_isPrivacySensitive},
{"setOutputFormat", "(I)V", (void *)android_media_MediaRecorder_setOutputFormat},
{"setVideoEncoder", "(I)V", (void *)android_media_MediaRecorder_setVideoEncoder},
{"setAudioEncoder", "(I)V", (void *)android_media_MediaRecorder_setAudioEncoder},
diff --git a/media/jni/android_media_MediaScanner.cpp b/media/jni/android_media_MediaScanner.cpp
deleted file mode 100644
index 1fb0faa5cd58..000000000000
--- a/media/jni/android_media_MediaScanner.cpp
+++ /dev/null
@@ -1,468 +0,0 @@
-/*
-**
-** Copyright 2007, The Android Open Source Project
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-** http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "MediaScannerJNI"
-#include <utils/Log.h>
-#include <utils/threads.h>
-#include <media/mediascanner.h>
-#include <media/stagefright/StagefrightMediaScanner.h>
-#include <private/media/VideoFrame.h>
-
-#include "jni.h"
-#include <nativehelper/JNIPlatformHelp.h>
-#include "android_runtime/AndroidRuntime.h"
-#include "android_runtime/Log.h"
-#include <android-base/macros.h> // for FALLTHROUGH_INTENDED
-
-using namespace android;
-
-
-static const char* const kClassMediaScannerClient =
- "android/media/MediaScannerClient";
-
-static const char* const kClassMediaScanner =
- "android/media/MediaScanner";
-
-static const char* const kRunTimeException =
- "java/lang/RuntimeException";
-
-static const char* const kIllegalArgumentException =
- "java/lang/IllegalArgumentException";
-
-struct fields_t {
- jfieldID context;
-};
-static fields_t fields;
-
-static status_t checkAndClearExceptionFromCallback(JNIEnv* env, const char* methodName) {
- if (env->ExceptionCheck()) {
- ALOGE("An exception was thrown by callback '%s'.", methodName);
- LOGE_EX(env);
- env->ExceptionClear();
- return UNKNOWN_ERROR;
- }
- return OK;
-}
-
-// stolen from dalvik/vm/checkJni.cpp
-static bool isValidUtf8(const char* bytes) {
- while (*bytes != '\0') {
- unsigned char utf8 = *(bytes++);
- // Switch on the high four bits.
- switch (utf8 >> 4) {
- case 0x00:
- case 0x01:
- case 0x02:
- case 0x03:
- case 0x04:
- case 0x05:
- case 0x06:
- case 0x07:
- // Bit pattern 0xxx. No need for any extra bytes.
- break;
- case 0x08:
- case 0x09:
- case 0x0a:
- case 0x0b:
- case 0x0f:
- /*
- * Bit pattern 10xx or 1111, which are illegal start bytes.
- * Note: 1111 is valid for normal UTF-8, but not the
- * modified UTF-8 used here.
- */
- return false;
- case 0x0e:
- // Bit pattern 1110, so there are two additional bytes.
- utf8 = *(bytes++);
- if ((utf8 & 0xc0) != 0x80) {
- return false;
- }
- // Fall through to take care of the final byte.
- FALLTHROUGH_INTENDED;
- case 0x0c:
- case 0x0d:
- // Bit pattern 110x, so there is one additional byte.
- utf8 = *(bytes++);
- if ((utf8 & 0xc0) != 0x80) {
- return false;
- }
- break;
- }
- }
- return true;
-}
-
-class MyMediaScannerClient : public MediaScannerClient
-{
-public:
- MyMediaScannerClient(JNIEnv *env, jobject client)
- : mEnv(env),
- mClient(env->NewGlobalRef(client)),
- mScanFileMethodID(0),
- mHandleStringTagMethodID(0),
- mSetMimeTypeMethodID(0)
- {
- ALOGV("MyMediaScannerClient constructor");
- jclass mediaScannerClientInterface =
- env->FindClass(kClassMediaScannerClient);
-
- if (mediaScannerClientInterface == NULL) {
- ALOGE("Class %s not found", kClassMediaScannerClient);
- } else {
- mScanFileMethodID = env->GetMethodID(
- mediaScannerClientInterface,
- "scanFile",
- "(Ljava/lang/String;JJZZ)V");
-
- mHandleStringTagMethodID = env->GetMethodID(
- mediaScannerClientInterface,
- "handleStringTag",
- "(Ljava/lang/String;Ljava/lang/String;)V");
-
- mSetMimeTypeMethodID = env->GetMethodID(
- mediaScannerClientInterface,
- "setMimeType",
- "(Ljava/lang/String;)V");
- }
- }
-
- virtual ~MyMediaScannerClient()
- {
- ALOGV("MyMediaScannerClient destructor");
- mEnv->DeleteGlobalRef(mClient);
- }
-
- virtual status_t scanFile(const char* path, long long lastModified,
- long long fileSize, bool isDirectory, bool noMedia)
- {
- ALOGV("scanFile: path(%s), time(%lld), size(%lld) and isDir(%d)",
- path, lastModified, fileSize, isDirectory);
-
- jstring pathStr;
- if ((pathStr = mEnv->NewStringUTF(path)) == NULL) {
- mEnv->ExceptionClear();
- return NO_MEMORY;
- }
-
- mEnv->CallVoidMethod(mClient, mScanFileMethodID, pathStr, lastModified,
- fileSize, isDirectory, noMedia);
-
- mEnv->DeleteLocalRef(pathStr);
- return checkAndClearExceptionFromCallback(mEnv, "scanFile");
- }
-
- virtual status_t handleStringTag(const char* name, const char* value)
- {
- ALOGV("handleStringTag: name(%s) and value(%s)", name, value);
- jstring nameStr, valueStr;
- if ((nameStr = mEnv->NewStringUTF(name)) == NULL) {
- mEnv->ExceptionClear();
- return NO_MEMORY;
- }
- char *cleaned = NULL;
- if (!isValidUtf8(value)) {
- cleaned = strdup(value);
- char *chp = cleaned;
- char ch;
- while ((ch = *chp)) {
- if (ch & 0x80) {
- *chp = '?';
- }
- chp++;
- }
- value = cleaned;
- }
- valueStr = mEnv->NewStringUTF(value);
- free(cleaned);
- if (valueStr == NULL) {
- mEnv->DeleteLocalRef(nameStr);
- mEnv->ExceptionClear();
- return NO_MEMORY;
- }
-
- mEnv->CallVoidMethod(
- mClient, mHandleStringTagMethodID, nameStr, valueStr);
-
- mEnv->DeleteLocalRef(nameStr);
- mEnv->DeleteLocalRef(valueStr);
- return checkAndClearExceptionFromCallback(mEnv, "handleStringTag");
- }
-
- virtual status_t setMimeType(const char* mimeType)
- {
- ALOGV("setMimeType: %s", mimeType);
- jstring mimeTypeStr;
- if ((mimeTypeStr = mEnv->NewStringUTF(mimeType)) == NULL) {
- mEnv->ExceptionClear();
- return NO_MEMORY;
- }
-
- mEnv->CallVoidMethod(mClient, mSetMimeTypeMethodID, mimeTypeStr);
-
- mEnv->DeleteLocalRef(mimeTypeStr);
- return checkAndClearExceptionFromCallback(mEnv, "setMimeType");
- }
-
-private:
- JNIEnv *mEnv;
- jobject mClient;
- jmethodID mScanFileMethodID;
- jmethodID mHandleStringTagMethodID;
- jmethodID mSetMimeTypeMethodID;
-};
-
-
-static MediaScanner *getNativeScanner_l(JNIEnv* env, jobject thiz)
-{
- return (MediaScanner *) env->GetLongField(thiz, fields.context);
-}
-
-static void setNativeScanner_l(JNIEnv* env, jobject thiz, MediaScanner *s)
-{
- env->SetLongField(thiz, fields.context, (jlong)s);
-}
-
-static void
-android_media_MediaScanner_processDirectory(
- JNIEnv *env, jobject thiz, jstring path, jobject client)
-{
- ALOGV("processDirectory");
- MediaScanner *mp = getNativeScanner_l(env, thiz);
- if (mp == NULL) {
- jniThrowException(env, kRunTimeException, "No scanner available");
- return;
- }
-
- if (path == NULL) {
- jniThrowException(env, kIllegalArgumentException, NULL);
- return;
- }
-
- const char *pathStr = env->GetStringUTFChars(path, NULL);
- if (pathStr == NULL) { // Out of memory
- return;
- }
-
- MyMediaScannerClient myClient(env, client);
- MediaScanResult result = mp->processDirectory(pathStr, myClient);
- if (result == MEDIA_SCAN_RESULT_ERROR) {
- ALOGE("An error occurred while scanning directory '%s'.", pathStr);
- }
- env->ReleaseStringUTFChars(path, pathStr);
-}
-
-static jboolean
-android_media_MediaScanner_processFile(
- JNIEnv *env, jobject thiz, jstring path,
- jstring mimeType, jobject client)
-{
- ALOGV("processFile");
-
- // Lock already hold by processDirectory
- MediaScanner *mp = getNativeScanner_l(env, thiz);
- if (mp == NULL) {
- jniThrowException(env, kRunTimeException, "No scanner available");
- return false;
- }
-
- if (path == NULL) {
- jniThrowException(env, kIllegalArgumentException, NULL);
- return false;
- }
-
- const char *pathStr = env->GetStringUTFChars(path, NULL);
- if (pathStr == NULL) { // Out of memory
- return false;
- }
-
- const char *mimeTypeStr =
- (mimeType ? env->GetStringUTFChars(mimeType, NULL) : NULL);
- if (mimeType && mimeTypeStr == NULL) { // Out of memory
- // ReleaseStringUTFChars can be called with an exception pending.
- env->ReleaseStringUTFChars(path, pathStr);
- return false;
- }
-
- MyMediaScannerClient myClient(env, client);
- MediaScanResult result = mp->processFile(pathStr, mimeTypeStr, myClient);
- if (result == MEDIA_SCAN_RESULT_ERROR) {
- ALOGE("An error occurred while scanning file '%s'.", pathStr);
- }
- env->ReleaseStringUTFChars(path, pathStr);
- if (mimeType) {
- env->ReleaseStringUTFChars(mimeType, mimeTypeStr);
- }
- return result != MEDIA_SCAN_RESULT_ERROR;
-}
-
-static void
-android_media_MediaScanner_setLocale(
- JNIEnv *env, jobject thiz, jstring locale)
-{
- ALOGV("setLocale");
- MediaScanner *mp = getNativeScanner_l(env, thiz);
- if (mp == NULL) {
- jniThrowException(env, kRunTimeException, "No scanner available");
- return;
- }
-
- if (locale == NULL) {
- jniThrowException(env, kIllegalArgumentException, NULL);
- return;
- }
- const char *localeStr = env->GetStringUTFChars(locale, NULL);
- if (localeStr == NULL) { // Out of memory
- return;
- }
- mp->setLocale(localeStr);
-
- env->ReleaseStringUTFChars(locale, localeStr);
-}
-
-static jbyteArray
-android_media_MediaScanner_extractAlbumArt(
- JNIEnv *env, jobject thiz, jobject fileDescriptor)
-{
- ALOGV("extractAlbumArt");
- MediaScanner *mp = getNativeScanner_l(env, thiz);
- if (mp == NULL) {
- jniThrowException(env, kRunTimeException, "No scanner available");
- return NULL;
- }
-
- if (fileDescriptor == NULL) {
- jniThrowException(env, kIllegalArgumentException, NULL);
- return NULL;
- }
-
- int fd = jniGetFDFromFileDescriptor(env, fileDescriptor);
- MediaAlbumArt* mediaAlbumArt = mp->extractAlbumArt(fd);
- if (mediaAlbumArt == NULL) {
- return NULL;
- }
-
- jbyteArray array = env->NewByteArray(mediaAlbumArt->size());
- if (array != NULL) {
- const jbyte* data =
- reinterpret_cast<const jbyte*>(mediaAlbumArt->data());
- env->SetByteArrayRegion(array, 0, mediaAlbumArt->size(), data);
- }
-
- free(mediaAlbumArt);
- // if NewByteArray() returned NULL, an out-of-memory
- // exception will have been raised. I just want to
- // return null in that case.
- env->ExceptionClear();
- return array;
-}
-
-// This function gets a field ID, which in turn causes class initialization.
-// It is called from a static block in MediaScanner, which won't run until the
-// first time an instance of this class is used.
-static void
-android_media_MediaScanner_native_init(JNIEnv *env)
-{
- ALOGV("native_init");
- jclass clazz = env->FindClass(kClassMediaScanner);
- if (clazz == NULL) {
- return;
- }
-
- fields.context = env->GetFieldID(clazz, "mNativeContext", "J");
- if (fields.context == NULL) {
- return;
- }
-}
-
-static void
-android_media_MediaScanner_native_setup(JNIEnv *env, jobject thiz)
-{
- ALOGV("native_setup");
- MediaScanner *mp = new StagefrightMediaScanner;
-
- if (mp == NULL) {
- jniThrowException(env, kRunTimeException, "Out of memory");
- return;
- }
-
- env->SetLongField(thiz, fields.context, (jlong)mp);
-}
-
-static void
-android_media_MediaScanner_native_finalize(JNIEnv *env, jobject thiz)
-{
- ALOGV("native_finalize");
- MediaScanner *mp = getNativeScanner_l(env, thiz);
- if (mp == 0) {
- return;
- }
- delete mp;
- setNativeScanner_l(env, thiz, 0);
-}
-
-static const JNINativeMethod gMethods[] = {
- {
- "processDirectory",
- "(Ljava/lang/String;Landroid/media/MediaScannerClient;)V",
- (void *)android_media_MediaScanner_processDirectory
- },
-
- {
- "processFile",
- "(Ljava/lang/String;Ljava/lang/String;Landroid/media/MediaScannerClient;)Z",
- (void *)android_media_MediaScanner_processFile
- },
-
- {
- "setLocale",
- "(Ljava/lang/String;)V",
- (void *)android_media_MediaScanner_setLocale
- },
-
- {
- "extractAlbumArt",
- "(Ljava/io/FileDescriptor;)[B",
- (void *)android_media_MediaScanner_extractAlbumArt
- },
-
- {
- "native_init",
- "()V",
- (void *)android_media_MediaScanner_native_init
- },
-
- {
- "native_setup",
- "()V",
- (void *)android_media_MediaScanner_native_setup
- },
-
- {
- "native_finalize",
- "()V",
- (void *)android_media_MediaScanner_native_finalize
- },
-};
-
-// This function only registers the native methods, and is called from
-// JNI_OnLoad in android_media_MediaPlayer.cpp
-int register_android_media_MediaScanner(JNIEnv *env)
-{
- return AndroidRuntime::registerNativeMethods(env,
- kClassMediaScanner, gMethods, NELEM(gMethods));
-}
diff --git a/media/jni/android_media_MediaSync.h b/media/jni/android_media_MediaSync.h
index 6f808858a0c2..09d69d115479 100644
--- a/media/jni/android_media_MediaSync.h
+++ b/media/jni/android_media_MediaSync.h
@@ -17,6 +17,7 @@
#ifndef _ANDROID_MEDIA_MEDIASYNC_H_
#define _ANDROID_MEDIA_MEDIASYNC_H_
+#include <media/AudioResamplerPublic.h>
#include <media/stagefright/foundation/ABase.h>
#include <media/stagefright/MediaSync.h>
#include <utils/Errors.h>
@@ -24,7 +25,6 @@
namespace android {
-struct AudioPlaybackRate;
class AudioTrack;
class IGraphicBufferProducer;
struct MediaClock;
diff --git a/media/jni/android_media_MediaTranscodeManager.cpp b/media/jni/android_media_MediaTranscodeManager.cpp
new file mode 100644
index 000000000000..0b4048c1170c
--- /dev/null
+++ b/media/jni/android_media_MediaTranscodeManager.cpp
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2019, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+//#define LOG_NDEBUG 0
+#define LOG_TAG "MediaTranscodeManager_JNI"
+
+#include "android_runtime/AndroidRuntime.h"
+#include "jni.h"
+
+#include <nativehelper/JNIHelp.h>
+#include <utils/Log.h>
+
+namespace {
+
+// NOTE: Keep these enums in sync with their equivalents in MediaTranscodeManager.java.
+enum {
+ ID_INVALID = -1
+};
+
+enum {
+ EVENT_JOB_STARTED = 1,
+ EVENT_JOB_PROGRESSED = 2,
+ EVENT_JOB_FINISHED = 3,
+};
+
+enum {
+ RESULT_NONE = 1,
+ RESULT_SUCCESS = 2,
+ RESULT_ERROR = 3,
+ RESULT_CANCELED = 4,
+};
+
+struct {
+ jmethodID postEventFromNative;
+} gMediaTranscodeManagerClassInfo;
+
+using namespace android;
+
+void android_media_MediaTranscodeManager_native_init(JNIEnv *env, jclass clazz) {
+ ALOGV("android_media_MediaTranscodeManager_native_init");
+
+ gMediaTranscodeManagerClassInfo.postEventFromNative = env->GetMethodID(
+ clazz, "postEventFromNative", "(IJI)V");
+ LOG_ALWAYS_FATAL_IF(gMediaTranscodeManagerClassInfo.postEventFromNative == NULL,
+ "can't find android/media/MediaTranscodeManager.postEventFromNative");
+}
+
+jlong android_media_MediaTranscodeManager_requestUniqueJobID(
+ JNIEnv *env __unused, jobject thiz __unused) {
+ ALOGV("android_media_MediaTranscodeManager_reserveUniqueJobID");
+ static std::atomic_int32_t sJobIDCounter{0};
+ jlong id = (jlong)++sJobIDCounter;
+ return id;
+}
+
+jboolean android_media_MediaTranscodeManager_enqueueTranscodingRequest(
+ JNIEnv *env, jobject thiz, jlong id, jobject request, jobject context __unused) {
+ ALOGV("android_media_MediaTranscodeManager_enqueueTranscodingRequest");
+ if (!request) {
+        return false;
+ }
+
+ env->CallVoidMethod(thiz, gMediaTranscodeManagerClassInfo.postEventFromNative,
+ EVENT_JOB_FINISHED, id, RESULT_ERROR);
+ return true;
+}
+
+void android_media_MediaTranscodeManager_cancelTranscodingRequest(
+ JNIEnv *env __unused, jobject thiz __unused, jlong jobID __unused) {
+ ALOGV("android_media_MediaTranscodeManager_cancelTranscodingRequest");
+}
+
+const JNINativeMethod gMethods[] = {
+ { "native_init", "()V",
+ (void *)android_media_MediaTranscodeManager_native_init },
+ { "native_requestUniqueJobID", "()J",
+ (void *)android_media_MediaTranscodeManager_requestUniqueJobID },
+ { "native_enqueueTranscodingRequest",
+ "(JLandroid/media/MediaTranscodeManager$TranscodingRequest;Landroid/content/Context;)Z",
+ (void *)android_media_MediaTranscodeManager_enqueueTranscodingRequest },
+ { "native_cancelTranscodingRequest", "(J)V",
+ (void *)android_media_MediaTranscodeManager_cancelTranscodingRequest },
+};
+
+} // namespace anonymous
+
+int register_android_media_MediaTranscodeManager(JNIEnv *env) {
+ return AndroidRuntime::registerNativeMethods(env,
+ "android/media/MediaTranscodeManager", gMethods, NELEM(gMethods));
+}
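enqueueTranscodingRequest() is currently a stub: it immediately posts EVENT_JOB_FINISHED with RESULT_ERROR through the cached postEventFromNative ID on the calling thread. If a real implementation later reports progress from a native worker thread, that thread must be attached to the VM before making the (IJI)V callback. A minimal sketch under that assumption (postJobEvent and its parameter list are illustrative only):

    #include <jni.h>

    // Illustrative only: post the (IJI)V callback from a thread that may not yet
    // be attached to the Java VM.
    static void postJobEvent(JavaVM* vm, jobject managerRef, jmethodID postEventFromNative,
                             jint event, jlong jobId, jint result) {
        JNIEnv* env = nullptr;
        bool attached = false;
        if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_4) != JNI_OK) {
            if (vm->AttachCurrentThread(&env, nullptr) != JNI_OK) {
                return;  // cannot reach Java from this thread
            }
            attached = true;
        }
        env->CallVoidMethod(managerRef, postEventFromNative, event, jobId, result);
        if (attached) {
            vm->DetachCurrentThread();
        }
    }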
diff --git a/media/jni/android_media_Streams.cpp b/media/jni/android_media_Streams.cpp
index b7cbd97409a2..4fd515323181 100644
--- a/media/jni/android_media_Streams.cpp
+++ b/media/jni/android_media_Streams.cpp
@@ -28,67 +28,6 @@
namespace android {
-AssetStream::AssetStream(SkStream* stream)
- : mStream(stream), mPosition(0) {
-}
-
-AssetStream::~AssetStream() {
-}
-
-piex::Error AssetStream::GetData(
- const size_t offset, const size_t length, std::uint8_t* data) {
- // Seek first.
- if (mPosition != offset) {
- if (!mStream->seek(offset)) {
- return piex::Error::kFail;
- }
- }
-
- // Read bytes.
- size_t size = mStream->read((void*)data, length);
- mPosition = offset + size;
-
- return size == length ? piex::Error::kOk : piex::Error::kFail;
-}
-
-BufferedStream::BufferedStream(SkStream* stream)
- : mStream(stream) {
-}
-
-BufferedStream::~BufferedStream() {
-}
-
-piex::Error BufferedStream::GetData(
- const size_t offset, const size_t length, std::uint8_t* data) {
- // Seek first.
- if (offset + length > mStreamBuffer.bytesWritten()) {
- size_t sizeToRead = offset + length - mStreamBuffer.bytesWritten();
- if (sizeToRead <= kMinSizeToRead) {
- sizeToRead = kMinSizeToRead;
- }
-
- void* tempBuffer = malloc(sizeToRead);
- if (tempBuffer == NULL) {
- return piex::Error::kFail;
- }
-
- size_t bytesRead = mStream->read(tempBuffer, sizeToRead);
- if (bytesRead != sizeToRead) {
- free(tempBuffer);
- return piex::Error::kFail;
- }
- mStreamBuffer.write(tempBuffer, bytesRead);
- free(tempBuffer);
- }
-
- // Read bytes.
- if (mStreamBuffer.read((void*)data, offset, length)) {
- return piex::Error::kOk;
- } else {
- return piex::Error::kFail;
- }
-}
-
FileStream::FileStream(const int fd)
: mPosition(0) {
mFile = fdopen(fd, "r");
diff --git a/media/jni/android_media_Streams.h b/media/jni/android_media_Streams.h
index d174f9a6650c..800591c1654f 100644
--- a/media/jni/android_media_Streams.h
+++ b/media/jni/android_media_Streams.h
@@ -25,53 +25,9 @@
#include <utils/KeyedVector.h>
#include <utils/String8.h>
#include <utils/StrongPointer.h>
-#include <SkStream.h>
-
namespace android {
-class AssetStream : public piex::StreamInterface {
-private:
- SkStream *mStream;
- size_t mPosition;
-
-public:
- explicit AssetStream(SkStream* stream);
- ~AssetStream();
-
- // Reads 'length' amount of bytes from 'offset' to 'data'. The 'data' buffer
- // provided by the caller, guaranteed to be at least "length" bytes long.
- // On 'kOk' the 'data' pointer contains 'length' valid bytes beginning at
- // 'offset' bytes from the start of the stream.
- // Returns 'kFail' if 'offset' + 'length' exceeds the stream and does not
- // change the contents of 'data'.
- piex::Error GetData(
- const size_t offset, const size_t length, std::uint8_t* data) override;
-};
-
-class BufferedStream : public piex::StreamInterface {
-private:
- SkStream *mStream;
- // Growable memory stream
- SkDynamicMemoryWStream mStreamBuffer;
-
- // Minimum size to read on filling the buffer.
- const size_t kMinSizeToRead = 8192;
-
-public:
- explicit BufferedStream(SkStream* stream);
- ~BufferedStream();
-
- // Reads 'length' amount of bytes from 'offset' to 'data'. The 'data' buffer
- // provided by the caller, guaranteed to be at least "length" bytes long.
- // On 'kOk' the 'data' pointer contains 'length' valid bytes beginning at
- // 'offset' bytes from the start of the stream.
- // Returns 'kFail' if 'offset' + 'length' exceeds the stream and does not
- // change the contents of 'data'.
- piex::Error GetData(
- const size_t offset, const size_t length, std::uint8_t* data) override;
-};
-
class FileStream : public piex::StreamInterface {
private:
FILE *mFile;
diff --git a/media/jni/android_media_tv_Tuner.cpp b/media/jni/android_media_tv_Tuner.cpp
new file mode 100644
index 000000000000..515d610109ab
--- /dev/null
+++ b/media/jni/android_media_tv_Tuner.cpp
@@ -0,0 +1,3857 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TvTuner-JNI"
+#include <utils/Log.h>
+
+#include "android_media_MediaCodecLinearBlock.h"
+#include "android_media_tv_Tuner.h"
+#include "android_runtime/AndroidRuntime.h"
+
+#include <android-base/logging.h>
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <nativehelper/JNIHelp.h>
+#include <nativehelper/ScopedLocalRef.h>
+#include <utils/NativeHandle.h>
+
+#pragma GCC diagnostic ignored "-Wunused-function"
+
+using ::android::hardware::Void;
+using ::android::hardware::hidl_bitfield;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::tv::tuner::V1_0::AudioExtraMetaData;
+using ::android::hardware::tv::tuner::V1_0::Constant;
+using ::android::hardware::tv::tuner::V1_0::DataFormat;
+using ::android::hardware::tv::tuner::V1_0::DemuxAlpFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxAlpFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxAlpLengthType;
+using ::android::hardware::tv::tuner::V1_0::DemuxCapabilities;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterAvSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterDownloadEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterDownloadSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterIpPayloadEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMainType;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMediaEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterMmtpRecordEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterPesDataSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterPesEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterRecordSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSectionBits;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSectionEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSectionSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterTemiEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterTsRecordEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxIpAddress;
+using ::android::hardware::tv::tuner::V1_0::DemuxIpFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxIpFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxMmtpFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxMmtpPid;
+using ::android::hardware::tv::tuner::V1_0::DemuxQueueNotifyBits;
+using ::android::hardware::tv::tuner::V1_0::DemuxRecordScIndexType;
+using ::android::hardware::tv::tuner::V1_0::DemuxScHevcIndex;
+using ::android::hardware::tv::tuner::V1_0::DemuxScIndex;
+using ::android::hardware::tv::tuner::V1_0::DemuxTlvFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxTlvFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxTpid;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterSettings;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxTsIndex;
+using ::android::hardware::tv::tuner::V1_0::DvrSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendAnalogSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendAnalogSifStandard;
+using ::android::hardware::tv::tuner::V1_0::FrontendAnalogType;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Bandwidth;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3CodeRate;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3DemodOutputFormat;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Fec;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Modulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3PlpSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3Settings;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtsc3TimeInterleaveMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtscSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendAtscModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcAnnex;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcOuterFec;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbcSpectralInversion;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsCodeRate;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsPilot;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsRolloff;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsStandard;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbsVcmMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtBandwidth;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtCoderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtConstellation;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtGuardInterval;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtHierarchy;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtPlpMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtStandard;
+using ::android::hardware::tv::tuner::V1_0::FrontendDvbtTransmissionMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendInnerFec;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Coderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Modulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Rolloff;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbs3Settings;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsCoderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsRolloff;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbsStreamIdType;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtBandwidth;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtCoderate;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtGuardInterval;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtMode;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtModulation;
+using ::android::hardware::tv::tuner::V1_0::FrontendIsdbtSettings;
+using ::android::hardware::tv::tuner::V1_0::FrontendModulationStatus;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanAtsc3PlpInfo;
+using ::android::hardware::tv::tuner::V1_0::FrontendStatus;
+using ::android::hardware::tv::tuner::V1_0::FrontendStatusAtsc3PlpInfo;
+using ::android::hardware::tv::tuner::V1_0::FrontendStatusType;
+using ::android::hardware::tv::tuner::V1_0::FrontendType;
+using ::android::hardware::tv::tuner::V1_0::ITuner;
+using ::android::hardware::tv::tuner::V1_0::LnbPosition;
+using ::android::hardware::tv::tuner::V1_0::LnbTone;
+using ::android::hardware::tv::tuner::V1_0::LnbVoltage;
+using ::android::hardware::tv::tuner::V1_0::PlaybackSettings;
+using ::android::hardware::tv::tuner::V1_0::RecordSettings;
+
+struct fields_t {
+ jfieldID tunerContext;
+ jfieldID lnbContext;
+ jfieldID filterContext;
+ jfieldID timeFilterContext;
+ jfieldID descramblerContext;
+ jfieldID dvrRecorderContext;
+ jfieldID dvrPlaybackContext;
+ jfieldID mediaEventContext;
+ jmethodID frontendInitID;
+ jmethodID filterInitID;
+ jmethodID timeFilterInitID;
+ jmethodID dvrRecorderInitID;
+ jmethodID dvrPlaybackInitID;
+ jmethodID onFrontendEventID;
+ jmethodID onFilterStatusID;
+ jmethodID onFilterEventID;
+ jmethodID lnbInitID;
+ jmethodID onLnbEventID;
+ jmethodID onLnbDiseqcMessageID;
+ jmethodID onDvrRecordStatusID;
+ jmethodID onDvrPlaybackStatusID;
+ jmethodID descramblerInitID;
+ jmethodID linearBlockInitID;
+ jmethodID linearBlockSetInternalStateID;
+};
+
+static fields_t gFields;
+
+
+static int IP_V4_LENGTH = 4;
+static int IP_V6_LENGTH = 16;
+
+void DestroyCallback(const C2Buffer * /* buf */, void *arg) {
+ android::sp<android::MediaEvent> event = (android::MediaEvent *)arg;
+ event->mAvHandleRefCnt--;
+ event->finalize();
+}
+
+namespace android {
+/////////////// LnbCallback ///////////////////////
+LnbCallback::LnbCallback(jobject lnbObj, LnbId id) : mId(id) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ mLnb = env->NewWeakGlobalRef(lnbObj);
+}
+
+Return<void> LnbCallback::onEvent(LnbEventType lnbEventType) {
+ ALOGD("LnbCallback::onEvent, type=%d", lnbEventType);
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->CallVoidMethod(
+ mLnb,
+ gFields.onLnbEventID,
+ (jint)lnbEventType);
+ return Void();
+}
+Return<void> LnbCallback::onDiseqcMessage(const hidl_vec<uint8_t>& diseqcMessage) {
+ ALOGD("LnbCallback::onDiseqcMessage");
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jbyteArray array = env->NewByteArray(diseqcMessage.size());
+ env->SetByteArrayRegion(
+            array, 0, diseqcMessage.size(),
+            reinterpret_cast<const jbyte*>(&diseqcMessage[0]));
+
+ env->CallVoidMethod(
+ mLnb,
+ gFields.onLnbDiseqcMessageID,
+ array);
+ return Void();
+}
+
+/////////////// Lnb ///////////////////////
+
+Lnb::Lnb(sp<ILnb> sp, jobject obj) : mLnbSp(sp) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ mLnbObj = env->NewWeakGlobalRef(obj);
+}
+
+Lnb::~Lnb() {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->DeleteWeakGlobalRef(mLnbObj);
+ mLnbObj = NULL;
+}
+
+sp<ILnb> Lnb::getILnb() {
+ return mLnbSp;
+}
+
+/////////////// DvrCallback ///////////////////////
+Return<void> DvrCallback::onRecordStatus(RecordStatus status) {
+ ALOGD("DvrCallback::onRecordStatus");
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->CallVoidMethod(
+ mDvr,
+ gFields.onDvrRecordStatusID,
+ (jint) status);
+ return Void();
+}
+
+Return<void> DvrCallback::onPlaybackStatus(PlaybackStatus status) {
+ ALOGD("DvrCallback::onPlaybackStatus");
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->CallVoidMethod(
+ mDvr,
+ gFields.onDvrPlaybackStatusID,
+ (jint) status);
+ return Void();
+}
+
+void DvrCallback::setDvr(const jobject dvr) {
+ ALOGD("DvrCallback::setDvr");
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ mDvr = env->NewWeakGlobalRef(dvr);
+}
+
+DvrCallback::~DvrCallback() {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ if (mDvr != NULL) {
+ env->DeleteWeakGlobalRef(mDvr);
+ mDvr = NULL;
+ }
+}
+
+/////////////// Dvr ///////////////////////
+
+Dvr::Dvr(sp<IDvr> sp, jobject obj) : mDvrSp(sp), mDvrMQEventFlag(nullptr) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ mDvrObj = env->NewWeakGlobalRef(obj);
+}
+
+Dvr::~Dvr() {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->DeleteWeakGlobalRef(mDvrObj);
+ mDvrObj = NULL;
+}
+
+jint Dvr::close() {
+ Result r = mDvrSp->close();
+ if (r == Result::SUCCESS) {
+ EventFlag::deleteEventFlag(&mDvrMQEventFlag);
+ }
+ return (jint) r;
+}
+
+sp<IDvr> Dvr::getIDvr() {
+ return mDvrSp;
+}
+
+MQ& Dvr::getDvrMQ() {
+ return *mDvrMQ;
+}
+
+/////////////// C2DataIdInfo ///////////////////////
+
+C2DataIdInfo::C2DataIdInfo(uint32_t index, uint64_t value) : C2Param(kParamSize, index) {
+ CHECK(isGlobal());
+ CHECK_EQ(C2Param::INFO, kind());
+ DummyInfo info{value};
+ memcpy(this + 1, static_cast<C2Param *>(&info) + 1, kParamSize - sizeof(C2Param));
+}
+
+/////////////// MediaEvent ///////////////////////
+
+MediaEvent::MediaEvent(sp<IFilter> iFilter, hidl_handle avHandle,
+ uint64_t dataId, uint64_t dataLength, jobject obj) : mIFilter(iFilter),
+ mDataId(dataId), mDataLength(dataLength), mBuffer(nullptr),
+ mDataIdRefCnt(0), mAvHandleRefCnt(0), mIonHandle(nullptr) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ mMediaEventObj = env->NewWeakGlobalRef(obj);
+ mAvHandle = native_handle_clone(avHandle.getNativeHandle());
+}
+
+MediaEvent::~MediaEvent() {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->DeleteWeakGlobalRef(mMediaEventObj);
+ mMediaEventObj = NULL;
+ native_handle_delete(mAvHandle);
+ if (mIonHandle != NULL) {
+ delete mIonHandle;
+ }
+ std::shared_ptr<C2Buffer> pC2Buffer = mC2Buffer.lock();
+ if (pC2Buffer != NULL) {
+ pC2Buffer->unregisterOnDestroyNotify(&DestroyCallback, this);
+ }
+}
+
+void MediaEvent::finalize() {
+ if (mAvHandleRefCnt == 0) {
+ mIFilter->releaseAvHandle(hidl_handle(mAvHandle), mDataIdRefCnt == 0 ? mDataId : 0);
+ native_handle_close(mAvHandle);
+ }
+}
+
+jobject MediaEvent::getLinearBlock() {
+ ALOGD("MediaEvent::getLinearBlock");
+ if (mAvHandle == NULL) {
+ return NULL;
+ }
+ if (mLinearBlockObj != NULL) {
+ return mLinearBlockObj;
+ }
+ mIonHandle = new C2HandleIon(dup(mAvHandle->data[0]), mDataLength);
+ std::shared_ptr<C2LinearBlock> block = _C2BlockFactory::CreateLinearBlock(mIonHandle);
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ std::unique_ptr<JMediaCodecLinearBlock> context{new JMediaCodecLinearBlock};
+ context->mBlock = block;
+ std::shared_ptr<C2Buffer> pC2Buffer = context->toC2Buffer(0, mDataLength);
+ context->mBuffer = pC2Buffer;
+ mC2Buffer = pC2Buffer;
+ if (mAvHandle->numInts > 0) {
+ // use first int in the native_handle as the index
+ int index = mAvHandle->data[mAvHandle->numFds];
+ std::shared_ptr<C2Param> c2param = std::make_shared<C2DataIdInfo>(index, mDataId);
+ std::shared_ptr<C2Info> info(std::static_pointer_cast<C2Info>(c2param));
+ pC2Buffer->setInfo(info);
+ }
+ pC2Buffer->registerOnDestroyNotify(&DestroyCallback, this);
+ jobject linearBlock =
+ env->NewObject(
+ env->FindClass("android/media/MediaCodec$LinearBlock"),
+ gFields.linearBlockInitID);
+ env->CallVoidMethod(
+ linearBlock,
+ gFields.linearBlockSetInternalStateID,
+ (jlong)context.release(),
+ true);
+ mLinearBlockObj = env->NewWeakGlobalRef(linearBlock);
+ mAvHandleRefCnt++;
+ return mLinearBlockObj;
+}
+
+uint64_t MediaEvent::getAudioHandle() {
+ mDataIdRefCnt++;
+ return mDataId;
+}
+
+/////////////// FilterCallback ///////////////////////
+
+jobjectArray FilterCallback::getSectionEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass eventClazz = env->FindClass("android/media/tv/tuner/filter/SectionEvent");
+ jmethodID eventInit = env->GetMethodID(eventClazz, "<init>", "(IIII)V");
+
+ for (int i = 0; i < events.size(); i++) {
+ auto event = events[i];
+ DemuxFilterSectionEvent sectionEvent = event.section();
+
+ jint tableId = static_cast<jint>(sectionEvent.tableId);
+ jint version = static_cast<jint>(sectionEvent.version);
+ jint sectionNum = static_cast<jint>(sectionEvent.sectionNum);
+ jint dataLength = static_cast<jint>(sectionEvent.dataLength);
+
+ jobject obj =
+ env->NewObject(eventClazz, eventInit, tableId, version, sectionNum, dataLength);
+ env->SetObjectArrayElement(arr, i, obj);
+ }
+ return arr;
+}
+
+jobjectArray FilterCallback::getMediaEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass eventClazz = env->FindClass("android/media/tv/tuner/filter/MediaEvent");
+ jmethodID eventInit = env->GetMethodID(eventClazz,
+ "<init>",
+ "(IZJJJLandroid/media/MediaCodec$LinearBlock;"
+ "ZJIZLandroid/media/tv/tuner/filter/AudioDescriptor;)V");
+ jfieldID eventContext = env->GetFieldID(eventClazz, "mNativeContext", "J");
+
+ for (int i = 0; i < events.size(); i++) {
+ auto event = events[i];
+ DemuxFilterMediaEvent mediaEvent = event.media();
+
+ jobject audioDescriptor = NULL;
+ if (mediaEvent.extraMetaData.getDiscriminator()
+ == DemuxFilterMediaEvent::ExtraMetaData::hidl_discriminator::audio) {
+ jclass adClazz = env->FindClass("android/media/tv/tuner/filter/AudioDescriptor");
+ jmethodID adInit = env->GetMethodID(adClazz, "<init>", "(BBCBBB)V");
+
+ AudioExtraMetaData ad = mediaEvent.extraMetaData.audio();
+ jbyte adFade = static_cast<jbyte>(ad.adFade);
+ jbyte adPan = static_cast<jbyte>(ad.adPan);
+ jchar versionTextTag = static_cast<jchar>(ad.versionTextTag);
+ jbyte adGainCenter = static_cast<jbyte>(ad.adGainCenter);
+ jbyte adGainFront = static_cast<jbyte>(ad.adGainFront);
+ jbyte adGainSurround = static_cast<jbyte>(ad.adGainSurround);
+
+ audioDescriptor =
+ env->NewObject(adClazz, adInit, adFade, adPan, versionTextTag, adGainCenter,
+ adGainFront, adGainSurround);
+ }
+
+ jlong dataLength = static_cast<jlong>(mediaEvent.dataLength);
+
+ jint streamId = static_cast<jint>(mediaEvent.streamId);
+ jboolean isPtsPresent = static_cast<jboolean>(mediaEvent.isPtsPresent);
+ jlong pts = static_cast<jlong>(mediaEvent.pts);
+ jlong offset = static_cast<jlong>(mediaEvent.offset);
+ jboolean isSecureMemory = static_cast<jboolean>(mediaEvent.isSecureMemory);
+ jlong avDataId = static_cast<jlong>(mediaEvent.avDataId);
+ jint mpuSequenceNumber = static_cast<jint>(mediaEvent.mpuSequenceNumber);
+ jboolean isPesPrivateData = static_cast<jboolean>(mediaEvent.isPesPrivateData);
+
+ jobject obj =
+ env->NewObject(eventClazz, eventInit, streamId, isPtsPresent, pts, dataLength,
+ offset, NULL, isSecureMemory, avDataId, mpuSequenceNumber, isPesPrivateData,
+ audioDescriptor);
+
+ if (mediaEvent.avMemory.getNativeHandle() != NULL || mediaEvent.avDataId != 0) {
+ sp<MediaEvent> mediaEventSp =
+ new MediaEvent(mIFilter, mediaEvent.avMemory,
+ mediaEvent.avDataId, dataLength, obj);
+ mediaEventSp->mAvHandleRefCnt++;
+ env->SetLongField(obj, eventContext, (jlong) mediaEventSp.get());
+ mediaEventSp->incStrong(obj);
+ }
+
+ env->SetObjectArrayElement(arr, i, obj);
+ }
+ return arr;
+}
+
+jobjectArray FilterCallback::getPesEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass eventClazz = env->FindClass("android/media/tv/tuner/filter/PesEvent");
+ jmethodID eventInit = env->GetMethodID(eventClazz, "<init>", "(III)V");
+
+ for (int i = 0; i < events.size(); i++) {
+ auto event = events[i];
+ DemuxFilterPesEvent pesEvent = event.pes();
+
+ jint streamId = static_cast<jint>(pesEvent.streamId);
+ jint dataLength = static_cast<jint>(pesEvent.dataLength);
+ jint mpuSequenceNumber = static_cast<jint>(pesEvent.mpuSequenceNumber);
+
+ jobject obj =
+ env->NewObject(eventClazz, eventInit, streamId, dataLength, mpuSequenceNumber);
+ env->SetObjectArrayElement(arr, i, obj);
+ }
+ return arr;
+}
+
+jobjectArray FilterCallback::getTsRecordEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass eventClazz = env->FindClass("android/media/tv/tuner/filter/TsRecordEvent");
+ jmethodID eventInit = env->GetMethodID(eventClazz, "<init>", "(IIIJ)V");
+
+ for (int i = 0; i < events.size(); i++) {
+ auto event = events[i];
+ DemuxFilterTsRecordEvent tsRecordEvent = event.tsRecord();
+ DemuxPid pid = tsRecordEvent.pid;
+
+ jint jpid = static_cast<jint>(Constant::INVALID_TS_PID);
+
+ if (pid.getDiscriminator() == DemuxPid::hidl_discriminator::tPid) {
+ jpid = static_cast<jint>(pid.tPid());
+ } else if (pid.getDiscriminator() == DemuxPid::hidl_discriminator::mmtpPid) {
+ jpid = static_cast<jint>(pid.mmtpPid());
+ }
+
+ jint sc = 0;
+
+ if (tsRecordEvent.scIndexMask.getDiscriminator()
+ == DemuxFilterTsRecordEvent::ScIndexMask::hidl_discriminator::sc) {
+ sc = static_cast<jint>(tsRecordEvent.scIndexMask.sc());
+ } else if (tsRecordEvent.scIndexMask.getDiscriminator()
+ == DemuxFilterTsRecordEvent::ScIndexMask::hidl_discriminator::scHevc) {
+ sc = static_cast<jint>(tsRecordEvent.scIndexMask.scHevc());
+ }
+
+ jint ts = static_cast<jint>(tsRecordEvent.tsIndexMask);
+
+ jlong byteNumber = static_cast<jlong>(tsRecordEvent.byteNumber);
+
+ jobject obj =
+ env->NewObject(eventClazz, eventInit, jpid, ts, sc, byteNumber);
+ env->SetObjectArrayElement(arr, i, obj);
+ }
+ return arr;
+}
+
+jobjectArray FilterCallback::getMmtpRecordEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass eventClazz = env->FindClass("android/media/tv/tuner/filter/MmtpRecordEvent");
+ jmethodID eventInit = env->GetMethodID(eventClazz, "<init>", "(IJ)V");
+
+ for (int i = 0; i < events.size(); i++) {
+ auto event = events[i];
+ DemuxFilterMmtpRecordEvent mmtpRecordEvent = event.mmtpRecord();
+
+ jint scHevcIndexMask = static_cast<jint>(mmtpRecordEvent.scHevcIndexMask);
+ jlong byteNumber = static_cast<jlong>(mmtpRecordEvent.byteNumber);
+
+ jobject obj =
+ env->NewObject(eventClazz, eventInit, scHevcIndexMask, byteNumber);
+ env->SetObjectArrayElement(arr, i, obj);
+ }
+ return arr;
+}
+
+jobjectArray FilterCallback::getDownloadEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass eventClazz = env->FindClass("android/media/tv/tuner/filter/DownloadEvent");
+ jmethodID eventInit = env->GetMethodID(eventClazz, "<init>", "(IIIII)V");
+
+ for (int i = 0; i < events.size(); i++) {
+ auto event = events[i];
+ DemuxFilterDownloadEvent downloadEvent = event.download();
+
+ jint itemId = static_cast<jint>(downloadEvent.itemId);
+ jint mpuSequenceNumber = static_cast<jint>(downloadEvent.mpuSequenceNumber);
+ jint itemFragmentIndex = static_cast<jint>(downloadEvent.itemFragmentIndex);
+ jint lastItemFragmentIndex = static_cast<jint>(downloadEvent.lastItemFragmentIndex);
+ jint dataLength = static_cast<jint>(downloadEvent.dataLength);
+
+ jobject obj =
+ env->NewObject(eventClazz, eventInit, itemId, mpuSequenceNumber, itemFragmentIndex,
+ lastItemFragmentIndex, dataLength);
+ env->SetObjectArrayElement(arr, i, obj);
+ }
+ return arr;
+}
+
+jobjectArray FilterCallback::getIpPayloadEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass eventClazz = env->FindClass("android/media/tv/tuner/filter/IpPayloadEvent");
+ jmethodID eventInit = env->GetMethodID(eventClazz, "<init>", "(I)V");
+
+ for (int i = 0; i < events.size(); i++) {
+ auto event = events[i];
+ DemuxFilterIpPayloadEvent ipPayloadEvent = event.ipPayload();
+ jint dataLength = static_cast<jint>(ipPayloadEvent.dataLength);
+ jobject obj = env->NewObject(eventClazz, eventInit, dataLength);
+ env->SetObjectArrayElement(arr, i, obj);
+ }
+ return arr;
+}
+
+jobjectArray FilterCallback::getTemiEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass eventClazz = env->FindClass("android/media/tv/tuner/filter/TemiEvent");
+ jmethodID eventInit = env->GetMethodID(eventClazz, "<init>", "(JB[B)V");
+
+ for (int i = 0; i < events.size(); i++) {
+ auto event = events[i];
+ DemuxFilterTemiEvent temiEvent = event.temi();
+ jlong pts = static_cast<jlong>(temiEvent.pts);
+ jbyte descrTag = static_cast<jbyte>(temiEvent.descrTag);
+ std::vector<uint8_t> descrData = temiEvent.descrData;
+
+ jbyteArray array = env->NewByteArray(descrData.size());
+ env->SetByteArrayRegion(
+ array, 0, descrData.size(), reinterpret_cast<jbyte*>(&descrData[0]));
+
+ jobject obj = env->NewObject(eventClazz, eventInit, pts, descrTag, array);
+ env->SetObjectArrayElement(arr, i, obj);
+ }
+ return arr;
+}
+
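+// Entry point for filter events from the HAL: converts the event list into an
+// array of Java filter event objects (the converter is chosen from the first
+// event's discriminator) and forwards it to the Java Filter callback.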
+Return<void> FilterCallback::onFilterEvent(const DemuxFilterEvent& filterEvent) {
+ ALOGD("FilterCallback::onFilterEvent");
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+ std::vector<DemuxFilterEvent::Event> events = filterEvent.events;
+ jclass eventClazz = env->FindClass("android/media/tv/tuner/filter/FilterEvent");
+ jobjectArray array = env->NewObjectArray(events.size(), eventClazz, NULL);
+
+ if (!events.empty()) {
+ auto event = events[0];
+ switch (event.getDiscriminator()) {
+ case DemuxFilterEvent::Event::hidl_discriminator::media: {
+ array = getMediaEvent(array, events);
+ break;
+ }
+ case DemuxFilterEvent::Event::hidl_discriminator::section: {
+ array = getSectionEvent(array, events);
+ break;
+ }
+ case DemuxFilterEvent::Event::hidl_discriminator::pes: {
+ array = getPesEvent(array, events);
+ break;
+ }
+ case DemuxFilterEvent::Event::hidl_discriminator::tsRecord: {
+ array = getTsRecordEvent(array, events);
+ break;
+ }
+ case DemuxFilterEvent::Event::hidl_discriminator::mmtpRecord: {
+ array = getMmtpRecordEvent(array, events);
+ break;
+ }
+ case DemuxFilterEvent::Event::hidl_discriminator::download: {
+ array = getDownloadEvent(array, events);
+ break;
+ }
+ case DemuxFilterEvent::Event::hidl_discriminator::ipPayload: {
+ array = getIpPayloadEvent(array, events);
+ break;
+ }
+ case DemuxFilterEvent::Event::hidl_discriminator::temi: {
+ array = getTemiEvent(array, events);
+ break;
+ }
+ default: {
+ break;
+ }
+ }
+ }
+ env->CallVoidMethod(
+ mFilter,
+ gFields.onFilterEventID,
+ array);
+ return Void();
+}
+
+Return<void> FilterCallback::onFilterStatus(const DemuxFilterStatus status) {
+ ALOGD("FilterCallback::onFilterStatus");
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->CallVoidMethod(
+ mFilter,
+ gFields.onFilterStatusID,
+ (jint)status);
+ return Void();
+}
+
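+// Caches the Java filter object (a weak global reference) and the HIDL filter
+// so subsequent callbacks can reach both.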
+void FilterCallback::setFilter(const sp<Filter> filter) {
+ ALOGD("FilterCallback::setFilter");
+ mFilter = filter->mFilterObj;
+ mIFilter = filter->mFilterSp;
+}
+
+FilterCallback::~FilterCallback() {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ if (mFilter != NULL) {
+ env->DeleteWeakGlobalRef(mFilter);
+ mFilter = NULL;
+ }
+}
+
+/////////////// Filter ///////////////////////
+
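+// Native wrapper pairing an IFilter HIDL interface with a weak reference to its
+// Java peer.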
+Filter::Filter(sp<IFilter> sp, jobject obj) : mFilterSp(sp) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ mFilterObj = env->NewWeakGlobalRef(obj);
+}
+
+Filter::~Filter() {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+ env->DeleteWeakGlobalRef(mFilterObj);
+ mFilterObj = NULL;
+ EventFlag::deleteEventFlag(&mFilterMQEventFlag);
+}
+
+int Filter::close() {
+ Result r = mFilterSp->close();
+ if (r == Result::SUCCESS) {
+ EventFlag::deleteEventFlag(&mFilterMQEventFlag);
+ }
+ return (int)r;
+}
+
+sp<IFilter> Filter::getIFilter() {
+ return mFilterSp;
+}
+
+/////////////// TimeFilter ///////////////////////
+
+TimeFilter::TimeFilter(sp<ITimeFilter> sp, jobject obj) : mTimeFilterSp(sp) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ mTimeFilterObj = env->NewWeakGlobalRef(obj);
+}
+
+TimeFilter::~TimeFilter() {
+ ALOGD("~TimeFilter");
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+ env->DeleteWeakGlobalRef(mTimeFilterObj);
+ mTimeFilterObj = NULL;
+}
+
+sp<ITimeFilter> TimeFilter::getITimeFilter() {
+ return mTimeFilterSp;
+}
+
+/////////////// FrontendCallback ///////////////////////
+
+FrontendCallback::FrontendCallback(jweak tunerObj, FrontendId id) : mObject(tunerObj), mId(id) {}
+
+Return<void> FrontendCallback::onEvent(FrontendEventType frontendEventType) {
+ ALOGD("FrontendCallback::onEvent, type=%d", frontendEventType);
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ env->CallVoidMethod(
+ mObject,
+ gFields.onFrontendEventID,
+ (jint)frontendEventType);
+ return Void();
+}
+
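+// Maps each frontend scan message type onto the matching callback method of the
+// Java Tuner object, converting HIDL vectors into Java arrays where necessary.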
+Return<void> FrontendCallback::onScanMessage(FrontendScanMessageType type, const FrontendScanMessage& message) {
+ ALOGD("FrontendCallback::onScanMessage, type=%d", type);
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass clazz = env->FindClass("android/media/tv/tuner/Tuner");
+ switch(type) {
+ case FrontendScanMessageType::LOCKED: {
+ if (message.isLocked()) {
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onLocked", "()V"));
+ }
+ break;
+ }
+ case FrontendScanMessageType::END: {
+ if (message.isEnd()) {
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onScanStopped", "()V"));
+ }
+ break;
+ }
+ case FrontendScanMessageType::PROGRESS_PERCENT: {
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onProgress", "(I)V"),
+ (jint) message.progressPercent());
+ break;
+ }
+ case FrontendScanMessageType::FREQUENCY: {
+ std::vector<uint32_t> v = message.frequencies();
+ jintArray freqs = env->NewIntArray(v.size());
+ env->SetIntArrayRegion(freqs, 0, v.size(), reinterpret_cast<jint*>(&v[0]));
+
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onFrequenciesReport", "([I)V"),
+ freqs);
+ break;
+ }
+ case FrontendScanMessageType::SYMBOL_RATE: {
+ std::vector<uint32_t> v = message.symbolRates();
+ jintArray symbolRates = env->NewIntArray(v.size());
+ env->SetIntArrayRegion(symbolRates, 0, v.size(), reinterpret_cast<jint*>(&v[0]));
+
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onSymbolRates", "([I)V"),
+ symbolRates);
+ break;
+ }
+ case FrontendScanMessageType::HIERARCHY: {
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onHierarchy", "(I)V"),
+ (jint) message.hierarchy());
+ break;
+ }
+ case FrontendScanMessageType::ANALOG_TYPE: {
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onSignalType", "(I)V"),
+ (jint) message.analogType());
+ break;
+ }
+ case FrontendScanMessageType::PLP_IDS: {
+ std::vector<uint8_t> v = message.plpIds();
+ std::vector<jint> jintV(v.begin(), v.end());
+ jintArray plpIds = env->NewIntArray(v.size());
+ env->SetIntArrayRegion(plpIds, 0, jintV.size(), &jintV[0]);
+
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onPlpIds", "([I)V"),
+ plpIds);
+ break;
+ }
+ case FrontendScanMessageType::GROUP_IDS: {
+ std::vector<uint8_t> v = message.groupIds();
+ std::vector<jint> jintV(v.begin(), v.end());
+ jintArray groupIds = env->NewIntArray(v.size());
+ env->SetIntArrayRegion(groupIds, 0, jintV.size(), &jintV[0]);
+
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onGroupIds", "([I)V"),
+ groupIds);
+ break;
+ }
+ case FrontendScanMessageType::INPUT_STREAM_IDS: {
+ std::vector<uint16_t> v = message.inputStreamIds();
+ std::vector<jint> jintV(v.begin(), v.end());
+ jintArray streamIds = env->NewIntArray(v.size());
+ env->SetIntArrayRegion(streamIds, 0, jintV.size(), &jintV[0]);
+
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onInputStreamIds", "([I)V"),
+ streamIds);
+ break;
+ }
+ case FrontendScanMessageType::STANDARD: {
+ FrontendScanMessage::Standard std = message.std();
+ jint standard;
+ if (std.getDiscriminator() == FrontendScanMessage::Standard::hidl_discriminator::sStd) {
+ standard = (jint) std.sStd();
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onDvbsStandard", "(I)V"),
+ standard);
+ } else if (std.getDiscriminator() == FrontendScanMessage::Standard::hidl_discriminator::tStd) {
+ standard = (jint) std.tStd();
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onDvbtStandard", "(I)V"),
+ standard);
+ } else if (std.getDiscriminator() == FrontendScanMessage::Standard::hidl_discriminator::sifStd) {
+ standard = (jint) std.sifStd();
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onAnalogSifStandard", "(I)V"),
+ standard);
+ }
+ break;
+ }
+ case FrontendScanMessageType::ATSC3_PLP_INFO: {
+ jclass plpClazz = env->FindClass("android/media/tv/tuner/frontend/Atsc3PlpInfo");
+ jmethodID init = env->GetMethodID(plpClazz, "<init>", "(IZ)V");
+ std::vector<FrontendScanAtsc3PlpInfo> plpInfos = message.atsc3PlpInfos();
+ jobjectArray array = env->NewObjectArray(plpInfos.size(), plpClazz, NULL);
+
+ for (int i = 0; i < plpInfos.size(); i++) {
+ auto info = plpInfos[i];
+ jint plpId = (jint) info.plpId;
+ jboolean lls = (jboolean) info.bLlsFlag;
+
+ jobject obj = env->NewObject(plpClazz, init, plpId, lls);
+ env->SetObjectArrayElement(array, i, obj);
+ }
+ env->CallVoidMethod(
+ mObject,
+ env->GetMethodID(clazz, "onAtsc3PlpInfos", "([Landroid/media/tv/tuner/frontend/Atsc3PlpInfo;)V"),
+ array);
+ break;
+ }
+ }
+ return Void();
+}
+
+/////////////// Tuner ///////////////////////
+
+sp<ITuner> JTuner::mTuner;
+
+JTuner::JTuner(JNIEnv *env, jobject thiz)
+ : mClass(NULL) {
+ jclass clazz = env->GetObjectClass(thiz);
+ CHECK(clazz != NULL);
+
+ mClass = (jclass)env->NewGlobalRef(clazz);
+ mObject = env->NewWeakGlobalRef(thiz);
+ if (mTuner == NULL) {
+ mTuner = getTunerService();
+ }
+}
+
+JTuner::~JTuner() {
+ if (mFe != NULL) {
+ mFe->close();
+ }
+ if (mDemux != NULL) {
+ mDemux->close();
+ }
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+ env->DeleteWeakGlobalRef(mObject);
+ env->DeleteGlobalRef(mClass);
+ mTuner = NULL;
+ mClass = NULL;
+ mObject = NULL;
+}
+
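+// Lazily connects to the ITuner HIDL service; the handle is cached in the
+// static mTuner member.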
+sp<ITuner> JTuner::getTunerService() {
+ if (mTuner == nullptr) {
+ mTuner = ITuner::getService();
+
+ if (mTuner == nullptr) {
+ ALOGW("Failed to get tuner service.");
+ }
+ }
+ return mTuner;
+}
+
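+// Returns the available frontend IDs as a java.util.ArrayList<Integer>, or null
+// if the HAL reports none.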
+jobject JTuner::getFrontendIds() {
+ ALOGD("JTuner::getFrontendIds()");
+ mTuner->getFrontendIds([&](Result, const hidl_vec<FrontendId>& frontendIds) {
+ mFeIds = frontendIds;
+ });
+ if (mFeIds.size() == 0) {
+ ALOGW("Frontend isn't available");
+ return NULL;
+ }
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass arrayListClazz = env->FindClass("java/util/ArrayList");
+ jmethodID arrayListAdd = env->GetMethodID(arrayListClazz, "add", "(Ljava/lang/Object;)Z");
+ jobject obj = env->NewObject(arrayListClazz, env->GetMethodID(arrayListClazz, "<init>", "()V"));
+
+ jclass integerClazz = env->FindClass("java/lang/Integer");
+ jmethodID intInit = env->GetMethodID(integerClazz, "<init>", "(I)V");
+
+    for (int i = 0; i < mFeIds.size(); i++) {
+ jobject idObj = env->NewObject(integerClazz, intInit, mFeIds[i]);
+ env->CallBooleanMethod(obj, arrayListAdd, idObj);
+ }
+ return obj;
+}
+
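+// Opens the frontend with the given ID, attaches it to the demux if one is
+// already open, registers a FrontendCallback, and returns a new Tuner.Frontend
+// Java object (null on failure).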
+jobject JTuner::openFrontendById(int id) {
+ sp<IFrontend> fe;
+ Result res;
+ mTuner->openFrontendById(id, [&](Result r, const sp<IFrontend>& frontend) {
+ fe = frontend;
+ res = r;
+ });
+ if (res != Result::SUCCESS || fe == nullptr) {
+ ALOGE("Failed to open frontend");
+ return NULL;
+ }
+ mFe = fe;
+ mFeId = id;
+ if (mDemux != NULL) {
+ mDemux->setFrontendDataSource(mFeId);
+ }
+ sp<FrontendCallback> feCb = new FrontendCallback(mObject, id);
+ fe->setCallback(feCb);
+
+ jint jId = (jint) id;
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ // TODO: add more fields to frontend
+ return env->NewObject(
+ env->FindClass("android/media/tv/tuner/Tuner$Frontend"),
+ gFields.frontendInitID,
+ mObject,
+ (jint) jId);
+}
+
+jint JTuner::closeFrontendById(int id) {
+ if (mFe != NULL && mFeId == id) {
+ Result r = mFe->close();
+ return (jint) r;
+ }
+ return (jint) Result::SUCCESS;
+}
+
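+// The get*FrontendCaps helpers below translate the HIDL capability structs into
+// their Java FrontendCapabilities counterparts.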
+jobject JTuner::getAnalogFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/AnalogFrontendCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(II)V");
+
+ jint typeCap = caps.analogCaps().typeCap;
+ jint sifStandardCap = caps.analogCaps().sifStandardCap;
+ return env->NewObject(clazz, capsInit, typeCap, sifStandardCap);
+}
+
+jobject JTuner::getAtsc3FrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/Atsc3FrontendCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(IIIIII)V");
+
+ jint bandwidthCap = caps.atsc3Caps().bandwidthCap;
+ jint modulationCap = caps.atsc3Caps().modulationCap;
+ jint timeInterleaveModeCap = caps.atsc3Caps().timeInterleaveModeCap;
+ jint codeRateCap = caps.atsc3Caps().codeRateCap;
+ jint fecCap = caps.atsc3Caps().fecCap;
+ jint demodOutputFormatCap = caps.atsc3Caps().demodOutputFormatCap;
+
+ return env->NewObject(clazz, capsInit, bandwidthCap, modulationCap, timeInterleaveModeCap,
+ codeRateCap, fecCap, demodOutputFormatCap);
+}
+
+jobject JTuner::getAtscFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/AtscFrontendCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(I)V");
+
+ jint modulationCap = caps.atscCaps().modulationCap;
+
+ return env->NewObject(clazz, capsInit, modulationCap);
+}
+
+jobject JTuner::getDvbcFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/DvbcFrontendCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(III)V");
+
+ jint modulationCap = caps.dvbcCaps().modulationCap;
+ jint fecCap = caps.dvbcCaps().fecCap;
+ jint annexCap = caps.dvbcCaps().annexCap;
+
+ return env->NewObject(clazz, capsInit, modulationCap, fecCap, annexCap);
+}
+
+jobject JTuner::getDvbsFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/DvbsFrontendCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(IJI)V");
+
+ jint modulationCap = caps.dvbsCaps().modulationCap;
+ jlong innerfecCap = caps.dvbsCaps().innerfecCap;
+ jint standard = caps.dvbsCaps().standard;
+
+ return env->NewObject(clazz, capsInit, modulationCap, innerfecCap, standard);
+}
+
+jobject JTuner::getDvbtFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/DvbtFrontendCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(IIIIIIZZ)V");
+
+ jint transmissionModeCap = caps.dvbtCaps().transmissionModeCap;
+ jint bandwidthCap = caps.dvbtCaps().bandwidthCap;
+ jint constellationCap = caps.dvbtCaps().constellationCap;
+ jint coderateCap = caps.dvbtCaps().coderateCap;
+ jint hierarchyCap = caps.dvbtCaps().hierarchyCap;
+ jint guardIntervalCap = caps.dvbtCaps().guardIntervalCap;
+ jboolean isT2Supported = caps.dvbtCaps().isT2Supported;
+ jboolean isMisoSupported = caps.dvbtCaps().isMisoSupported;
+
+ return env->NewObject(clazz, capsInit, transmissionModeCap, bandwidthCap, constellationCap,
+ coderateCap, hierarchyCap, guardIntervalCap, isT2Supported, isMisoSupported);
+}
+
+jobject JTuner::getIsdbs3FrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/Isdbs3FrontendCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(II)V");
+
+ jint modulationCap = caps.isdbs3Caps().modulationCap;
+ jint coderateCap = caps.isdbs3Caps().coderateCap;
+
+ return env->NewObject(clazz, capsInit, modulationCap, coderateCap);
+}
+
+jobject JTuner::getIsdbsFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/IsdbsFrontendCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(II)V");
+
+ jint modulationCap = caps.isdbsCaps().modulationCap;
+ jint coderateCap = caps.isdbsCaps().coderateCap;
+
+ return env->NewObject(clazz, capsInit, modulationCap, coderateCap);
+}
+
+jobject JTuner::getIsdbtFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/IsdbtFrontendCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(IIIII)V");
+
+ jint modeCap = caps.isdbtCaps().modeCap;
+ jint bandwidthCap = caps.isdbtCaps().bandwidthCap;
+ jint modulationCap = caps.isdbtCaps().modulationCap;
+ jint coderateCap = caps.isdbtCaps().coderateCap;
+ jint guardIntervalCap = caps.isdbtCaps().guardIntervalCap;
+
+ return env->NewObject(clazz, capsInit, modeCap, bandwidthCap, modulationCap, coderateCap,
+ guardIntervalCap);
+}
+
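+// Builds a Java FrontendInfo object from the HAL info, selecting the capability
+// object that matches the reported frontend type.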
+jobject JTuner::getFrontendInfo(int id) {
+ FrontendInfo feInfo;
+ Result res;
+ mTuner->getFrontendInfo(id, [&](Result r, const FrontendInfo& info) {
+ feInfo = info;
+ res = r;
+ });
+ if (res != Result::SUCCESS) {
+ return NULL;
+ }
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/FrontendInfo");
+ jmethodID infoInit = env->GetMethodID(clazz, "<init>",
+ "(IIIIIIII[ILandroid/media/tv/tuner/frontend/FrontendCapabilities;)V");
+
+ jint type = (jint) feInfo.type;
+ jint minFrequency = feInfo.minFrequency;
+ jint maxFrequency = feInfo.maxFrequency;
+ jint minSymbolRate = feInfo.minSymbolRate;
+ jint maxSymbolRate = feInfo.maxSymbolRate;
+ jint acquireRange = feInfo.acquireRange;
+ jint exclusiveGroupId = feInfo.exclusiveGroupId;
+ jintArray statusCaps = env->NewIntArray(feInfo.statusCaps.size());
+ env->SetIntArrayRegion(
+ statusCaps, 0, feInfo.statusCaps.size(),
+ reinterpret_cast<jint*>(&feInfo.statusCaps[0]));
+ FrontendInfo::FrontendCapabilities caps = feInfo.frontendCaps;
+
+ jobject jcaps = NULL;
+ switch(feInfo.type) {
+ case FrontendType::ANALOG:
+ if (FrontendInfo::FrontendCapabilities::hidl_discriminator::analogCaps
+ == caps.getDiscriminator()) {
+ jcaps = getAnalogFrontendCaps(env, caps);
+ }
+ break;
+ case FrontendType::ATSC3:
+ if (FrontendInfo::FrontendCapabilities::hidl_discriminator::atsc3Caps
+ == caps.getDiscriminator()) {
+ jcaps = getAtsc3FrontendCaps(env, caps);
+ }
+ break;
+ case FrontendType::ATSC:
+ if (FrontendInfo::FrontendCapabilities::hidl_discriminator::atscCaps
+ == caps.getDiscriminator()) {
+ jcaps = getAtscFrontendCaps(env, caps);
+ }
+ break;
+ case FrontendType::DVBC:
+ if (FrontendInfo::FrontendCapabilities::hidl_discriminator::dvbcCaps
+ == caps.getDiscriminator()) {
+ jcaps = getDvbcFrontendCaps(env, caps);
+ }
+ break;
+ case FrontendType::DVBS:
+ if (FrontendInfo::FrontendCapabilities::hidl_discriminator::dvbsCaps
+ == caps.getDiscriminator()) {
+ jcaps = getDvbsFrontendCaps(env, caps);
+ }
+ break;
+ case FrontendType::DVBT:
+ if (FrontendInfo::FrontendCapabilities::hidl_discriminator::dvbtCaps
+ == caps.getDiscriminator()) {
+ jcaps = getDvbtFrontendCaps(env, caps);
+ }
+ break;
+ case FrontendType::ISDBS:
+ if (FrontendInfo::FrontendCapabilities::hidl_discriminator::isdbsCaps
+ == caps.getDiscriminator()) {
+ jcaps = getIsdbsFrontendCaps(env, caps);
+ }
+ break;
+ case FrontendType::ISDBS3:
+ if (FrontendInfo::FrontendCapabilities::hidl_discriminator::isdbs3Caps
+ == caps.getDiscriminator()) {
+ jcaps = getIsdbs3FrontendCaps(env, caps);
+ }
+ break;
+ case FrontendType::ISDBT:
+ if (FrontendInfo::FrontendCapabilities::hidl_discriminator::isdbtCaps
+ == caps.getDiscriminator()) {
+ jcaps = getIsdbtFrontendCaps(env, caps);
+ }
+ break;
+ default:
+ break;
+ }
+
+ return env->NewObject(
+ clazz, infoInit, (jint) id, type, minFrequency, maxFrequency, minSymbolRate,
+ maxSymbolRate, acquireRange, exclusiveGroupId, statusCaps, jcaps);
+}
+
+jintArray JTuner::getLnbIds() {
+ ALOGD("JTuner::getLnbIds()");
+ Result res;
+ hidl_vec<LnbId> lnbIds;
+ mTuner->getLnbIds([&](Result r, const hidl_vec<LnbId>& ids) {
+ lnbIds = ids;
+ res = r;
+ });
+ if (res != Result::SUCCESS || lnbIds.size() == 0) {
+ ALOGW("Lnb isn't available");
+ return NULL;
+ }
+
+ mLnbIds = lnbIds;
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+
+ jintArray ids = env->NewIntArray(mLnbIds.size());
+ env->SetIntArrayRegion(ids, 0, mLnbIds.size(), reinterpret_cast<jint*>(&mLnbIds[0]));
+
+ return ids;
+}
+
+jobject JTuner::openLnbById(int id) {
+ sp<ILnb> iLnbSp;
+ Result r;
+ mTuner->openLnbById(id, [&](Result res, const sp<ILnb>& lnb) {
+ r = res;
+ iLnbSp = lnb;
+ });
+ if (r != Result::SUCCESS || iLnbSp == nullptr) {
+ ALOGE("Failed to open lnb");
+ return NULL;
+ }
+ mLnb = iLnbSp;
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jobject lnbObj = env->NewObject(
+ env->FindClass("android/media/tv/tuner/Lnb"),
+ gFields.lnbInitID,
+ (jint) id);
+
+ sp<LnbCallback> lnbCb = new LnbCallback(lnbObj, id);
+ mLnb->setCallback(lnbCb);
+
+ sp<Lnb> lnbSp = new Lnb(iLnbSp, lnbObj);
+ lnbSp->incStrong(lnbObj);
+ env->SetLongField(lnbObj, gFields.lnbContext, (jlong) lnbSp.get());
+
+ return lnbObj;
+}
+
+jobject JTuner::openLnbByName(jstring name) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+    const char *nameChars = env->GetStringUTFChars(name, nullptr);
+    std::string lnbName(nameChars);
+    env->ReleaseStringUTFChars(name, nameChars);
+ sp<ILnb> iLnbSp;
+ Result res;
+ LnbId id;
+ mTuner->openLnbByName(lnbName, [&](Result r, LnbId lnbId, const sp<ILnb>& lnb) {
+ res = r;
+ iLnbSp = lnb;
+ id = lnbId;
+ });
+ if (res != Result::SUCCESS || iLnbSp == nullptr) {
+ ALOGE("Failed to open lnb");
+ return NULL;
+ }
+ mLnb = iLnbSp;
+
+ jobject lnbObj = env->NewObject(
+ env->FindClass("android/media/tv/tuner/Lnb"),
+ gFields.lnbInitID,
+ id);
+
+ sp<LnbCallback> lnbCb = new LnbCallback(lnbObj, id);
+ mLnb->setCallback(lnbCb);
+
+ sp<Lnb> lnbSp = new Lnb(iLnbSp, lnbObj);
+ lnbSp->incStrong(lnbObj);
+ env->SetLongField(lnbObj, gFields.lnbContext, (jlong) lnbSp.get());
+
+ return lnbObj;
+}
+
+int JTuner::tune(const FrontendSettings& settings) {
+ if (mFe == NULL) {
+ ALOGE("frontend is not initialized");
+ return (int)Result::INVALID_STATE;
+ }
+ Result result = mFe->tune(settings);
+ return (int)result;
+}
+
+int JTuner::stopTune() {
+ if (mFe == NULL) {
+ ALOGE("frontend is not initialized");
+ return (int)Result::INVALID_STATE;
+ }
+ Result result = mFe->stopTune();
+ return (int)result;
+}
+
+int JTuner::scan(const FrontendSettings& settings, FrontendScanType scanType) {
+ if (mFe == NULL) {
+ ALOGE("frontend is not initialized");
+ return (int)Result::INVALID_STATE;
+ }
+ Result result = mFe->scan(settings, scanType);
+ return (int)result;
+}
+
+int JTuner::stopScan() {
+ if (mFe == NULL) {
+ ALOGE("frontend is not initialized");
+ return (int)Result::INVALID_STATE;
+ }
+ Result result = mFe->stopScan();
+ return (int)result;
+}
+
+int JTuner::setLnb(int id) {
+ if (mFe == NULL) {
+ ALOGE("frontend is not initialized");
+ return (int)Result::INVALID_STATE;
+ }
+ Result result = mFe->setLnb(id);
+ return (int)result;
+}
+
+int JTuner::setLna(bool enable) {
+ if (mFe == NULL) {
+ ALOGE("frontend is not initialized");
+ return (int)Result::INVALID_STATE;
+ }
+ Result result = mFe->setLna(enable);
+ return (int)result;
+}
+
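+// Opens and caches the demux; if a frontend is already open it is set as the
+// demux's data source.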
+Result JTuner::openDemux() {
+ if (mTuner == nullptr) {
+ return Result::NOT_INITIALIZED;
+ }
+ if (mDemux != nullptr) {
+ return Result::SUCCESS;
+ }
+ Result res;
+ uint32_t id;
+ sp<IDemux> demuxSp;
+ mTuner->openDemux([&](Result r, uint32_t demuxId, const sp<IDemux>& demux) {
+ demuxSp = demux;
+ id = demuxId;
+ res = r;
+ ALOGD("open demux, id = %d", demuxId);
+ });
+ if (res == Result::SUCCESS) {
+ mDemux = demuxSp;
+ mDemuxId = id;
+ if (mFe != NULL) {
+ mDemux->setFrontendDataSource(mFeId);
+ }
+ }
+ return res;
+}
+
+jint JTuner::close() {
+ Result res = Result::SUCCESS;
+ if (mFe != NULL) {
+ res = mFe->close();
+ if (res != Result::SUCCESS) {
+ return (jint) res;
+ }
+ }
+ if (mDemux != NULL) {
+ res = mDemux->close();
+ if (res != Result::SUCCESS) {
+ return (jint) res;
+ }
+ }
+ return (jint) res;
+}
+
+jobject JTuner::getAvSyncHwId(sp<Filter> filter) {
+ if (mDemux == NULL) {
+ return NULL;
+ }
+
+ uint32_t avSyncHwId;
+ Result res;
+ sp<IFilter> iFilterSp = filter->getIFilter();
+ mDemux->getAvSyncHwId(iFilterSp,
+ [&](Result r, uint32_t id) {
+ res = r;
+ avSyncHwId = id;
+ });
+ if (res == Result::SUCCESS) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass integerClazz = env->FindClass("java/lang/Integer");
+ jmethodID intInit = env->GetMethodID(integerClazz, "<init>", "(I)V");
+ return env->NewObject(integerClazz, intInit, avSyncHwId);
+ }
+ return NULL;
+}
+
+jobject JTuner::getAvSyncTime(jint id) {
+ if (mDemux == NULL) {
+ return NULL;
+ }
+ uint64_t time;
+ Result res;
+ mDemux->getAvSyncTime(static_cast<uint32_t>(id),
+ [&](Result r, uint64_t ts) {
+ res = r;
+ time = ts;
+ });
+ if (res == Result::SUCCESS) {
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass longClazz = env->FindClass("java/lang/Long");
+ jmethodID longInit = env->GetMethodID(longClazz, "<init>", "(J)V");
+ return env->NewObject(longClazz, longInit, static_cast<jlong>(time));
+ }
+ return NULL;
+}
+
+int JTuner::connectCiCam(jint id) {
+ if (mDemux == NULL) {
+ Result r = openDemux();
+ if (r != Result::SUCCESS) {
+ return (int) r;
+ }
+ }
+ Result r = mDemux->connectCiCam(static_cast<uint32_t>(id));
+ return (int) r;
+}
+
+int JTuner::disconnectCiCam() {
+ if (mDemux == NULL) {
+ Result r = openDemux();
+ if (r != Result::SUCCESS) {
+ return (int) r;
+ }
+ }
+ Result r = mDemux->disconnectCiCam();
+ return (int) r;
+}
+
+jobject JTuner::openDescrambler() {
+ ALOGD("JTuner::openDescrambler");
+ if (mTuner == nullptr || mDemux == nullptr) {
+ return NULL;
+ }
+ sp<IDescrambler> descramblerSp;
+ Result res;
+ mTuner->openDescrambler([&](Result r, const sp<IDescrambler>& descrambler) {
+ res = r;
+ descramblerSp = descrambler;
+ });
+
+ if (res != Result::SUCCESS || descramblerSp == NULL) {
+ return NULL;
+ }
+
+ descramblerSp->setDemuxSource(mDemuxId);
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jobject descramblerObj =
+ env->NewObject(
+ env->FindClass("android/media/tv/tuner/Descrambler"),
+ gFields.descramblerInitID);
+
+ descramblerSp->incStrong(descramblerObj);
+ env->SetLongField(descramblerObj, gFields.descramblerContext, (jlong)descramblerSp.get());
+
+ return descramblerObj;
+}
+
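+// Opens a demux filter of the given type and buffer size, wraps it in a native
+// Filter, and stores the wrapper in the new Java Filter's native context field.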
+jobject JTuner::openFilter(DemuxFilterType type, int bufferSize) {
+ if (mDemux == NULL) {
+ if (openDemux() != Result::SUCCESS) {
+ return NULL;
+ }
+ }
+
+ sp<IFilter> iFilterSp;
+ sp<FilterCallback> callback = new FilterCallback();
+ Result res;
+ mDemux->openFilter(type, bufferSize, callback,
+ [&](Result r, const sp<IFilter>& filter) {
+ iFilterSp = filter;
+ res = r;
+ });
+ if (res != Result::SUCCESS || iFilterSp == NULL) {
+ ALOGD("Failed to open filter, type = %d", type.mainType);
+ return NULL;
+ }
+ int fId;
+ iFilterSp->getId([&](Result, uint32_t filterId) {
+ fId = filterId;
+ });
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jobject filterObj =
+ env->NewObject(
+ env->FindClass("android/media/tv/tuner/filter/Filter"),
+ gFields.filterInitID,
+ (jint) fId);
+
+ sp<Filter> filterSp = new Filter(iFilterSp, filterObj);
+ filterSp->incStrong(filterObj);
+ env->SetLongField(filterObj, gFields.filterContext, (jlong)filterSp.get());
+
+ callback->setFilter(filterSp);
+
+ return filterObj;
+}
+
+jobject JTuner::openTimeFilter() {
+ if (mDemux == NULL) {
+ if (openDemux() != Result::SUCCESS) {
+ return NULL;
+ }
+ }
+ sp<ITimeFilter> iTimeFilterSp;
+ Result res;
+ mDemux->openTimeFilter(
+ [&](Result r, const sp<ITimeFilter>& filter) {
+ iTimeFilterSp = filter;
+ res = r;
+ });
+
+ if (res != Result::SUCCESS || iTimeFilterSp == NULL) {
+ return NULL;
+ }
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jobject timeFilterObj =
+ env->NewObject(
+ env->FindClass("android/media/tv/tuner/filter/TimeFilter"),
+ gFields.timeFilterInitID);
+ sp<TimeFilter> timeFilterSp = new TimeFilter(iTimeFilterSp, timeFilterObj);
+ timeFilterSp->incStrong(timeFilterObj);
+ env->SetLongField(timeFilterObj, gFields.timeFilterContext, (jlong)timeFilterSp.get());
+
+ return timeFilterObj;
+}
+
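+// Opens a DVR interface and wraps it in either a DvrRecorder or a DvrPlayback
+// Java object, depending on the requested type.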
+jobject JTuner::openDvr(DvrType type, jlong bufferSize) {
+ ALOGD("JTuner::openDvr");
+ if (mDemux == NULL) {
+ if (openDemux() != Result::SUCCESS) {
+ return NULL;
+ }
+ }
+ sp<IDvr> iDvrSp;
+ sp<DvrCallback> callback = new DvrCallback();
+ Result res;
+ mDemux->openDvr(type, (uint32_t) bufferSize, callback,
+ [&](Result r, const sp<IDvr>& dvr) {
+ res = r;
+ iDvrSp = dvr;
+ });
+
+ if (res != Result::SUCCESS || iDvrSp == NULL) {
+ return NULL;
+ }
+
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jobject dvrObj;
+ if (type == DvrType::RECORD) {
+ dvrObj =
+ env->NewObject(
+ env->FindClass("android/media/tv/tuner/dvr/DvrRecorder"),
+ gFields.dvrRecorderInitID,
+ mObject);
+ sp<Dvr> dvrSp = new Dvr(iDvrSp, dvrObj);
+ dvrSp->incStrong(dvrObj);
+ env->SetLongField(dvrObj, gFields.dvrRecorderContext, (jlong)dvrSp.get());
+ } else {
+ dvrObj =
+ env->NewObject(
+ env->FindClass("android/media/tv/tuner/dvr/DvrPlayback"),
+ gFields.dvrPlaybackInitID,
+ mObject);
+ sp<Dvr> dvrSp = new Dvr(iDvrSp, dvrObj);
+ dvrSp->incStrong(dvrObj);
+ env->SetLongField(dvrObj, gFields.dvrPlaybackContext, (jlong)dvrSp.get());
+ }
+
+ callback->setDvr(dvrObj);
+
+ return dvrObj;
+}
+
+jobject JTuner::getDemuxCaps() {
+ DemuxCapabilities caps;
+ Result res;
+ mTuner->getDemuxCaps([&](Result r, const DemuxCapabilities& demuxCaps) {
+ caps = demuxCaps;
+ res = r;
+ });
+ if (res != Result::SUCCESS) {
+ return NULL;
+ }
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jclass clazz = env->FindClass("android/media/tv/tuner/DemuxCapabilities");
+ jmethodID capsInit = env->GetMethodID(clazz, "<init>", "(IIIIIIIIIJI[IZ)V");
+
+ jint numDemux = caps.numDemux;
+ jint numRecord = caps.numRecord;
+ jint numPlayback = caps.numPlayback;
+ jint numTsFilter = caps.numTsFilter;
+ jint numSectionFilter = caps.numSectionFilter;
+ jint numAudioFilter = caps.numAudioFilter;
+ jint numVideoFilter = caps.numVideoFilter;
+ jint numPesFilter = caps.numPesFilter;
+ jint numPcrFilter = caps.numPcrFilter;
+ jlong numBytesInSectionFilter = caps.numBytesInSectionFilter;
+ jint filterCaps = static_cast<jint>(caps.filterCaps);
+ jboolean bTimeFilter = caps.bTimeFilter;
+
+ jintArray linkCaps = env->NewIntArray(caps.linkCaps.size());
+ env->SetIntArrayRegion(
+ linkCaps, 0, caps.linkCaps.size(), reinterpret_cast<jint*>(&caps.linkCaps[0]));
+
+ return env->NewObject(clazz, capsInit, numDemux, numRecord, numPlayback, numTsFilter,
+ numSectionFilter, numAudioFilter, numVideoFilter, numPesFilter, numPcrFilter,
+ numBytesInSectionFilter, filterCaps, linkCaps, bTimeFilter);
+}
+
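+// Queries the requested status types from the frontend and fills in the
+// corresponding fields of a new Java FrontendStatus object.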
+jobject JTuner::getFrontendStatus(jintArray types) {
+ if (mFe == NULL) {
+ return NULL;
+ }
+ JNIEnv *env = AndroidRuntime::getJNIEnv();
+ jsize size = env->GetArrayLength(types);
+ std::vector<FrontendStatusType> v(size);
+ env->GetIntArrayRegion(types, 0, size, reinterpret_cast<jint*>(&v[0]));
+
+ Result res;
+ hidl_vec<FrontendStatus> status;
+ mFe->getStatus(v,
+ [&](Result r, const hidl_vec<FrontendStatus>& s) {
+ res = r;
+ status = s;
+ });
+ if (res != Result::SUCCESS) {
+ return NULL;
+ }
+
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/FrontendStatus");
+ jmethodID init = env->GetMethodID(clazz, "<init>", "()V");
+ jobject statusObj = env->NewObject(clazz, init);
+
+ jclass intClazz = env->FindClass("java/lang/Integer");
+ jmethodID initInt = env->GetMethodID(intClazz, "<init>", "(I)V");
+ jclass booleanClazz = env->FindClass("java/lang/Boolean");
+ jmethodID initBoolean = env->GetMethodID(booleanClazz, "<init>", "(Z)V");
+
+ for (auto s : status) {
+ switch(s.getDiscriminator()) {
+ case FrontendStatus::hidl_discriminator::isDemodLocked: {
+ jfieldID field = env->GetFieldID(clazz, "mIsDemodLocked", "Ljava/lang/Boolean;");
+ jobject newBooleanObj = env->NewObject(
+ booleanClazz, initBoolean, static_cast<jboolean>(s.isDemodLocked()));
+ env->SetObjectField(statusObj, field, newBooleanObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::snr: {
+ jfieldID field = env->GetFieldID(clazz, "mSnr", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.snr()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::ber: {
+ jfieldID field = env->GetFieldID(clazz, "mBer", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.ber()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::per: {
+ jfieldID field = env->GetFieldID(clazz, "mPer", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.per()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::preBer: {
+ jfieldID field = env->GetFieldID(clazz, "mPerBer", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.preBer()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::signalQuality: {
+ jfieldID field = env->GetFieldID(clazz, "mSignalQuality", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.signalQuality()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::signalStrength: {
+ jfieldID field = env->GetFieldID(clazz, "mSignalStrength", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.signalStrength()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::symbolRate: {
+ jfieldID field = env->GetFieldID(clazz, "mSymbolRate", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.symbolRate()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::innerFec: {
+ jfieldID field = env->GetFieldID(clazz, "mInnerFec", "Ljava/lang/Long;");
+ jclass longClazz = env->FindClass("java/lang/Long");
+ jmethodID initLong = env->GetMethodID(longClazz, "<init>", "(J)V");
+ jobject newLongObj = env->NewObject(
+ longClazz, initLong, static_cast<jlong>(s.innerFec()));
+ env->SetObjectField(statusObj, field, newLongObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::modulation: {
+ jfieldID field = env->GetFieldID(clazz, "mModulation", "Ljava/lang/Integer;");
+ FrontendModulationStatus modulation = s.modulation();
+ jint intModulation;
+ bool valid = true;
+ switch(modulation.getDiscriminator()) {
+ case FrontendModulationStatus::hidl_discriminator::dvbc: {
+ intModulation = static_cast<jint>(modulation.dvbc());
+ break;
+ }
+ case FrontendModulationStatus::hidl_discriminator::dvbs: {
+ intModulation = static_cast<jint>(modulation.dvbs());
+ break;
+ }
+ case FrontendModulationStatus::hidl_discriminator::isdbs: {
+ intModulation = static_cast<jint>(modulation.isdbs());
+ break;
+ }
+ case FrontendModulationStatus::hidl_discriminator::isdbs3: {
+ intModulation = static_cast<jint>(modulation.isdbs3());
+ break;
+ }
+ case FrontendModulationStatus::hidl_discriminator::isdbt: {
+ intModulation = static_cast<jint>(modulation.isdbt());
+ break;
+ }
+ default: {
+ valid = false;
+ break;
+ }
+ }
+ if (valid) {
+ jobject newIntegerObj = env->NewObject(intClazz, initInt, intModulation);
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ }
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::inversion: {
+ jfieldID field = env->GetFieldID(clazz, "mInversion", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.inversion()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::lnbVoltage: {
+ jfieldID field = env->GetFieldID(clazz, "mLnbVoltage", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.lnbVoltage()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::plpId: {
+ jfieldID field = env->GetFieldID(clazz, "mPlpId", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.plpId()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::isEWBS: {
+ jfieldID field = env->GetFieldID(clazz, "mIsEwbs", "Ljava/lang/Boolean;");
+ jobject newBooleanObj = env->NewObject(
+ booleanClazz, initBoolean, static_cast<jboolean>(s.isEWBS()));
+ env->SetObjectField(statusObj, field, newBooleanObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::agc: {
+ jfieldID field = env->GetFieldID(clazz, "mAgc", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.agc()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::isLnaOn: {
+ jfieldID field = env->GetFieldID(clazz, "mIsLnaOn", "Ljava/lang/Boolean;");
+ jobject newBooleanObj = env->NewObject(
+ booleanClazz, initBoolean, static_cast<jboolean>(s.isLnaOn()));
+ env->SetObjectField(statusObj, field, newBooleanObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::isLayerError: {
+ jfieldID field = env->GetFieldID(clazz, "mIsLayerErrors", "[Z");
+ hidl_vec<bool> layerErr = s.isLayerError();
+
+ jbooleanArray valObj = env->NewBooleanArray(layerErr.size());
+
+ for (size_t i = 0; i < layerErr.size(); i++) {
+ jboolean x = layerErr[i];
+ env->SetBooleanArrayRegion(valObj, i, 1, &x);
+ }
+ env->SetObjectField(statusObj, field, valObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::mer: {
+ jfieldID field = env->GetFieldID(clazz, "mMer", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.mer()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::freqOffset: {
+ jfieldID field = env->GetFieldID(clazz, "mFreqOffset", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.freqOffset()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::hierarchy: {
+ jfieldID field = env->GetFieldID(clazz, "mHierarchy", "Ljava/lang/Integer;");
+ jobject newIntegerObj = env->NewObject(
+ intClazz, initInt, static_cast<jint>(s.hierarchy()));
+ env->SetObjectField(statusObj, field, newIntegerObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::isRfLocked: {
+ jfieldID field = env->GetFieldID(clazz, "mIsRfLocked", "Ljava/lang/Boolean;");
+ jobject newBooleanObj = env->NewObject(
+ booleanClazz, initBoolean, static_cast<jboolean>(s.isRfLocked()));
+ env->SetObjectField(statusObj, field, newBooleanObj);
+ break;
+ }
+ case FrontendStatus::hidl_discriminator::plpInfo: {
+ jfieldID field = env->GetFieldID(clazz, "mPlpInfo",
+ "[Landroid/media/tv/tuner/frontend/FrontendStatus$Atsc3PlpTuningInfo;");
+ jclass plpClazz = env->FindClass(
+ "android/media/tv/tuner/frontend/FrontendStatus$Atsc3PlpTuningInfo");
+ jmethodID initPlp = env->GetMethodID(plpClazz, "<init>", "(IZI)V");
+
+ hidl_vec<FrontendStatusAtsc3PlpInfo> plpInfos = s.plpInfo();
+
+ jobjectArray valObj = env->NewObjectArray(plpInfos.size(), plpClazz, NULL);
+ for (int i = 0; i < plpInfos.size(); i++) {
+ auto info = plpInfos[i];
+ jint plpId = (jint) info.plpId;
+ jboolean isLocked = (jboolean) info.isLocked;
+ jint uec = (jint) info.uec;
+
+ jobject plpObj = env->NewObject(plpClazz, initPlp, plpId, isLocked, uec);
+ env->SetObjectArrayElement(valObj, i, plpObj);
+ }
+
+ env->SetObjectField(statusObj, field, valObj);
+ break;
+ }
+ default: {
+ break;
+ }
+ }
+ }
+
+ return statusObj;
+}
+
+jint JTuner::closeFrontend() {
+ Result r = Result::SUCCESS;
+ if (mFe != NULL) {
+ r = mFe->close();
+ }
+ return (jint) r;
+}
+
+jint JTuner::closeDemux() {
+ Result r = Result::SUCCESS;
+ if (mDemux != NULL) {
+ r = mDemux->close();
+ }
+ return (jint) r;
+}
+
+} // namespace android
+
+////////////////////////////////////////////////////////////////////////////////
+
+using namespace android;
+
+static sp<JTuner> setTuner(JNIEnv *env, jobject thiz, const sp<JTuner> &tuner) {
+ sp<JTuner> old = (JTuner *)env->GetLongField(thiz, gFields.tunerContext);
+
+ if (tuner != NULL) {
+ tuner->incStrong(thiz);
+ }
+ if (old != NULL) {
+ old->decStrong(thiz);
+ }
+ env->SetLongField(thiz, gFields.tunerContext, (jlong)tuner.get());
+
+ return old;
+}
+
+static sp<JTuner> getTuner(JNIEnv *env, jobject thiz) {
+ return (JTuner *)env->GetLongField(thiz, gFields.tunerContext);
+}
+
+static sp<IDescrambler> getDescrambler(JNIEnv *env, jobject descrambler) {
+ return (IDescrambler *)env->GetLongField(descrambler, gFields.descramblerContext);
+}
+
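+// A resource handle carries the resource ID in bits 16-23; extract it here.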
+static uint32_t getResourceIdFromHandle(jint handle) {
+ return (handle & 0x00ff0000) >> 16;
+}
+
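+// Builds a DemuxPid union: pidType 1 selects a TS PID, pidType 2 an MMTP PID.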
+static DemuxPid getDemuxPid(int pidType, int pid) {
+ DemuxPid demuxPid;
+ if ((int)pidType == 1) {
+ demuxPid.tPid(static_cast<DemuxTpid>(pid));
+ } else if ((int)pidType == 2) {
+ demuxPid.mmtpPid(static_cast<DemuxMmtpPid>(pid));
+ }
+ return demuxPid;
+}
+
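+// The helpers below read the Java FrontendSettings subclasses through JNI field
+// access and build the corresponding HIDL frontend settings unions.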
+static uint32_t getFrontendSettingsFreq(JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/FrontendSettings");
+ jfieldID freqField = env->GetFieldID(clazz, "mFrequency", "I");
+ uint32_t freq = static_cast<uint32_t>(env->GetIntField(settings, freqField));
+ return freq;
+}
+
+static FrontendSettings getAnalogFrontendSettings(JNIEnv *env, const jobject& settings) {
+ FrontendSettings frontendSettings;
+ uint32_t freq = getFrontendSettingsFreq(env, settings);
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/AnalogFrontendSettings");
+ FrontendAnalogType analogType =
+ static_cast<FrontendAnalogType>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mSignalType", "I")));
+ FrontendAnalogSifStandard sifStandard =
+ static_cast<FrontendAnalogSifStandard>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mSifStandard", "I")));
+ FrontendAnalogSettings frontendAnalogSettings {
+ .frequency = freq,
+ .type = analogType,
+ .sifStandard = sifStandard,
+ };
+ frontendSettings.analog(frontendAnalogSettings);
+ return frontendSettings;
+}
+
+static hidl_vec<FrontendAtsc3PlpSettings> getAtsc3PlpSettings(
+ JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/Atsc3FrontendSettings");
+ jobjectArray plpSettings =
+ reinterpret_cast<jobjectArray>(
+ env->GetObjectField(settings,
+ env->GetFieldID(
+ clazz,
+ "mPlpSettings",
+ "[Landroid/media/tv/tuner/frontend/Atsc3PlpSettings;")));
+ int len = env->GetArrayLength(plpSettings);
+
+ jclass plpClazz = env->FindClass("android/media/tv/tuner/frontend/Atsc3PlpSettings");
+ hidl_vec<FrontendAtsc3PlpSettings> plps = hidl_vec<FrontendAtsc3PlpSettings>(len);
+ // parse PLP settings
+ for (int i = 0; i < len; i++) {
+ jobject plp = env->GetObjectArrayElement(plpSettings, i);
+ uint8_t plpId =
+ static_cast<uint8_t>(
+ env->GetIntField(plp, env->GetFieldID(plpClazz, "mPlpId", "I")));
+ FrontendAtsc3Modulation modulation =
+ static_cast<FrontendAtsc3Modulation>(
+ env->GetIntField(plp, env->GetFieldID(plpClazz, "mModulation", "I")));
+ FrontendAtsc3TimeInterleaveMode interleaveMode =
+ static_cast<FrontendAtsc3TimeInterleaveMode>(
+ env->GetIntField(
+ plp, env->GetFieldID(plpClazz, "mInterleaveMode", "I")));
+ FrontendAtsc3CodeRate codeRate =
+ static_cast<FrontendAtsc3CodeRate>(
+ env->GetIntField(plp, env->GetFieldID(plpClazz, "mCodeRate", "I")));
+ FrontendAtsc3Fec fec =
+ static_cast<FrontendAtsc3Fec>(
+ env->GetIntField(plp, env->GetFieldID(plpClazz, "mFec", "I")));
+ FrontendAtsc3PlpSettings frontendAtsc3PlpSettings {
+ .plpId = plpId,
+ .modulation = modulation,
+ .interleaveMode = interleaveMode,
+ .codeRate = codeRate,
+ .fec = fec,
+ };
+ plps[i] = frontendAtsc3PlpSettings;
+ }
+ return plps;
+}
+
+static FrontendSettings getAtsc3FrontendSettings(JNIEnv *env, const jobject& settings) {
+ FrontendSettings frontendSettings;
+ uint32_t freq = getFrontendSettingsFreq(env, settings);
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/Atsc3FrontendSettings");
+
+ FrontendAtsc3Bandwidth bandwidth =
+ static_cast<FrontendAtsc3Bandwidth>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mBandwidth", "I")));
+ FrontendAtsc3DemodOutputFormat demod =
+ static_cast<FrontendAtsc3DemodOutputFormat>(
+ env->GetIntField(
+ settings, env->GetFieldID(clazz, "mDemodOutputFormat", "I")));
+ hidl_vec<FrontendAtsc3PlpSettings> plps = getAtsc3PlpSettings(env, settings);
+ FrontendAtsc3Settings frontendAtsc3Settings {
+ .frequency = freq,
+ .bandwidth = bandwidth,
+ .demodOutputFormat = demod,
+ .plpSettings = plps,
+ };
+ frontendSettings.atsc3(frontendAtsc3Settings);
+ return frontendSettings;
+}
+
+static FrontendSettings getAtscFrontendSettings(JNIEnv *env, const jobject& settings) {
+ FrontendSettings frontendSettings;
+ uint32_t freq = getFrontendSettingsFreq(env, settings);
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/AtscFrontendSettings");
+ FrontendAtscModulation modulation =
+ static_cast<FrontendAtscModulation>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mModulation", "I")));
+ FrontendAtscSettings frontendAtscSettings {
+ .frequency = freq,
+ .modulation = modulation,
+ };
+ frontendSettings.atsc(frontendAtscSettings);
+ return frontendSettings;
+}
+
+static FrontendSettings getDvbcFrontendSettings(JNIEnv *env, const jobject& settings) {
+ FrontendSettings frontendSettings;
+ uint32_t freq = getFrontendSettingsFreq(env, settings);
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/DvbcFrontendSettings");
+ FrontendDvbcModulation modulation =
+ static_cast<FrontendDvbcModulation>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mModulation", "I")));
+ FrontendInnerFec innerFec =
+ static_cast<FrontendInnerFec>(
+ env->GetLongField(settings, env->GetFieldID(clazz, "mFec", "J")));
+ uint32_t symbolRate =
+ static_cast<uint32_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mSymbolRate", "I")));
+ FrontendDvbcOuterFec outerFec =
+ static_cast<FrontendDvbcOuterFec>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mOuterFec", "I")));
+ FrontendDvbcAnnex annex =
+ static_cast<FrontendDvbcAnnex>(
+ env->GetByteField(settings, env->GetFieldID(clazz, "mAnnex", "B")));
+ FrontendDvbcSpectralInversion spectralInversion =
+ static_cast<FrontendDvbcSpectralInversion>(
+ env->GetIntField(
+ settings, env->GetFieldID(clazz, "mSpectralInversion", "I")));
+ FrontendDvbcSettings frontendDvbcSettings {
+ .frequency = freq,
+ .modulation = modulation,
+ .fec = innerFec,
+ .symbolRate = symbolRate,
+ .outerFec = outerFec,
+ .annex = annex,
+ .spectralInversion = spectralInversion,
+ };
+ frontendSettings.dvbc(frontendDvbcSettings);
+ return frontendSettings;
+}
+
+static FrontendDvbsCodeRate getDvbsCodeRate(JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/DvbsFrontendSettings");
+ jobject jcodeRate =
+ env->GetObjectField(settings,
+ env->GetFieldID(
+ clazz,
+ "mCodeRate",
+ "Landroid/media/tv/tuner/frontend/DvbsCodeRate;"));
+
+ jclass codeRateClazz = env->FindClass("android/media/tv/tuner/frontend/DvbsCodeRate");
+ FrontendInnerFec innerFec =
+ static_cast<FrontendInnerFec>(
+ env->GetLongField(
+ jcodeRate, env->GetFieldID(codeRateClazz, "mInnerFec", "J")));
+ bool isLinear =
+ static_cast<bool>(
+ env->GetBooleanField(
+ jcodeRate, env->GetFieldID(codeRateClazz, "mIsLinear", "Z")));
+ bool isShortFrames =
+ static_cast<bool>(
+ env->GetBooleanField(
+ jcodeRate, env->GetFieldID(codeRateClazz, "mIsShortFrames", "Z")));
+ uint32_t bitsPer1000Symbol =
+ static_cast<uint32_t>(
+ env->GetIntField(
+ jcodeRate, env->GetFieldID(
+ codeRateClazz, "mBitsPer1000Symbol", "I")));
+ FrontendDvbsCodeRate coderate {
+ .fec = innerFec,
+ .isLinear = isLinear,
+ .isShortFrames = isShortFrames,
+ .bitsPer1000Symbol = bitsPer1000Symbol,
+ };
+ return coderate;
+}
+
+static FrontendSettings getDvbsFrontendSettings(JNIEnv *env, const jobject& settings) {
+ FrontendSettings frontendSettings;
+ uint32_t freq = getFrontendSettingsFreq(env, settings);
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/DvbsFrontendSettings");
+
+ FrontendDvbsModulation modulation =
+ static_cast<FrontendDvbsModulation>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mModulation", "I")));
+ uint32_t symbolRate =
+ static_cast<uint32_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mSymbolRate", "I")));
+ FrontendDvbsRolloff rolloff =
+ static_cast<FrontendDvbsRolloff>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mRolloff", "I")));
+ FrontendDvbsPilot pilot =
+ static_cast<FrontendDvbsPilot>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mPilot", "I")));
+ uint32_t inputStreamId =
+ static_cast<uint32_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mInputStreamId", "I")));
+ FrontendDvbsStandard standard =
+ static_cast<FrontendDvbsStandard>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mStandard", "I")));
+ FrontendDvbsVcmMode vcmMode =
+ static_cast<FrontendDvbsVcmMode>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mVcmMode", "I")));
+ FrontendDvbsCodeRate coderate = getDvbsCodeRate(env, settings);
+
+ FrontendDvbsSettings frontendDvbsSettings {
+ .frequency = freq,
+ .modulation = modulation,
+ .coderate = coderate,
+ .symbolRate = symbolRate,
+ .rolloff = rolloff,
+ .pilot = pilot,
+ .inputStreamId = inputStreamId,
+ .standard = standard,
+ .vcmMode = vcmMode,
+ };
+ frontendSettings.dvbs(frontendDvbsSettings);
+ return frontendSettings;
+}
+
+static FrontendSettings getDvbtFrontendSettings(JNIEnv *env, const jobject& settings) {
+ FrontendSettings frontendSettings;
+ uint32_t freq = getFrontendSettingsFreq(env, settings);
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/DvbtFrontendSettings");
+ FrontendDvbtTransmissionMode transmissionMode =
+ static_cast<FrontendDvbtTransmissionMode>(
+ env->GetIntField(
+ settings, env->GetFieldID(clazz, "mTransmissionMode", "I")));
+ FrontendDvbtBandwidth bandwidth =
+ static_cast<FrontendDvbtBandwidth>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mBandwidth", "I")));
+ FrontendDvbtConstellation constellation =
+ static_cast<FrontendDvbtConstellation>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mConstellation", "I")));
+ FrontendDvbtHierarchy hierarchy =
+ static_cast<FrontendDvbtHierarchy>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mHierarchy", "I")));
+ FrontendDvbtCoderate hpCoderate =
+ static_cast<FrontendDvbtCoderate>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mHpCodeRate", "I")));
+ FrontendDvbtCoderate lpCoderate =
+ static_cast<FrontendDvbtCoderate>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mLpCodeRate", "I")));
+ FrontendDvbtGuardInterval guardInterval =
+ static_cast<FrontendDvbtGuardInterval>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mGuardInterval", "I")));
+ bool isHighPriority =
+ static_cast<bool>(
+ env->GetBooleanField(
+ settings, env->GetFieldID(clazz, "mIsHighPriority", "Z")));
+ FrontendDvbtStandard standard =
+ static_cast<FrontendDvbtStandard>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mStandard", "I")));
+ bool isMiso =
+ static_cast<bool>(
+ env->GetBooleanField(settings, env->GetFieldID(clazz, "mIsMiso", "Z")));
+ FrontendDvbtPlpMode plpMode =
+ static_cast<FrontendDvbtPlpMode>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mPlpMode", "I")));
+ uint8_t plpId =
+ static_cast<uint8_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mPlpId", "I")));
+ uint8_t plpGroupId =
+ static_cast<uint8_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mPlpGroupId", "I")));
+
+ FrontendDvbtSettings frontendDvbtSettings {
+ .frequency = freq,
+ .transmissionMode = transmissionMode,
+ .bandwidth = bandwidth,
+ .constellation = constellation,
+ .hierarchy = hierarchy,
+ .hpCoderate = hpCoderate,
+ .lpCoderate = lpCoderate,
+ .guardInterval = guardInterval,
+ .isHighPriority = isHighPriority,
+ .standard = standard,
+ .isMiso = isMiso,
+ .plpMode = plpMode,
+ .plpId = plpId,
+ .plpGroupId = plpGroupId,
+ };
+ frontendSettings.dvbt(frontendDvbtSettings);
+ return frontendSettings;
+}
+
+static FrontendSettings getIsdbsFrontendSettings(JNIEnv *env, const jobject& settings) {
+ FrontendSettings frontendSettings;
+ uint32_t freq = getFrontendSettingsFreq(env, settings);
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/IsdbsFrontendSettings");
+ uint16_t streamId =
+ static_cast<uint16_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mStreamId", "I")));
+ FrontendIsdbsStreamIdType streamIdType =
+ static_cast<FrontendIsdbsStreamIdType>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mStreamIdType", "I")));
+ FrontendIsdbsModulation modulation =
+ static_cast<FrontendIsdbsModulation>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mModulation", "I")));
+ FrontendIsdbsCoderate coderate =
+ static_cast<FrontendIsdbsCoderate>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mCodeRate", "I")));
+ uint32_t symbolRate =
+ static_cast<uint32_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mSymbolRate", "I")));
+ FrontendIsdbsRolloff rolloff =
+ static_cast<FrontendIsdbsRolloff>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mRolloff", "I")));
+
+ FrontendIsdbsSettings frontendIsdbsSettings {
+ .frequency = freq,
+ .streamId = streamId,
+ .streamIdType = streamIdType,
+ .modulation = modulation,
+ .coderate = coderate,
+ .symbolRate = symbolRate,
+ .rolloff = rolloff,
+ };
+ frontendSettings.isdbs(frontendIsdbsSettings);
+ return frontendSettings;
+}
+
+static FrontendSettings getIsdbs3FrontendSettings(JNIEnv *env, const jobject& settings) {
+ FrontendSettings frontendSettings;
+ uint32_t freq = getFrontendSettingsFreq(env, settings);
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/Isdbs3FrontendSettings");
+ uint16_t streamId =
+ static_cast<uint16_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mStreamId", "I")));
+ FrontendIsdbsStreamIdType streamIdType =
+ static_cast<FrontendIsdbsStreamIdType>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mStreamIdType", "I")));
+ FrontendIsdbs3Modulation modulation =
+ static_cast<FrontendIsdbs3Modulation>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mModulation", "I")));
+ FrontendIsdbs3Coderate coderate =
+ static_cast<FrontendIsdbs3Coderate>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mCodeRate", "I")));
+ uint32_t symbolRate =
+ static_cast<uint32_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mSymbolRate", "I")));
+ FrontendIsdbs3Rolloff rolloff =
+ static_cast<FrontendIsdbs3Rolloff>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mRolloff", "I")));
+
+ FrontendIsdbs3Settings frontendIsdbs3Settings {
+ .frequency = freq,
+ .streamId = streamId,
+ .streamIdType = streamIdType,
+ .modulation = modulation,
+ .coderate = coderate,
+ .symbolRate = symbolRate,
+ .rolloff = rolloff,
+ };
+ frontendSettings.isdbs3(frontendIsdbs3Settings);
+ return frontendSettings;
+}
+
+static FrontendSettings getIsdbtFrontendSettings(JNIEnv *env, const jobject& settings) {
+ FrontendSettings frontendSettings;
+ uint32_t freq = getFrontendSettingsFreq(env, settings);
+ jclass clazz = env->FindClass("android/media/tv/tuner/frontend/IsdbtFrontendSettings");
+ FrontendIsdbtModulation modulation =
+ static_cast<FrontendIsdbtModulation>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mModulation", "I")));
+ FrontendIsdbtBandwidth bandwidth =
+ static_cast<FrontendIsdbtBandwidth>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mBandwidth", "I")));
+ FrontendIsdbtMode mode =
+ static_cast<FrontendIsdbtMode>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mMode", "I")));
+ FrontendIsdbtCoderate coderate =
+ static_cast<FrontendIsdbtCoderate>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mCodeRate", "I")));
+ FrontendIsdbtGuardInterval guardInterval =
+ static_cast<FrontendIsdbtGuardInterval>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mGuardInterval", "I")));
+ uint32_t serviceAreaId =
+ static_cast<uint32_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mServiceAreaId", "I")));
+
+ FrontendIsdbtSettings frontendIsdbtSettings {
+ .frequency = freq,
+ .modulation = modulation,
+ .bandwidth = bandwidth,
+ .mode = mode,
+ .coderate = coderate,
+ .guardInterval = guardInterval,
+ .serviceAreaId = serviceAreaId,
+ };
+ frontendSettings.isdbt(frontendIsdbtSettings);
+ return frontendSettings;
+}
+
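+// Dispatches to the per-standard converter above based on the frontend type.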
+static FrontendSettings getFrontendSettings(JNIEnv *env, int type, jobject settings) {
+ ALOGD("getFrontendSettings %d", type);
+
+ FrontendType feType = static_cast<FrontendType>(type);
+ switch(feType) {
+ case FrontendType::ANALOG:
+ return getAnalogFrontendSettings(env, settings);
+ case FrontendType::ATSC3:
+ return getAtsc3FrontendSettings(env, settings);
+ case FrontendType::ATSC:
+ return getAtscFrontendSettings(env, settings);
+ case FrontendType::DVBC:
+ return getDvbcFrontendSettings(env, settings);
+ case FrontendType::DVBS:
+ return getDvbsFrontendSettings(env, settings);
+ case FrontendType::DVBT:
+ return getDvbtFrontendSettings(env, settings);
+ case FrontendType::ISDBS:
+ return getIsdbsFrontendSettings(env, settings);
+ case FrontendType::ISDBS3:
+ return getIsdbs3FrontendSettings(env, settings);
+ case FrontendType::ISDBT:
+ return getIsdbtFrontendSettings(env, settings);
+ default:
+ // should never happen because a type is associated with a subclass of
+ // FrontendSettings and not set by users
+ jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
+ "Unsupported frontend type %d", type);
+ return FrontendSettings();
+ }
+}
+
+static sp<Filter> getFilter(JNIEnv *env, jobject filter) {
+ return (Filter *)env->GetLongField(filter, gFields.filterContext);
+}
+
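+// Reads the common DvrSettings fields from the Java object and packs them into
+// either RecordSettings or PlaybackSettings, depending on whether the owner is
+// a DvrRecorder or a DvrPlayback.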
+static DvrSettings getDvrSettings(JNIEnv *env, jobject settings, bool isRecorder) {
+ DvrSettings dvrSettings;
+ jclass clazz = env->FindClass("android/media/tv/tuner/dvr/DvrSettings");
+ uint32_t statusMask =
+ static_cast<uint32_t>(env->GetIntField(
+ settings, env->GetFieldID(clazz, "mStatusMask", "I")));
+ uint32_t lowThreshold =
+ static_cast<uint32_t>(env->GetLongField(
+ settings, env->GetFieldID(clazz, "mLowThreshold", "J")));
+ uint32_t highThreshold =
+ static_cast<uint32_t>(env->GetLongField(
+ settings, env->GetFieldID(clazz, "mHighThreshold", "J")));
+ uint8_t packetSize =
+ static_cast<uint8_t>(env->GetLongField(
+ settings, env->GetFieldID(clazz, "mPacketSize", "J")));
+ DataFormat dataFormat =
+ static_cast<DataFormat>(env->GetIntField(
+ settings, env->GetFieldID(clazz, "mDataFormat", "I")));
+ if (isRecorder) {
+ RecordSettings recordSettings {
+ .statusMask = static_cast<unsigned char>(statusMask),
+ .lowThreshold = lowThreshold,
+ .highThreshold = highThreshold,
+ .dataFormat = dataFormat,
+ .packetSize = packetSize,
+ };
+ dvrSettings.record(recordSettings);
+ } else {
+        PlaybackSettings playbackSettings {
+            .statusMask = statusMask,
+            .lowThreshold = lowThreshold,
+            .highThreshold = highThreshold,
+            .dataFormat = dataFormat,
+            .packetSize = packetSize,
+        };
+        dvrSettings.playback(playbackSettings);
+ }
+ return dvrSettings;
+}
+
+static sp<Dvr> getDvr(JNIEnv *env, jobject dvr) {
+ bool isRecorder =
+ env->IsInstanceOf(dvr, env->FindClass("android/media/tv/tuner/dvr/DvrRecorder"));
+ jfieldID fieldId =
+ isRecorder ? gFields.dvrRecorderContext : gFields.dvrPlaybackContext;
+ return (Dvr *)env->GetLongField(dvr, fieldId);
+}
+
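+// Caches JNI field and method IDs for the Tuner, Lnb, Filter, TimeFilter,
+// Descrambler, DVR, MediaEvent and LinearBlock classes so they can be reused
+// from native callbacks without repeated lookups.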
+static void android_media_tv_Tuner_native_init(JNIEnv *env) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/Tuner");
+ CHECK(clazz != NULL);
+
+ gFields.tunerContext = env->GetFieldID(clazz, "mNativeContext", "J");
+ CHECK(gFields.tunerContext != NULL);
+
+ gFields.onFrontendEventID = env->GetMethodID(clazz, "onFrontendEvent", "(I)V");
+
+ jclass frontendClazz = env->FindClass("android/media/tv/tuner/Tuner$Frontend");
+ gFields.frontendInitID =
+ env->GetMethodID(frontendClazz, "<init>", "(Landroid/media/tv/tuner/Tuner;I)V");
+
+ jclass lnbClazz = env->FindClass("android/media/tv/tuner/Lnb");
+ gFields.lnbContext = env->GetFieldID(lnbClazz, "mNativeContext", "J");
+ gFields.lnbInitID = env->GetMethodID(lnbClazz, "<init>", "(I)V");
+ gFields.onLnbEventID = env->GetMethodID(lnbClazz, "onEvent", "(I)V");
+ gFields.onLnbDiseqcMessageID = env->GetMethodID(lnbClazz, "onDiseqcMessage", "([B)V");
+
+ jclass filterClazz = env->FindClass("android/media/tv/tuner/filter/Filter");
+ gFields.filterContext = env->GetFieldID(filterClazz, "mNativeContext", "J");
+ gFields.filterInitID =
+ env->GetMethodID(filterClazz, "<init>", "(I)V");
+ gFields.onFilterStatusID =
+ env->GetMethodID(filterClazz, "onFilterStatus", "(I)V");
+ gFields.onFilterEventID =
+ env->GetMethodID(filterClazz, "onFilterEvent",
+ "([Landroid/media/tv/tuner/filter/FilterEvent;)V");
+
+ jclass timeFilterClazz = env->FindClass("android/media/tv/tuner/filter/TimeFilter");
+ gFields.timeFilterContext = env->GetFieldID(timeFilterClazz, "mNativeContext", "J");
+ gFields.timeFilterInitID = env->GetMethodID(timeFilterClazz, "<init>", "()V");
+
+ jclass descramblerClazz = env->FindClass("android/media/tv/tuner/Descrambler");
+ gFields.descramblerContext = env->GetFieldID(descramblerClazz, "mNativeContext", "J");
+ gFields.descramblerInitID = env->GetMethodID(descramblerClazz, "<init>", "()V");
+
+ jclass dvrRecorderClazz = env->FindClass("android/media/tv/tuner/dvr/DvrRecorder");
+ gFields.dvrRecorderContext = env->GetFieldID(dvrRecorderClazz, "mNativeContext", "J");
+ gFields.dvrRecorderInitID = env->GetMethodID(dvrRecorderClazz, "<init>", "()V");
+ gFields.onDvrRecordStatusID =
+ env->GetMethodID(dvrRecorderClazz, "onRecordStatusChanged", "(I)V");
+
+ jclass dvrPlaybackClazz = env->FindClass("android/media/tv/tuner/dvr/DvrPlayback");
+ gFields.dvrPlaybackContext = env->GetFieldID(dvrPlaybackClazz, "mNativeContext", "J");
+ gFields.dvrPlaybackInitID = env->GetMethodID(dvrPlaybackClazz, "<init>", "()V");
+ gFields.onDvrPlaybackStatusID =
+ env->GetMethodID(dvrPlaybackClazz, "onPlaybackStatusChanged", "(I)V");
+
+ jclass mediaEventClazz = env->FindClass("android/media/tv/tuner/filter/MediaEvent");
+ gFields.mediaEventContext = env->GetFieldID(mediaEventClazz, "mNativeContext", "J");
+
+ jclass linearBlockClazz = env->FindClass("android/media/MediaCodec$LinearBlock");
+ gFields.linearBlockInitID = env->GetMethodID(linearBlockClazz, "<init>", "()V");
+ gFields.linearBlockSetInternalStateID =
+ env->GetMethodID(linearBlockClazz, "setInternalStateLocked", "(JZ)V");
+}
+
+static void android_media_tv_Tuner_native_setup(JNIEnv *env, jobject thiz) {
+ sp<JTuner> tuner = new JTuner(env, thiz);
+    setTuner(env, thiz, tuner);
+}
+
+static jobject android_media_tv_Tuner_get_frontend_ids(JNIEnv *env, jobject thiz) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->getFrontendIds();
+}
+
+static jobject android_media_tv_Tuner_open_frontend_by_handle(
+ JNIEnv *env, jobject thiz, jint handle) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ uint32_t id = getResourceIdFromHandle(handle);
+ return tuner->openFrontendById(id);
+}
+
+static jint android_media_tv_Tuner_close_frontend_by_handle(
+ JNIEnv *env, jobject thiz, jint handle) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ uint32_t id = getResourceIdFromHandle(handle);
+ return tuner->closeFrontendById(id);
+}
+
+static int android_media_tv_Tuner_tune(JNIEnv *env, jobject thiz, jint type, jobject settings) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->tune(getFrontendSettings(env, type, settings));
+}
+
+static int android_media_tv_Tuner_stop_tune(JNIEnv *env, jobject thiz) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->stopTune();
+}
+
+static int android_media_tv_Tuner_scan(
+ JNIEnv *env, jobject thiz, jint settingsType, jobject settings, jint scanType) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->scan(getFrontendSettings(
+ env, settingsType, settings), static_cast<FrontendScanType>(scanType));
+}
+
+static int android_media_tv_Tuner_stop_scan(JNIEnv *env, jobject thiz) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->stopScan();
+}
+
+static int android_media_tv_Tuner_set_lnb(JNIEnv *env, jobject thiz, jint id) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->setLnb(id);
+}
+
+static int android_media_tv_Tuner_set_lna(JNIEnv *env, jobject thiz, jboolean enable) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->setLna(enable);
+}
+
+static jobject android_media_tv_Tuner_get_frontend_status(
+ JNIEnv* env, jobject thiz, jintArray types) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->getFrontendStatus(types);
+}
+
+static jobject android_media_tv_Tuner_get_av_sync_hw_id(
+ JNIEnv *env, jobject thiz, jobject filter) {
+ sp<Filter> filterSp = getFilter(env, filter);
+ if (filterSp == NULL) {
+ ALOGD("Failed to get sync ID. Filter not found");
+ return NULL;
+ }
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->getAvSyncHwId(filterSp);
+}
+
+static jobject android_media_tv_Tuner_get_av_sync_time(JNIEnv *env, jobject thiz, jint id) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->getAvSyncTime(id);
+}
+
+static int android_media_tv_Tuner_connect_cicam(JNIEnv *env, jobject thiz, jint id) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->connectCiCam(id);
+}
+
+static int android_media_tv_Tuner_disconnect_cicam(JNIEnv *env, jobject thiz) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->disconnectCiCam();
+}
+
+static jobject android_media_tv_Tuner_get_frontend_info(JNIEnv *env, jobject thiz, jint id) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->getFrontendInfo(id);
+}
+
+static jintArray android_media_tv_Tuner_get_lnb_ids(JNIEnv *env, jobject thiz) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->getLnbIds();
+}
+
+static jobject android_media_tv_Tuner_open_lnb_by_handle(JNIEnv *env, jobject thiz, jint handle) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ uint32_t id = getResourceIdFromHandle(handle);
+ return tuner->openLnbById(id);
+}
+
+static jobject android_media_tv_Tuner_open_lnb_by_name(JNIEnv *env, jobject thiz, jstring name) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->openLnbByName(name);
+}
+
+static jobject android_media_tv_Tuner_open_filter(
+ JNIEnv *env, jobject thiz, jint type, jint subType, jlong bufferSize) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ DemuxFilterMainType mainType = static_cast<DemuxFilterMainType>(type);
+ DemuxFilterType filterType {
+ .mainType = mainType,
+ };
+
+ switch(mainType) {
+ case DemuxFilterMainType::TS:
+ filterType.subType.tsFilterType(static_cast<DemuxTsFilterType>(subType));
+ break;
+ case DemuxFilterMainType::MMTP:
+ filterType.subType.mmtpFilterType(static_cast<DemuxMmtpFilterType>(subType));
+ break;
+ case DemuxFilterMainType::IP:
+ filterType.subType.ipFilterType(static_cast<DemuxIpFilterType>(subType));
+ break;
+ case DemuxFilterMainType::TLV:
+ filterType.subType.tlvFilterType(static_cast<DemuxTlvFilterType>(subType));
+ break;
+ case DemuxFilterMainType::ALP:
+ filterType.subType.alpFilterType(static_cast<DemuxAlpFilterType>(subType));
+ break;
+ }
+
+ return tuner->openFilter(filterType, bufferSize);
+}
+
+static jobject android_media_tv_Tuner_open_time_filter(JNIEnv *env, jobject thiz) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->openTimeFilter();
+}
+
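+// Copies the filter, mask and mode byte arrays from SectionSettingsWithSectionBits
+// into the HIDL DemuxFilterSectionBits structure.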
+static DemuxFilterSectionBits getFilterSectionBits(JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/SectionSettingsWithSectionBits");
+ jbyteArray jfilterBytes = static_cast<jbyteArray>(
+ env->GetObjectField(settings, env->GetFieldID(clazz, "mFilter", "[B")));
+ jsize size = env->GetArrayLength(jfilterBytes);
+ std::vector<uint8_t> filterBytes(size);
+ env->GetByteArrayRegion(
+ jfilterBytes, 0, size, reinterpret_cast<jbyte*>(&filterBytes[0]));
+
+ jbyteArray jmask = static_cast<jbyteArray>(
+ env->GetObjectField(settings, env->GetFieldID(clazz, "mMask", "[B")));
+ size = env->GetArrayLength(jmask);
+ std::vector<uint8_t> mask(size);
+ env->GetByteArrayRegion(jmask, 0, size, reinterpret_cast<jbyte*>(&mask[0]));
+
+ jbyteArray jmode = static_cast<jbyteArray>(
+ env->GetObjectField(settings, env->GetFieldID(clazz, "mMode", "[B")));
+ size = env->GetArrayLength(jmode);
+ std::vector<uint8_t> mode(size);
+ env->GetByteArrayRegion(jmode, 0, size, reinterpret_cast<jbyte*>(&mode[0]));
+
+ DemuxFilterSectionBits filterSectionBits {
+ .filter = filterBytes,
+ .mask = mask,
+ .mode = mode,
+ };
+ return filterSectionBits;
+}
+
+static DemuxFilterSectionSettings::Condition::TableInfo getFilterTableInfo(
+ JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/SectionSettingsWithTableInfo");
+ uint16_t tableId = static_cast<uint16_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mTableId", "I")));
+ uint16_t version = static_cast<uint16_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mVersion", "I")));
+ DemuxFilterSectionSettings::Condition::TableInfo tableInfo {
+ .tableId = tableId,
+ .version = version,
+ };
+ return tableInfo;
+}
+
+static DemuxFilterSectionSettings getFilterSectionSettings(JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/SectionSettings");
+ bool isCheckCrc = static_cast<bool>(
+ env->GetBooleanField(settings, env->GetFieldID(clazz, "mCrcEnabled", "Z")));
+ bool isRepeat = static_cast<bool>(
+ env->GetBooleanField(settings, env->GetFieldID(clazz, "mIsRepeat", "Z")));
+ bool isRaw = static_cast<bool>(
+ env->GetBooleanField(settings, env->GetFieldID(clazz, "mIsRaw", "Z")));
+
+ DemuxFilterSectionSettings filterSectionSettings {
+ .isCheckCrc = isCheckCrc,
+ .isRepeat = isRepeat,
+ .isRaw = isRaw,
+ };
+ if (env->IsInstanceOf(
+ settings,
+ env->FindClass("android/media/tv/tuner/filter/SectionSettingsWithSectionBits"))) {
+ filterSectionSettings.condition.sectionBits(getFilterSectionBits(env, settings));
+ } else if (env->IsInstanceOf(
+ settings,
+ env->FindClass("android/media/tv/tuner/filter/SectionSettingsWithTableInfo"))) {
+ filterSectionSettings.condition.tableInfo(getFilterTableInfo(env, settings));
+ }
+ return filterSectionSettings;
+}
+
+static DemuxFilterAvSettings getFilterAvSettings(JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/AvSettings");
+ bool isPassthrough = static_cast<bool>(
+ env->GetBooleanField(settings, env->GetFieldID(clazz, "mIsPassthrough", "Z")));
+ DemuxFilterAvSettings filterAvSettings {
+ .isPassthrough = isPassthrough,
+ };
+ return filterAvSettings;
+}
+
+static DemuxFilterPesDataSettings getFilterPesDataSettings(JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/PesSettings");
+ uint16_t streamId = static_cast<uint16_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mStreamId", "I")));
+ bool isRaw = static_cast<bool>(
+ env->GetBooleanField(settings, env->GetFieldID(clazz, "mIsRaw", "Z")));
+ DemuxFilterPesDataSettings filterPesDataSettings {
+ .streamId = streamId,
+ .isRaw = isRaw,
+ };
+ return filterPesDataSettings;
+}
+
+static DemuxFilterRecordSettings getFilterRecordSettings(JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/RecordSettings");
+ hidl_bitfield<DemuxTsIndex> tsIndexMask = static_cast<hidl_bitfield<DemuxTsIndex>>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mTsIndexMask", "I")));
+ DemuxRecordScIndexType scIndexType = static_cast<DemuxRecordScIndexType>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mScIndexType", "I")));
+ jint scIndexMask = env->GetIntField(settings, env->GetFieldID(clazz, "mScIndexMask", "I"));
+
+ DemuxFilterRecordSettings filterRecordSettings {
+ .tsIndexMask = tsIndexMask,
+ .scIndexType = scIndexType,
+ };
+ if (scIndexType == DemuxRecordScIndexType::SC) {
+ filterRecordSettings.scIndexMask.sc(static_cast<hidl_bitfield<DemuxScIndex>>(scIndexMask));
+ } else if (scIndexType == DemuxRecordScIndexType::SC_HEVC) {
+ filterRecordSettings.scIndexMask.scHevc(
+ static_cast<hidl_bitfield<DemuxScHevcIndex>>(scIndexMask));
+ }
+ return filterRecordSettings;
+}
+
+static DemuxFilterDownloadSettings getFilterDownloadSettings(JNIEnv *env, const jobject& settings) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/DownloadSettings");
+ uint32_t downloadId = static_cast<uint32_t>(
+ env->GetIntField(settings, env->GetFieldID(clazz, "mDownloadId", "I")));
+
+ DemuxFilterDownloadSettings filterDownloadSettings {
+ .downloadId = downloadId,
+ };
+ return filterDownloadSettings;
+}
+
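+// Extracts the source/destination addresses and ports from the Java
+// IpFilterConfiguration; both addresses must have the same length and be either
+// IPv4 (4 bytes) or IPv6 (16 bytes).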
+static DemuxIpAddress getDemuxIpAddress(JNIEnv *env, const jobject& config) {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/IpFilterConfiguration");
+
+ jbyteArray jsrcIpAddress = static_cast<jbyteArray>(
+ env->GetObjectField(config, env->GetFieldID(clazz, "mSrcIpAddress", "[B")));
+ jsize srcSize = env->GetArrayLength(jsrcIpAddress);
+ jbyteArray jdstIpAddress = static_cast<jbyteArray>(
+ env->GetObjectField(config, env->GetFieldID(clazz, "mDstIpAddress", "[B")));
+ jsize dstSize = env->GetArrayLength(jdstIpAddress);
+
+ DemuxIpAddress res;
+
+ if (srcSize != dstSize) {
+        // should never happen. Validated on the Java side.
+ jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
+ "IP address lengths don't match. srcLength=%d, dstLength=%d", srcSize, dstSize);
+ return res;
+ }
+
+ if (srcSize == IP_V4_LENGTH) {
+ uint8_t srcAddr[IP_V4_LENGTH];
+ uint8_t dstAddr[IP_V4_LENGTH];
+ env->GetByteArrayRegion(
+ jsrcIpAddress, 0, srcSize, reinterpret_cast<jbyte*>(srcAddr));
+ env->GetByteArrayRegion(
+ jdstIpAddress, 0, dstSize, reinterpret_cast<jbyte*>(dstAddr));
+ res.srcIpAddress.v4(srcAddr);
+ res.dstIpAddress.v4(dstAddr);
+ } else if (srcSize == IP_V6_LENGTH) {
+ uint8_t srcAddr[IP_V6_LENGTH];
+ uint8_t dstAddr[IP_V6_LENGTH];
+ env->GetByteArrayRegion(
+ jsrcIpAddress, 0, srcSize, reinterpret_cast<jbyte*>(srcAddr));
+ env->GetByteArrayRegion(
+ jdstIpAddress, 0, dstSize, reinterpret_cast<jbyte*>(dstAddr));
+ res.srcIpAddress.v6(srcAddr);
+ res.dstIpAddress.v6(dstAddr);
+ } else {
+        // should never happen. Validated on the Java side.
+ jniThrowExceptionFmt(env, "java/lang/IllegalArgumentException",
+ "Invalid IP address length %d", srcSize);
+ return res;
+ }
+
+ uint16_t srcPort = static_cast<uint16_t>(
+ env->GetIntField(config, env->GetFieldID(clazz, "mSrcPort", "I")));
+ uint16_t dstPort = static_cast<uint16_t>(
+ env->GetIntField(config, env->GetFieldID(clazz, "mDstPort", "I")));
+
+ res.srcPort = srcPort;
+ res.dstPort = dstPort;
+
+ return res;
+}
+
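+// Builds the DemuxFilterSettings union for the given main type and subtype,
+// pulling the type-specific fields from the Java FilterConfiguration and its
+// nested Settings object.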
+static DemuxFilterSettings getFilterConfiguration(
+ JNIEnv *env, int type, int subtype, jobject filterConfigObj) {
+ DemuxFilterSettings filterSettings;
+ jobject settingsObj =
+ env->GetObjectField(
+ filterConfigObj,
+ env->GetFieldID(
+ env->FindClass("android/media/tv/tuner/filter/FilterConfiguration"),
+ "mSettings",
+ "Landroid/media/tv/tuner/filter/Settings;"));
+ DemuxFilterMainType mainType = static_cast<DemuxFilterMainType>(type);
+ switch (mainType) {
+ case DemuxFilterMainType::TS: {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/TsFilterConfiguration");
+ uint16_t tpid = static_cast<uint16_t>(
+ env->GetIntField(filterConfigObj, env->GetFieldID(clazz, "mTpid", "I")));
+ DemuxTsFilterSettings tsFilterSettings {
+ .tpid = tpid,
+ };
+
+ DemuxTsFilterType tsType = static_cast<DemuxTsFilterType>(subtype);
+ switch (tsType) {
+ case DemuxTsFilterType::SECTION:
+ tsFilterSettings.filterSettings.section(
+ getFilterSectionSettings(env, settingsObj));
+ break;
+ case DemuxTsFilterType::AUDIO:
+ case DemuxTsFilterType::VIDEO:
+ tsFilterSettings.filterSettings.av(getFilterAvSettings(env, settingsObj));
+ break;
+ case DemuxTsFilterType::PES:
+ tsFilterSettings.filterSettings.pesData(
+ getFilterPesDataSettings(env, settingsObj));
+ break;
+ case DemuxTsFilterType::RECORD:
+ tsFilterSettings.filterSettings.record(
+ getFilterRecordSettings(env, settingsObj));
+ break;
+ default:
+ break;
+ }
+ filterSettings.ts(tsFilterSettings);
+ break;
+ }
+ case DemuxFilterMainType::MMTP: {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/MmtpFilterConfiguration");
+ uint16_t mmtpPid = static_cast<uint16_t>(
+ env->GetIntField(filterConfigObj, env->GetFieldID(clazz, "mMmtpPid", "I")));
+ DemuxMmtpFilterSettings mmtpFilterSettings {
+ .mmtpPid = mmtpPid,
+ };
+ DemuxMmtpFilterType mmtpType = static_cast<DemuxMmtpFilterType>(subtype);
+ switch (mmtpType) {
+ case DemuxMmtpFilterType::SECTION:
+ mmtpFilterSettings.filterSettings.section(
+ getFilterSectionSettings(env, settingsObj));
+ break;
+ case DemuxMmtpFilterType::AUDIO:
+ case DemuxMmtpFilterType::VIDEO:
+ mmtpFilterSettings.filterSettings.av(getFilterAvSettings(env, settingsObj));
+ break;
+ case DemuxMmtpFilterType::PES:
+ mmtpFilterSettings.filterSettings.pesData(
+ getFilterPesDataSettings(env, settingsObj));
+ break;
+ case DemuxMmtpFilterType::RECORD:
+ mmtpFilterSettings.filterSettings.record(
+ getFilterRecordSettings(env, settingsObj));
+ break;
+ case DemuxMmtpFilterType::DOWNLOAD:
+ mmtpFilterSettings.filterSettings.download(
+ getFilterDownloadSettings(env, settingsObj));
+ break;
+ default:
+ break;
+ }
+ filterSettings.mmtp(mmtpFilterSettings);
+ break;
+ }
+ case DemuxFilterMainType::IP: {
+ DemuxIpAddress ipAddr = getDemuxIpAddress(env, filterConfigObj);
+
+ DemuxIpFilterSettings ipFilterSettings {
+ .ipAddr = ipAddr,
+ };
+ DemuxIpFilterType ipType = static_cast<DemuxIpFilterType>(subtype);
+ switch (ipType) {
+ case DemuxIpFilterType::SECTION: {
+ ipFilterSettings.filterSettings.section(
+ getFilterSectionSettings(env, settingsObj));
+ break;
+ }
+ case DemuxIpFilterType::IP: {
+ jclass clazz = env->FindClass(
+ "android/media/tv/tuner/filter/IpFilterConfiguration");
+ bool bPassthrough = static_cast<bool>(
+ env->GetBooleanField(
+ filterConfigObj, env->GetFieldID(
+ clazz, "mPassthrough", "Z")));
+ ipFilterSettings.filterSettings.bPassthrough(bPassthrough);
+ break;
+ }
+ default: {
+ break;
+ }
+ }
+ filterSettings.ip(ipFilterSettings);
+ break;
+ }
+ case DemuxFilterMainType::TLV: {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/TlvFilterConfiguration");
+ uint8_t packetType = static_cast<uint8_t>(
+ env->GetIntField(filterConfigObj, env->GetFieldID(clazz, "mPacketType", "I")));
+ bool isCompressedIpPacket = static_cast<bool>(
+ env->GetBooleanField(
+ filterConfigObj, env->GetFieldID(clazz, "mIsCompressedIpPacket", "Z")));
+
+ DemuxTlvFilterSettings tlvFilterSettings {
+ .packetType = packetType,
+ .isCompressedIpPacket = isCompressedIpPacket,
+ };
+ DemuxTlvFilterType tlvType = static_cast<DemuxTlvFilterType>(subtype);
+ switch (tlvType) {
+ case DemuxTlvFilterType::SECTION: {
+ tlvFilterSettings.filterSettings.section(
+ getFilterSectionSettings(env, settingsObj));
+ break;
+ }
+ case DemuxTlvFilterType::TLV: {
+ bool bPassthrough = static_cast<bool>(
+ env->GetBooleanField(
+ filterConfigObj, env->GetFieldID(
+ clazz, "mPassthrough", "Z")));
+ tlvFilterSettings.filterSettings.bPassthrough(bPassthrough);
+ break;
+ }
+ default: {
+ break;
+ }
+ }
+ filterSettings.tlv(tlvFilterSettings);
+ break;
+ }
+ case DemuxFilterMainType::ALP: {
+ jclass clazz = env->FindClass("android/media/tv/tuner/filter/AlpFilterConfiguration");
+ uint8_t packetType = static_cast<uint8_t>(
+ env->GetIntField(filterConfigObj, env->GetFieldID(clazz, "mPacketType", "I")));
+ DemuxAlpLengthType lengthType = static_cast<DemuxAlpLengthType>(
+ env->GetIntField(filterConfigObj, env->GetFieldID(clazz, "mLengthType", "I")));
+ DemuxAlpFilterSettings alpFilterSettings {
+ .packetType = packetType,
+ .lengthType = lengthType,
+ };
+ DemuxAlpFilterType alpType = static_cast<DemuxAlpFilterType>(subtype);
+ switch (alpType) {
+ case DemuxAlpFilterType::SECTION:
+ alpFilterSettings.filterSettings.section(
+ getFilterSectionSettings(env, settingsObj));
+ break;
+ default:
+ break;
+ }
+ filterSettings.alp(alpFilterSettings);
+ break;
+ }
+ default: {
+ break;
+ }
+ }
+ return filterSettings;
+}
+
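+// Copies up to `size` bytes from the FMQ into the Java byte array at `offset`
+// and wakes the HAL with DATA_CONSUMED; returns the number of bytes copied, or
+// 0 on failure.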
+static jint copyData(JNIEnv *env, std::unique_ptr<MQ>& mq, EventFlag* flag, jbyteArray buffer,
+ jlong offset, jlong size) {
+ ALOGD("copyData, size=%ld, offset=%ld", (long) size, (long) offset);
+
+ jlong available = mq->availableToRead();
+ ALOGD("copyData, available=%ld", (long) available);
+ size = std::min(size, available);
+
+ jboolean isCopy;
+ jbyte *dst = env->GetByteArrayElements(buffer, &isCopy);
+ ALOGD("copyData, isCopy=%d", isCopy);
+ if (dst == nullptr) {
+ jniThrowRuntimeException(env, "Failed to GetByteArrayElements");
+ return 0;
+ }
+
+ if (mq->read(reinterpret_cast<unsigned char*>(dst) + offset, size)) {
+ env->ReleaseByteArrayElements(buffer, dst, 0);
+ flag->wake(static_cast<uint32_t>(DemuxQueueNotifyBits::DATA_CONSUMED));
+ } else {
+ jniThrowRuntimeException(env, "Failed to read FMQ");
+ env->ReleaseByteArrayElements(buffer, dst, 0);
+ return 0;
+ }
+ return size;
+}
+
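+// Applies the filter configuration to the HAL filter and, on the first
+// successful call, sets up the filter's FMQ and its event flag for later reads.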
+static jint android_media_tv_Tuner_configure_filter(
+ JNIEnv *env, jobject filter, int type, int subtype, jobject settings) {
+ ALOGD("configure filter type=%d, subtype=%d", type, subtype);
+ sp<Filter> filterSp = getFilter(env, filter);
+ sp<IFilter> iFilterSp = filterSp->getIFilter();
+ if (iFilterSp == NULL) {
+ ALOGD("Failed to configure filter: filter not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ DemuxFilterSettings filterSettings = getFilterConfiguration(env, type, subtype, settings);
+ Result res = iFilterSp->configure(filterSettings);
+
+ if (res != Result::SUCCESS) {
+ return (jint) res;
+ }
+
+ MQDescriptorSync<uint8_t> filterMQDesc;
+ Result getQueueDescResult = Result::UNKNOWN_ERROR;
+ if (filterSp->mFilterMQ == NULL) {
+ iFilterSp->getQueueDesc(
+ [&](Result r, const MQDescriptorSync<uint8_t>& desc) {
+ filterMQDesc = desc;
+ getQueueDescResult = r;
+ ALOGD("getFilterQueueDesc");
+ });
+ if (getQueueDescResult == Result::SUCCESS) {
+ filterSp->mFilterMQ = std::make_unique<MQ>(filterMQDesc, true);
+ EventFlag::createEventFlag(
+ filterSp->mFilterMQ->getEventFlagWord(), &(filterSp->mFilterMQEventFlag));
+ }
+ }
+ return (jint) getQueueDescResult;
+}
+
+static jint android_media_tv_Tuner_get_filter_id(JNIEnv* env, jobject filter) {
+ sp<IFilter> iFilterSp = getFilter(env, filter)->getIFilter();
+ if (iFilterSp == NULL) {
+ ALOGD("Failed to get filter ID: filter not found");
+ return (int) Result::NOT_INITIALIZED;
+ }
+ Result res;
+ uint32_t id;
+ iFilterSp->getId(
+ [&](Result r, uint32_t filterId) {
+ res = r;
+ id = filterId;
+ });
+ if (res != Result::SUCCESS) {
+ return (jint) Constant::INVALID_FILTER_ID;
+ }
+ return (jint) id;
+}
+
+static jint android_media_tv_Tuner_set_filter_data_source(
+ JNIEnv* env, jobject filter, jobject srcFilter) {
+ sp<IFilter> iFilterSp = getFilter(env, filter)->getIFilter();
+ if (iFilterSp == NULL) {
+ ALOGD("Failed to set filter data source: filter not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ Result r;
+ if (srcFilter == NULL) {
+ r = iFilterSp->setDataSource(NULL);
+ } else {
+ sp<IFilter> srcSp = getFilter(env, srcFilter)->getIFilter();
+        if (srcSp == NULL) {
+ ALOGD("Failed to set filter data source: src filter not found");
+ return (jint) Result::INVALID_ARGUMENT;
+ }
+ r = iFilterSp->setDataSource(srcSp);
+ }
+ return (jint) r;
+}
+
+static jint android_media_tv_Tuner_start_filter(JNIEnv *env, jobject filter) {
+ sp<IFilter> iFilterSp = getFilter(env, filter)->getIFilter();
+ if (iFilterSp == NULL) {
+ ALOGD("Failed to start filter: filter not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ Result r = iFilterSp->start();
+ return (jint) r;
+}
+
+static jint android_media_tv_Tuner_stop_filter(JNIEnv *env, jobject filter) {
+ sp<IFilter> iFilterSp = getFilter(env, filter)->getIFilter();
+ if (iFilterSp == NULL) {
+ ALOGD("Failed to stop filter: filter not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ Result r = iFilterSp->stop();
+ return (jint) r;
+}
+
+static jint android_media_tv_Tuner_flush_filter(JNIEnv *env, jobject filter) {
+ sp<IFilter> iFilterSp = getFilter(env, filter)->getIFilter();
+ if (iFilterSp == NULL) {
+ ALOGD("Failed to flush filter: filter not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ Result r = iFilterSp->flush();
+ return (jint) r;
+}
+
+static jint android_media_tv_Tuner_read_filter_fmq(
+ JNIEnv *env, jobject filter, jbyteArray buffer, jlong offset, jlong size) {
+ sp<Filter> filterSp = getFilter(env, filter);
+ if (filterSp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to read filter FMQ: filter not found");
+ return 0;
+ }
+ return copyData(env, filterSp->mFilterMQ, filterSp->mFilterMQEventFlag, buffer, offset, size);
+}
+
+static jint android_media_tv_Tuner_close_filter(JNIEnv *env, jobject filter) {
+ sp<IFilter> iFilterSp = getFilter(env, filter)->getIFilter();
+ if (iFilterSp == NULL) {
+ ALOGD("Failed to close filter: filter not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ Result r = iFilterSp->close();
+ return (jint) r;
+}
+
+static sp<TimeFilter> getTimeFilter(JNIEnv *env, jobject filter) {
+ return (TimeFilter *)env->GetLongField(filter, gFields.timeFilterContext);
+}
+
+static int android_media_tv_Tuner_time_filter_set_timestamp(
+ JNIEnv *env, jobject filter, jlong timestamp) {
+ sp<TimeFilter> filterSp = getTimeFilter(env, filter);
+ if (filterSp == NULL) {
+ ALOGD("Failed set timestamp: time filter not found");
+ return (int) Result::INVALID_STATE;
+ }
+ sp<ITimeFilter> iFilterSp = filterSp->getITimeFilter();
+ Result r = iFilterSp->setTimeStamp(static_cast<uint64_t>(timestamp));
+ return (int) r;
+}
+
+static int android_media_tv_Tuner_time_filter_clear_timestamp(JNIEnv *env, jobject filter) {
+ sp<TimeFilter> filterSp = getTimeFilter(env, filter);
+ if (filterSp == NULL) {
+ ALOGD("Failed clear timestamp: time filter not found");
+ return (int) Result::INVALID_STATE;
+ }
+ sp<ITimeFilter> iFilterSp = filterSp->getITimeFilter();
+ Result r = iFilterSp->clearTimeStamp();
+ return (int) r;
+}
+
+static jobject android_media_tv_Tuner_time_filter_get_timestamp(JNIEnv *env, jobject filter) {
+ sp<TimeFilter> filterSp = getTimeFilter(env, filter);
+ if (filterSp == NULL) {
+ ALOGD("Failed get timestamp: time filter not found");
+ return NULL;
+ }
+
+ sp<ITimeFilter> iFilterSp = filterSp->getITimeFilter();
+ Result res;
+ uint64_t timestamp;
+ iFilterSp->getTimeStamp(
+ [&](Result r, uint64_t t) {
+ res = r;
+ timestamp = t;
+ });
+ if (res != Result::SUCCESS) {
+ return NULL;
+ }
+
+ jclass longClazz = env->FindClass("java/lang/Long");
+ jmethodID longInit = env->GetMethodID(longClazz, "<init>", "(J)V");
+
+ jobject longObj = env->NewObject(longClazz, longInit, static_cast<jlong>(timestamp));
+ return longObj;
+}
+
+static jobject android_media_tv_Tuner_time_filter_get_source_time(JNIEnv *env, jobject filter) {
+ sp<TimeFilter> filterSp = getTimeFilter(env, filter);
+ if (filterSp == NULL) {
+ ALOGD("Failed get source time: time filter not found");
+ return NULL;
+ }
+
+ sp<ITimeFilter> iFilterSp = filterSp->getITimeFilter();
+ Result res;
+ uint64_t timestamp;
+ iFilterSp->getSourceTime(
+ [&](Result r, uint64_t t) {
+ res = r;
+ timestamp = t;
+ });
+ if (res != Result::SUCCESS) {
+ return NULL;
+ }
+
+ jclass longClazz = env->FindClass("java/lang/Long");
+ jmethodID longInit = env->GetMethodID(longClazz, "<init>", "(J)V");
+
+ jobject longObj = env->NewObject(longClazz, longInit, static_cast<jlong>(timestamp));
+ return longObj;
+}
+
+static int android_media_tv_Tuner_time_filter_close(JNIEnv *env, jobject filter) {
+ sp<TimeFilter> filterSp = getTimeFilter(env, filter);
+ if (filterSp == NULL) {
+ ALOGD("Failed close time filter: time filter not found");
+ return (int) Result::INVALID_STATE;
+ }
+
+ Result r = filterSp->getITimeFilter()->close();
+ if (r == Result::SUCCESS) {
+ filterSp->decStrong(filter);
+ env->SetLongField(filter, gFields.timeFilterContext, 0);
+ }
+ return (int) r;
+}
+
+static jobject android_media_tv_Tuner_open_descrambler(JNIEnv *env, jobject thiz, jint) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->openDescrambler();
+}
+
+static jint android_media_tv_Tuner_descrambler_add_pid(
+ JNIEnv *env, jobject descrambler, jint pidType, jint pid, jobject filter) {
+ sp<IDescrambler> descramblerSp = getDescrambler(env, descrambler);
+ if (descramblerSp == NULL) {
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ sp<IFilter> iFilterSp = getFilter(env, filter)->getIFilter();
+ Result result = descramblerSp->addPid(getDemuxPid((int)pidType, (int)pid), iFilterSp);
+ return (jint) result;
+}
+
+static jint android_media_tv_Tuner_descrambler_remove_pid(
+ JNIEnv *env, jobject descrambler, jint pidType, jint pid, jobject filter) {
+ sp<IDescrambler> descramblerSp = getDescrambler(env, descrambler);
+ if (descramblerSp == NULL) {
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ sp<IFilter> iFilterSp = getFilter(env, filter)->getIFilter();
+ Result result = descramblerSp->removePid(getDemuxPid((int)pidType, (int)pid), iFilterSp);
+ return (jint) result;
+}
+
+static jint android_media_tv_Tuner_descrambler_set_key_token(
+ JNIEnv* env, jobject descrambler, jbyteArray keyToken) {
+ sp<IDescrambler> descramblerSp = getDescrambler(env, descrambler);
+ if (descramblerSp == NULL) {
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ int size = env->GetArrayLength(keyToken);
+ std::vector<uint8_t> v(size);
+ env->GetByteArrayRegion(keyToken, 0, size, reinterpret_cast<jbyte*>(&v[0]));
+ Result result = descramblerSp->setKeyToken(v);
+ return (jint) result;
+}
+
+static jint android_media_tv_Tuner_close_descrambler(JNIEnv* env, jobject descrambler) {
+ sp<IDescrambler> descramblerSp = getDescrambler(env, descrambler);
+ if (descramblerSp == NULL) {
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ Result r = descramblerSp->close();
+ if (r == Result::SUCCESS) {
+ descramblerSp->decStrong(descrambler);
+ }
+ return (jint) r;
+}
+
+static jobject android_media_tv_Tuner_open_dvr_recorder(
+ JNIEnv* env, jobject thiz, jlong bufferSize) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->openDvr(DvrType::RECORD, bufferSize);
+}
+
+static jobject android_media_tv_Tuner_open_dvr_playback(
+ JNIEnv* env, jobject thiz, jlong bufferSize) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->openDvr(DvrType::PLAYBACK, bufferSize);
+}
+
+static jobject android_media_tv_Tuner_get_demux_caps(JNIEnv* env, jobject thiz) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->getDemuxCaps();
+}
+
+static jint android_media_tv_Tuner_open_demux(JNIEnv* env, jobject thiz, jint /* handle */) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return (jint) tuner->openDemux();
+}
+
+static jint android_media_tv_Tuner_close_tuner(JNIEnv* env, jobject thiz) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return (jint) tuner->close();
+}
+
+static jint android_media_tv_Tuner_close_demux(JNIEnv* env, jobject thiz, jint /* handle */) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->closeDemux();
+}
+
+static jint android_media_tv_Tuner_close_frontend(JNIEnv* env, jobject thiz, jint /* handle */) {
+ sp<JTuner> tuner = getTuner(env, thiz);
+ return tuner->closeFrontend();
+}
+
+static jint android_media_tv_Tuner_attach_filter(JNIEnv *env, jobject dvr, jobject filter) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ sp<Filter> filterSp = getFilter(env, filter);
+ if (filterSp == NULL) {
+ return (jint) Result::INVALID_ARGUMENT;
+ }
+ sp<IDvr> iDvrSp = dvrSp->getIDvr();
+ sp<IFilter> iFilterSp = filterSp->getIFilter();
+ Result result = iDvrSp->attachFilter(iFilterSp);
+ return (jint) result;
+}
+
+static jint android_media_tv_Tuner_detach_filter(JNIEnv *env, jobject dvr, jobject filter) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ sp<Filter> filterSp = getFilter(env, filter);
+ if (filterSp == NULL) {
+ return (jint) Result::INVALID_ARGUMENT;
+ }
+ sp<IDvr> iDvrSp = dvrSp->getIDvr();
+ sp<IFilter> iFilterSp = filterSp->getIFilter();
+ Result result = iDvrSp->detachFilter(iFilterSp);
+ return (jint) result;
+}
+
+static jint android_media_tv_Tuner_configure_dvr(JNIEnv *env, jobject dvr, jobject settings) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ ALOGD("Failed to configure dvr: dvr not found");
+ return (int)Result::NOT_INITIALIZED;
+ }
+ sp<IDvr> iDvrSp = dvrSp->getIDvr();
+ bool isRecorder =
+ env->IsInstanceOf(dvr, env->FindClass("android/media/tv/tuner/dvr/DvrRecorder"));
+ Result result = iDvrSp->configure(getDvrSettings(env, settings, isRecorder));
+ if (result != Result::SUCCESS) {
+ return (jint) result;
+ }
+ MQDescriptorSync<uint8_t> dvrMQDesc;
+ Result getQueueDescResult = Result::UNKNOWN_ERROR;
+ iDvrSp->getQueueDesc(
+ [&](Result r, const MQDescriptorSync<uint8_t>& desc) {
+ dvrMQDesc = desc;
+ getQueueDescResult = r;
+ ALOGD("getDvrQueueDesc");
+ });
+ if (getQueueDescResult == Result::SUCCESS) {
+ dvrSp->mDvrMQ = std::make_unique<MQ>(dvrMQDesc, true);
+ EventFlag::createEventFlag(
+ dvrSp->mDvrMQ->getEventFlagWord(), &(dvrSp->mDvrMQEventFlag));
+ }
+ return (jint) getQueueDescResult;
+}
+
+static jint android_media_tv_Tuner_start_dvr(JNIEnv *env, jobject dvr) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ ALOGD("Failed to start dvr: dvr not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ sp<IDvr> iDvrSp = dvrSp->getIDvr();
+ Result result = iDvrSp->start();
+ return (jint) result;
+}
+
+static jint android_media_tv_Tuner_stop_dvr(JNIEnv *env, jobject dvr) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ ALOGD("Failed to stop dvr: dvr not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ sp<IDvr> iDvrSp = dvrSp->getIDvr();
+ Result result = iDvrSp->stop();
+ return (jint) result;
+}
+
+static jint android_media_tv_Tuner_flush_dvr(JNIEnv *env, jobject dvr) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ ALOGD("Failed to flush dvr: dvr not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ sp<IDvr> iDvrSp = dvrSp->getIDvr();
+ Result result = iDvrSp->flush();
+ return (jint) result;
+}
+
+static jint android_media_tv_Tuner_close_dvr(JNIEnv* env, jobject dvr) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ ALOGD("Failed to close dvr: dvr not found");
+ return (jint) Result::NOT_INITIALIZED;
+ }
+ return dvrSp->close();
+}
+
+static sp<Lnb> getLnb(JNIEnv *env, jobject lnb) {
+ return (Lnb *)env->GetLongField(lnb, gFields.lnbContext);
+}
+
+static jint android_media_tv_Tuner_lnb_set_voltage(JNIEnv* env, jobject lnb, jint voltage) {
+ sp<ILnb> iLnbSp = getLnb(env, lnb)->getILnb();
+ Result r = iLnbSp->setVoltage(static_cast<LnbVoltage>(voltage));
+ return (jint) r;
+}
+
+static int android_media_tv_Tuner_lnb_set_tone(JNIEnv* env, jobject lnb, jint tone) {
+ sp<ILnb> iLnbSp = getLnb(env, lnb)->getILnb();
+ Result r = iLnbSp->setTone(static_cast<LnbTone>(tone));
+ return (jint) r;
+}
+
+static int android_media_tv_Tuner_lnb_set_position(JNIEnv* env, jobject lnb, jint position) {
+ sp<ILnb> iLnbSp = getLnb(env, lnb)->getILnb();
+ Result r = iLnbSp->setSatellitePosition(static_cast<LnbPosition>(position));
+ return (jint) r;
+}
+
+static int android_media_tv_Tuner_lnb_send_diseqc_msg(JNIEnv* env, jobject lnb, jbyteArray msg) {
+ sp<ILnb> iLnbSp = getLnb(env, lnb)->getILnb();
+ int size = env->GetArrayLength(msg);
+ std::vector<uint8_t> v(size);
+ env->GetByteArrayRegion(msg, 0, size, reinterpret_cast<jbyte*>(&v[0]));
+ Result r = iLnbSp->sendDiseqcMessage(v);
+ return (jint) r;
+}
+
+static int android_media_tv_Tuner_close_lnb(JNIEnv* env, jobject lnb) {
+ sp<Lnb> lnbSp = getLnb(env, lnb);
+ Result r = lnbSp->getILnb()->close();
+ if (r == Result::SUCCESS) {
+ lnbSp->decStrong(lnb);
+ env->SetLongField(lnb, gFields.lnbContext, 0);
+ }
+ return (jint) r;
+}
+
+static void android_media_tv_Tuner_dvr_set_fd(JNIEnv *env, jobject dvr, jint fd) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+    if (dvrSp == NULL) {
+        ALOGD("Failed to set FD for dvr: dvr not found");
+        return;
+    }
+ dvrSp->mFd = (int) fd;
+ ALOGD("set fd = %d", dvrSp->mFd);
+}
+
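+// Reads up to `size` bytes from the DVR file descriptor into the DVR FMQ,
+// handling the wrap-around case by writing into both regions of the queue.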
+static jlong android_media_tv_Tuner_read_dvr(JNIEnv *env, jobject dvr, jlong size) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to read dvr: dvr not found");
+ return 0;
+    }
+    if (dvrSp->mDvrMQ == NULL) {
+        jniThrowException(env, "java/lang/IllegalStateException",
+                "Failed to read dvr: dvr not configured");
+        return 0;
+    }
+
+    long available = dvrSp->mDvrMQ->availableToWrite();
+ long write = std::min((long) size, available);
+
+ MQ::MemTransaction tx;
+ long ret = 0;
+ if (dvrSp->mDvrMQ->beginWrite(write, &tx)) {
+ auto first = tx.getFirstRegion();
+ auto data = first.getAddress();
+ long length = first.getLength();
+ long firstToWrite = std::min(length, write);
+ ret = read(dvrSp->mFd, data, firstToWrite);
+
+ if (ret < 0) {
+ ALOGE("[DVR] Failed to read from FD: %s", strerror(errno));
+ jniThrowRuntimeException(env, strerror(errno));
+ return 0;
+ }
+ if (ret < firstToWrite) {
+ ALOGW("[DVR] file to MQ, first region: %ld bytes to write, but %ld bytes written",
+ firstToWrite, ret);
+ } else if (firstToWrite < write) {
+ ALOGD("[DVR] write second region: %ld bytes written, %ld bytes in total", ret, write);
+ auto second = tx.getSecondRegion();
+ data = second.getAddress();
+ length = second.getLength();
+            long secondToWrite = std::min(length, write - firstToWrite);
+ ret += read(dvrSp->mFd, data, secondToWrite);
+ }
+ ALOGD("[DVR] file to MQ: %ld bytes need to be written, %ld bytes written", write, ret);
+ if (!dvrSp->mDvrMQ->commitWrite(ret)) {
+ ALOGE("[DVR] Error: failed to commit write!");
+ return 0;
+ }
+
+ } else {
+ ALOGE("dvrMq.beginWrite failed");
+ }
+ return (jlong) ret;
+}
+
+static jlong android_media_tv_Tuner_read_dvr_from_array(
+ JNIEnv* env, jobject dvr, jbyteArray buffer, jlong offset, jlong size) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ ALOGW("Failed to read dvr: dvr not found");
+ return 0;
+ }
+ if (dvrSp->mDvrMQ == NULL) {
+ ALOGW("Failed to read dvr: dvr not configured");
+ return 0;
+ }
+
+ jlong available = dvrSp->mDvrMQ->availableToWrite();
+ size = std::min(size, available);
+
+ jboolean isCopy;
+ jbyte *src = env->GetByteArrayElements(buffer, &isCopy);
+ if (src == nullptr) {
+ ALOGD("Failed to GetByteArrayElements");
+ return 0;
+ }
+
+ if (dvrSp->mDvrMQ->write(reinterpret_cast<unsigned char*>(src) + offset, size)) {
+ env->ReleaseByteArrayElements(buffer, src, 0);
+ dvrSp->mDvrMQEventFlag->wake(static_cast<uint32_t>(DemuxQueueNotifyBits::DATA_CONSUMED));
+ } else {
+ ALOGD("Failed to write FMQ");
+ env->ReleaseByteArrayElements(buffer, src, 0);
+ return 0;
+ }
+ return size;
+}
+
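+// Drains up to `size` bytes from the DVR FMQ to the DVR file descriptor,
+// handling the wrap-around case by reading from both regions of the queue.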
+static jlong android_media_tv_Tuner_write_dvr(JNIEnv *env, jobject dvr, jlong size) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to write dvr: dvr not found");
+ return 0;
+ }
+
+ if (dvrSp->mDvrMQ == NULL) {
+ jniThrowException(env, "java/lang/IllegalStateException",
+ "Failed to write dvr: dvr not configured");
+ return 0;
+ }
+
+ MQ& dvrMq = dvrSp->getDvrMQ();
+
+ long available = dvrMq.availableToRead();
+ long toRead = std::min((long) size, available);
+
+ long ret = 0;
+ MQ::MemTransaction tx;
+ if (dvrMq.beginRead(toRead, &tx)) {
+ auto first = tx.getFirstRegion();
+ auto data = first.getAddress();
+ long length = first.getLength();
+ long firstToRead = std::min(length, toRead);
+ ret = write(dvrSp->mFd, data, firstToRead);
+
+ if (ret < 0) {
+ ALOGE("[DVR] Failed to write to FD: %s", strerror(errno));
+ jniThrowRuntimeException(env, strerror(errno));
+ return 0;
+ }
+ if (ret < firstToRead) {
+ ALOGW("[DVR] MQ to file: %ld bytes read, but %ld bytes written", firstToRead, ret);
+ } else if (firstToRead < toRead) {
+ ALOGD("[DVR] read second region: %ld bytes read, %ld bytes in total", ret, toRead);
+ auto second = tx.getSecondRegion();
+ data = second.getAddress();
+ length = second.getLength();
+            long secondToRead = toRead - firstToRead;
+ ret += write(dvrSp->mFd, data, secondToRead);
+ }
+ ALOGD("[DVR] MQ to file: %ld bytes to be read, %ld bytes written", toRead, ret);
+ if (!dvrMq.commitRead(ret)) {
+ ALOGE("[DVR] Error: failed to commit read!");
+ return 0;
+ }
+
+ } else {
+ ALOGE("dvrMq.beginRead failed");
+ }
+
+ return (jlong) ret;
+}
+
+static jlong android_media_tv_Tuner_write_dvr_to_array(
+ JNIEnv *env, jobject dvr, jbyteArray buffer, jlong offset, jlong size) {
+ sp<Dvr> dvrSp = getDvr(env, dvr);
+ if (dvrSp == NULL) {
+ ALOGW("Failed to write dvr: dvr not found");
+ return 0;
+ }
+ if (dvrSp->mDvrMQ == NULL) {
+ ALOGW("Failed to write dvr: dvr not configured");
+ return 0;
+ }
+ return copyData(env, dvrSp->mDvrMQ, dvrSp->mDvrMQEventFlag, buffer, offset, size);
+}
+
+static sp<MediaEvent> getMediaEventSp(JNIEnv *env, jobject mediaEventObj) {
+ return (MediaEvent *)env->GetLongField(mediaEventObj, gFields.mediaEventContext);
+}
+
+static jobject android_media_tv_Tuner_media_event_get_linear_block(
+ JNIEnv* env, jobject mediaEventObj) {
+ sp<MediaEvent> mediaEventSp = getMediaEventSp(env, mediaEventObj);
+ if (mediaEventSp == NULL) {
+ ALOGD("Failed get MediaEvent");
+ return NULL;
+ }
+
+ return mediaEventSp->getLinearBlock();
+}
+
+static jobject android_media_tv_Tuner_media_event_get_audio_handle(
+ JNIEnv* env, jobject mediaEventObj) {
+ sp<MediaEvent> mediaEventSp = getMediaEventSp(env, mediaEventObj);
+ if (mediaEventSp == NULL) {
+ ALOGD("Failed get MediaEvent");
+ return NULL;
+ }
+
+ android::Mutex::Autolock autoLock(mediaEventSp->mLock);
+ uint64_t audioHandle = mediaEventSp->getAudioHandle();
+ jclass longClazz = env->FindClass("java/lang/Long");
+ jmethodID longInit = env->GetMethodID(longClazz, "<init>", "(J)V");
+
+ jobject longObj = env->NewObject(longClazz, longInit, static_cast<jlong>(audioHandle));
+ return longObj;
+}
+
+static void android_media_tv_Tuner_media_event_finalize(JNIEnv* env, jobject mediaEventObj) {
+ sp<MediaEvent> mediaEventSp = getMediaEventSp(env, mediaEventObj);
+ if (mediaEventSp == NULL) {
+ ALOGD("Failed get MediaEvent");
+ return;
+ }
+
+ android::Mutex::Autolock autoLock(mediaEventSp->mLock);
+ mediaEventSp->mAvHandleRefCnt--;
+ mediaEventSp->finalize();
+
+ mediaEventSp->decStrong(mediaEventObj);
+}
+
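+// JNI method tables mapping the Java native method declarations to the
+// implementations above.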
+static const JNINativeMethod gTunerMethods[] = {
+ { "nativeInit", "()V", (void *)android_media_tv_Tuner_native_init },
+ { "nativeSetup", "()V", (void *)android_media_tv_Tuner_native_setup },
+ { "nativeGetFrontendIds", "()Ljava/util/List;",
+ (void *)android_media_tv_Tuner_get_frontend_ids },
+ { "nativeOpenFrontendByHandle", "(I)Landroid/media/tv/tuner/Tuner$Frontend;",
+ (void *)android_media_tv_Tuner_open_frontend_by_handle },
+ { "nativeCloseFrontendByHandle", "(I)I",
+ (void *)android_media_tv_Tuner_close_frontend_by_handle },
+ { "nativeTune", "(ILandroid/media/tv/tuner/frontend/FrontendSettings;)I",
+ (void *)android_media_tv_Tuner_tune },
+ { "nativeStopTune", "()I", (void *)android_media_tv_Tuner_stop_tune },
+ { "nativeScan", "(ILandroid/media/tv/tuner/frontend/FrontendSettings;I)I",
+ (void *)android_media_tv_Tuner_scan },
+ { "nativeStopScan", "()I", (void *)android_media_tv_Tuner_stop_scan },
+ { "nativeSetLnb", "(I)I", (void *)android_media_tv_Tuner_set_lnb },
+ { "nativeSetLna", "(Z)I", (void *)android_media_tv_Tuner_set_lna },
+ { "nativeGetFrontendStatus", "([I)Landroid/media/tv/tuner/frontend/FrontendStatus;",
+ (void *)android_media_tv_Tuner_get_frontend_status },
+ { "nativeGetAvSyncHwId", "(Landroid/media/tv/tuner/filter/Filter;)Ljava/lang/Integer;",
+ (void *)android_media_tv_Tuner_get_av_sync_hw_id },
+ { "nativeGetAvSyncTime", "(I)Ljava/lang/Long;",
+ (void *)android_media_tv_Tuner_get_av_sync_time },
+ { "nativeConnectCiCam", "(I)I", (void *)android_media_tv_Tuner_connect_cicam },
+ { "nativeDisconnectCiCam", "()I", (void *)android_media_tv_Tuner_disconnect_cicam },
+ { "nativeGetFrontendInfo", "(I)Landroid/media/tv/tuner/frontend/FrontendInfo;",
+ (void *)android_media_tv_Tuner_get_frontend_info },
+ { "nativeOpenFilter", "(IIJ)Landroid/media/tv/tuner/filter/Filter;",
+ (void *)android_media_tv_Tuner_open_filter },
+ { "nativeOpenTimeFilter", "()Landroid/media/tv/tuner/filter/TimeFilter;",
+ (void *)android_media_tv_Tuner_open_time_filter },
+ { "nativeGetLnbIds", "()[I", (void *)android_media_tv_Tuner_get_lnb_ids },
+ { "nativeOpenLnbByHandle", "(I)Landroid/media/tv/tuner/Lnb;",
+ (void *)android_media_tv_Tuner_open_lnb_by_handle },
+ { "nativeOpenLnbByName", "(Ljava/lang/String;)Landroid/media/tv/tuner/Lnb;",
+ (void *)android_media_tv_Tuner_open_lnb_by_name },
+ { "nativeOpenDescramblerByHandle", "(I)Landroid/media/tv/tuner/Descrambler;",
+ (void *)android_media_tv_Tuner_open_descrambler },
+ { "nativeOpenDvrRecorder", "(J)Landroid/media/tv/tuner/dvr/DvrRecorder;",
+ (void *)android_media_tv_Tuner_open_dvr_recorder },
+ { "nativeOpenDvrPlayback", "(J)Landroid/media/tv/tuner/dvr/DvrPlayback;",
+ (void *)android_media_tv_Tuner_open_dvr_playback },
+ { "nativeGetDemuxCapabilities", "()Landroid/media/tv/tuner/DemuxCapabilities;",
+ (void *)android_media_tv_Tuner_get_demux_caps },
+ { "nativeOpenDemuxByhandle", "(I)I", (void *)android_media_tv_Tuner_open_demux },
+ { "nativeClose", "()I", (void *)android_media_tv_Tuner_close_tuner },
+ { "nativeCloseFrontend", "(I)I", (void *)android_media_tv_Tuner_close_frontend },
+ { "nativeCloseDemux", "(I)I", (void *)android_media_tv_Tuner_close_demux },
+};
+
+static const JNINativeMethod gFilterMethods[] = {
+ { "nativeConfigureFilter", "(IILandroid/media/tv/tuner/filter/FilterConfiguration;)I",
+ (void *)android_media_tv_Tuner_configure_filter },
+ { "nativeGetId", "()I", (void *)android_media_tv_Tuner_get_filter_id },
+ { "nativeSetDataSource", "(Landroid/media/tv/tuner/filter/Filter;)I",
+ (void *)android_media_tv_Tuner_set_filter_data_source },
+ { "nativeStartFilter", "()I", (void *)android_media_tv_Tuner_start_filter },
+ { "nativeStopFilter", "()I", (void *)android_media_tv_Tuner_stop_filter },
+ { "nativeFlushFilter", "()I", (void *)android_media_tv_Tuner_flush_filter },
+ { "nativeRead", "([BJJ)I", (void *)android_media_tv_Tuner_read_filter_fmq },
+ { "nativeClose", "()I", (void *)android_media_tv_Tuner_close_filter },
+};
+
+static const JNINativeMethod gTimeFilterMethods[] = {
+ { "nativeSetTimestamp", "(J)I", (void *)android_media_tv_Tuner_time_filter_set_timestamp },
+ { "nativeClearTimestamp", "()I", (void *)android_media_tv_Tuner_time_filter_clear_timestamp },
+ { "nativeGetTimestamp", "()Ljava/lang/Long;",
+ (void *)android_media_tv_Tuner_time_filter_get_timestamp },
+ { "nativeGetSourceTime", "()Ljava/lang/Long;",
+ (void *)android_media_tv_Tuner_time_filter_get_source_time },
+ { "nativeClose", "()I", (void *)android_media_tv_Tuner_time_filter_close },
+};
+
+static const JNINativeMethod gDescramblerMethods[] = {
+ { "nativeAddPid", "(IILandroid/media/tv/tuner/filter/Filter;)I",
+ (void *)android_media_tv_Tuner_descrambler_add_pid },
+ { "nativeRemovePid", "(IILandroid/media/tv/tuner/filter/Filter;)I",
+ (void *)android_media_tv_Tuner_descrambler_remove_pid },
+ { "nativeSetKeyToken", "([B)I", (void *)android_media_tv_Tuner_descrambler_set_key_token },
+ { "nativeClose", "()I", (void *)android_media_tv_Tuner_close_descrambler },
+};
+
+static const JNINativeMethod gDvrRecorderMethods[] = {
+ { "nativeAttachFilter", "(Landroid/media/tv/tuner/filter/Filter;)I",
+ (void *)android_media_tv_Tuner_attach_filter },
+ { "nativeDetachFilter", "(Landroid/media/tv/tuner/filter/Filter;)I",
+ (void *)android_media_tv_Tuner_detach_filter },
+ { "nativeConfigureDvr", "(Landroid/media/tv/tuner/dvr/DvrSettings;)I",
+ (void *)android_media_tv_Tuner_configure_dvr },
+ { "nativeStartDvr", "()I", (void *)android_media_tv_Tuner_start_dvr },
+ { "nativeStopDvr", "()I", (void *)android_media_tv_Tuner_stop_dvr },
+ { "nativeFlushDvr", "()I", (void *)android_media_tv_Tuner_flush_dvr },
+ { "nativeClose", "()I", (void *)android_media_tv_Tuner_close_dvr },
+ { "nativeSetFileDescriptor", "(I)V", (void *)android_media_tv_Tuner_dvr_set_fd },
+ { "nativeWrite", "(J)J", (void *)android_media_tv_Tuner_write_dvr },
+ { "nativeWrite", "([BJJ)J", (void *)android_media_tv_Tuner_write_dvr_to_array },
+};
+
+static const JNINativeMethod gDvrPlaybackMethods[] = {
+ { "nativeAttachFilter", "(Landroid/media/tv/tuner/filter/Filter;)I",
+ (void *)android_media_tv_Tuner_attach_filter },
+ { "nativeDetachFilter", "(Landroid/media/tv/tuner/filter/Filter;)I",
+ (void *)android_media_tv_Tuner_detach_filter },
+ { "nativeConfigureDvr", "(Landroid/media/tv/tuner/dvr/DvrSettings;)I",
+ (void *)android_media_tv_Tuner_configure_dvr },
+ { "nativeStartDvr", "()I", (void *)android_media_tv_Tuner_start_dvr },
+ { "nativeStopDvr", "()I", (void *)android_media_tv_Tuner_stop_dvr },
+ { "nativeFlushDvr", "()I", (void *)android_media_tv_Tuner_flush_dvr },
+ { "nativeClose", "()I", (void *)android_media_tv_Tuner_close_dvr },
+ { "nativeSetFileDescriptor", "(I)V", (void *)android_media_tv_Tuner_dvr_set_fd },
+ { "nativeRead", "(J)J", (void *)android_media_tv_Tuner_read_dvr },
+ { "nativeRead", "([BJJ)J", (void *)android_media_tv_Tuner_read_dvr_from_array },
+};
+
+static const JNINativeMethod gLnbMethods[] = {
+ { "nativeSetVoltage", "(I)I", (void *)android_media_tv_Tuner_lnb_set_voltage },
+ { "nativeSetTone", "(I)I", (void *)android_media_tv_Tuner_lnb_set_tone },
+ { "nativeSetSatellitePosition", "(I)I", (void *)android_media_tv_Tuner_lnb_set_position },
+ { "nativeSendDiseqcMessage", "([B)I", (void *)android_media_tv_Tuner_lnb_send_diseqc_msg },
+ { "nativeClose", "()I", (void *)android_media_tv_Tuner_close_lnb },
+};
+
+static const JNINativeMethod gMediaEventMethods[] = {
+ { "nativeGetLinearBlock", "()Landroid/media/MediaCodec$LinearBlock;",
+ (void *)android_media_tv_Tuner_media_event_get_linear_block },
+ { "nativeGetAudioHandle", "()Ljava/lang/Long;",
+ (void *)android_media_tv_Tuner_media_event_get_audio_handle },
+ { "nativeFinalize", "()V",
+ (void *)android_media_tv_Tuner_media_event_finalize },
+};
+
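+// Registers every method table with its Java class; a single failure aborts
+// tuner JNI registration.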
+static bool register_android_media_tv_Tuner(JNIEnv *env) {
+ if (AndroidRuntime::registerNativeMethods(
+ env, "android/media/tv/tuner/Tuner", gTunerMethods, NELEM(gTunerMethods)) != JNI_OK) {
+ ALOGE("Failed to register tuner native methods");
+ return false;
+ }
+ if (AndroidRuntime::registerNativeMethods(
+ env, "android/media/tv/tuner/filter/Filter",
+ gFilterMethods,
+ NELEM(gFilterMethods)) != JNI_OK) {
+ ALOGE("Failed to register filter native methods");
+ return false;
+ }
+ if (AndroidRuntime::registerNativeMethods(
+ env, "android/media/tv/tuner/filter/TimeFilter",
+ gTimeFilterMethods,
+ NELEM(gTimeFilterMethods)) != JNI_OK) {
+ ALOGE("Failed to register time filter native methods");
+ return false;
+ }
+ if (AndroidRuntime::registerNativeMethods(
+ env, "android/media/tv/tuner/Descrambler",
+ gDescramblerMethods,
+ NELEM(gDescramblerMethods)) != JNI_OK) {
+ ALOGE("Failed to register descrambler native methods");
+ return false;
+ }
+ if (AndroidRuntime::registerNativeMethods(
+ env, "android/media/tv/tuner/dvr/DvrRecorder",
+ gDvrRecorderMethods,
+ NELEM(gDvrRecorderMethods)) != JNI_OK) {
+ ALOGE("Failed to register dvr recorder native methods");
+ return false;
+ }
+ if (AndroidRuntime::registerNativeMethods(
+ env, "android/media/tv/tuner/dvr/DvrPlayback",
+ gDvrPlaybackMethods,
+ NELEM(gDvrPlaybackMethods)) != JNI_OK) {
+ ALOGE("Failed to register dvr playback native methods");
+ return false;
+ }
+ if (AndroidRuntime::registerNativeMethods(
+ env, "android/media/tv/tuner/Lnb",
+ gLnbMethods,
+ NELEM(gLnbMethods)) != JNI_OK) {
+ ALOGE("Failed to register lnb native methods");
+ return false;
+ }
+ if (AndroidRuntime::registerNativeMethods(
+ env, "android/media/tv/tuner/filter/MediaEvent",
+ gMediaEventMethods,
+ NELEM(gMediaEventMethods)) != JNI_OK) {
+ ALOGE("Failed to register MediaEvent native methods");
+ return false;
+ }
+ return true;
+}
+
+jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
+{
+ JNIEnv* env = NULL;
+ jint result = -1;
+
+ if (vm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+ ALOGE("ERROR: GetEnv failed\n");
+ return result;
+ }
+ assert(env != NULL);
+
+ if (!register_android_media_tv_Tuner(env)) {
+ ALOGE("ERROR: Tuner native registration failed\n");
+ return result;
+ }
+ return JNI_VERSION_1_4;
+}
diff --git a/media/jni/android_media_tv_Tuner.h b/media/jni/android_media_tv_Tuner.h
new file mode 100644
index 000000000000..83e9db796363
--- /dev/null
+++ b/media/jni/android_media_tv_Tuner.h
@@ -0,0 +1,259 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef _ANDROID_MEDIA_TV_TUNER_H_
+#define _ANDROID_MEDIA_TV_TUNER_H_
+
+#include <android/hardware/tv/tuner/1.0/ITuner.h>
+#include <C2BlockInternal.h>
+#include <C2HandleIonInternal.h>
+#include <C2ParamDef.h>
+#include <fmq/MessageQueue.h>
+#include <fstream>
+#include <string>
+#include <unordered_map>
+#include <utils/Mutex.h>
+#include <utils/RefBase.h>
+
+#include "jni.h"
+
+using ::android::hardware::EventFlag;
+using ::android::hardware::MQDescriptorSync;
+using ::android::hardware::MessageQueue;
+using ::android::hardware::Return;
+using ::android::hardware::hidl_handle;
+using ::android::hardware::hidl_vec;
+using ::android::hardware::kSynchronizedReadWrite;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterEvent;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterStatus;
+using ::android::hardware::tv::tuner::V1_0::DemuxFilterType;
+using ::android::hardware::tv::tuner::V1_0::DemuxPid;
+using ::android::hardware::tv::tuner::V1_0::DvrType;
+using ::android::hardware::tv::tuner::V1_0::FrontendEventType;
+using ::android::hardware::tv::tuner::V1_0::FrontendId;
+using ::android::hardware::tv::tuner::V1_0::FrontendInfo;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanMessage;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanMessageType;
+using ::android::hardware::tv::tuner::V1_0::FrontendScanType;
+using ::android::hardware::tv::tuner::V1_0::FrontendSettings;
+using ::android::hardware::tv::tuner::V1_0::IDemux;
+using ::android::hardware::tv::tuner::V1_0::IDescrambler;
+using ::android::hardware::tv::tuner::V1_0::IDvr;
+using ::android::hardware::tv::tuner::V1_0::IDvrCallback;
+using ::android::hardware::tv::tuner::V1_0::IFilter;
+using ::android::hardware::tv::tuner::V1_0::IFilterCallback;
+using ::android::hardware::tv::tuner::V1_0::IFrontend;
+using ::android::hardware::tv::tuner::V1_0::IFrontendCallback;
+using ::android::hardware::tv::tuner::V1_0::ILnb;
+using ::android::hardware::tv::tuner::V1_0::ILnbCallback;
+using ::android::hardware::tv::tuner::V1_0::ITimeFilter;
+using ::android::hardware::tv::tuner::V1_0::ITuner;
+using ::android::hardware::tv::tuner::V1_0::LnbEventType;
+using ::android::hardware::tv::tuner::V1_0::LnbId;
+using ::android::hardware::tv::tuner::V1_0::PlaybackStatus;
+using ::android::hardware::tv::tuner::V1_0::RecordStatus;
+using ::android::hardware::tv::tuner::V1_0::Result;
+
+using MQ = MessageQueue<uint8_t, kSynchronizedReadWrite>;
+
+namespace android {
+
+struct LnbCallback : public ILnbCallback {
+ LnbCallback(jweak tunerObj, LnbId id);
+ virtual Return<void> onEvent(LnbEventType lnbEventType);
+ virtual Return<void> onDiseqcMessage(const hidl_vec<uint8_t>& diseqcMessage);
+ jweak mLnb;
+ LnbId mId;
+};
+
+struct Lnb : public RefBase {
+ Lnb(sp<ILnb> sp, jobject obj);
+ ~Lnb();
+ sp<ILnb> getILnb();
+ sp<ILnb> mLnbSp;
+ jweak mLnbObj;
+};
+
+struct DvrCallback : public IDvrCallback {
+ ~DvrCallback();
+ virtual Return<void> onRecordStatus(RecordStatus status);
+ virtual Return<void> onPlaybackStatus(PlaybackStatus status);
+
+ void setDvr(const jobject dvr);
+private:
+ jweak mDvr;
+};
+
+struct Dvr : public RefBase {
+ Dvr(sp<IDvr> sp, jweak obj);
+ ~Dvr();
+ jint close();
+ MQ& getDvrMQ();
+ sp<IDvr> getIDvr();
+ sp<IDvr> mDvrSp;
+ jweak mDvrObj;
+ std::unique_ptr<MQ> mDvrMQ;
+ EventFlag* mDvrMQEventFlag;
+ std::string mFilePath;
+ int mFd;
+};
+
+struct MediaEvent : public RefBase {
+ MediaEvent(sp<IFilter> iFilter, hidl_handle avHandle, uint64_t dataId,
+ uint64_t dataLength, jobject obj);
+ ~MediaEvent();
+ jobject getLinearBlock();
+ uint64_t getAudioHandle();
+ void finalize();
+
+ sp<IFilter> mIFilter;
+ native_handle_t* mAvHandle;
+ uint64_t mDataId;
+ uint64_t mDataLength;
+ uint8_t* mBuffer;
+ android::Mutex mLock;
+ int mDataIdRefCnt;
+ int mAvHandleRefCnt;
+ jweak mMediaEventObj;
+ jweak mLinearBlockObj;
+ C2HandleIon* mIonHandle;
+ std::weak_ptr<C2Buffer> mC2Buffer;
+};
+
+struct Filter : public RefBase {
+ Filter(sp<IFilter> sp, jobject obj);
+ ~Filter();
+ int close();
+ sp<IFilter> getIFilter();
+ sp<IFilter> mFilterSp;
+ std::unique_ptr<MQ> mFilterMQ;
+ EventFlag* mFilterMQEventFlag;
+ jweak mFilterObj;
+};
+
+struct FilterCallback : public IFilterCallback {
+ ~FilterCallback();
+ virtual Return<void> onFilterEvent(const DemuxFilterEvent& filterEvent);
+ virtual Return<void> onFilterStatus(const DemuxFilterStatus status);
+
+ void setFilter(const sp<Filter> filter);
+private:
+ jweak mFilter;
+ sp<IFilter> mIFilter;
+ jobjectArray getSectionEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events);
+ jobjectArray getMediaEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events);
+ jobjectArray getPesEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events);
+ jobjectArray getTsRecordEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events);
+ jobjectArray getMmtpRecordEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events);
+ jobjectArray getDownloadEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events);
+ jobjectArray getIpPayloadEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events);
+ jobjectArray getTemiEvent(
+ jobjectArray& arr, const std::vector<DemuxFilterEvent::Event>& events);
+};
+
+struct FrontendCallback : public IFrontendCallback {
+ FrontendCallback(jweak tunerObj, FrontendId id);
+
+ virtual Return<void> onEvent(FrontendEventType frontendEventType);
+ virtual Return<void> onScanMessage(
+ FrontendScanMessageType type, const FrontendScanMessage& message);
+
+ jweak mObject;
+ FrontendId mId;
+};
+
+struct TimeFilter : public RefBase {
+ TimeFilter(sp<ITimeFilter> sp, jweak obj);
+ ~TimeFilter();
+ sp<ITimeFilter> getITimeFilter();
+ sp<ITimeFilter> mTimeFilterSp;
+ jweak mTimeFilterObj;
+};
+
+struct JTuner : public RefBase {
+ JTuner(JNIEnv *env, jobject thiz);
+ sp<ITuner> getTunerService();
+ jobject getAvSyncHwId(sp<Filter> filter);
+ jobject getAvSyncTime(jint id);
+ int connectCiCam(jint id);
+ int disconnectCiCam();
+ jobject getFrontendIds();
+ jobject openFrontendById(int id);
+ jint closeFrontendById(int id);
+ jobject getFrontendInfo(int id);
+ int tune(const FrontendSettings& settings);
+ int stopTune();
+ int scan(const FrontendSettings& settings, FrontendScanType scanType);
+ int stopScan();
+ int setLnb(int id);
+ int setLna(bool enable);
+ jintArray getLnbIds();
+ jobject openLnbById(int id);
+ jobject openLnbByName(jstring name);
+ jobject openFilter(DemuxFilterType type, int bufferSize);
+ jobject openTimeFilter();
+ jobject openDescrambler();
+ jobject openDvr(DvrType type, jlong bufferSize);
+ jobject getDemuxCaps();
+ jobject getFrontendStatus(jintArray types);
+ Result openDemux();
+ jint close();
+ jint closeFrontend();
+ jint closeDemux();
+
+protected:
+ virtual ~JTuner();
+
+private:
+ jclass mClass;
+ jweak mObject;
+ static sp<ITuner> mTuner;
+ hidl_vec<FrontendId> mFeIds;
+ sp<IFrontend> mFe;
+ int mFeId;
+ hidl_vec<LnbId> mLnbIds;
+ sp<ILnb> mLnb;
+ sp<IDemux> mDemux;
+ uint32_t mDemuxId;
+ static jobject getAnalogFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps);
+ static jobject getAtsc3FrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps);
+ static jobject getAtscFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps);
+ static jobject getDvbcFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps);
+ static jobject getDvbsFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps);
+ static jobject getDvbtFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps);
+ static jobject getIsdbs3FrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps);
+ static jobject getIsdbsFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps);
+ static jobject getIsdbtFrontendCaps(JNIEnv *env, FrontendInfo::FrontendCapabilities& caps);
+};
+
+class C2DataIdInfo : public C2Param {
+public:
+ C2DataIdInfo(uint32_t index, uint64_t value);
+private:
+ typedef C2GlobalParam<C2Info, C2Int64Value, 0> DummyInfo;
+ static const size_t kParamSize = sizeof(DummyInfo);
+};
+
+} // namespace android
+
+#endif // _ANDROID_MEDIA_TV_TUNER_H_
diff --git a/media/jni/android_mtp_MtpDatabase.cpp b/media/jni/android_mtp_MtpDatabase.cpp
index 0a3b47b6f901..17189fd08688 100644
--- a/media/jni/android_mtp_MtpDatabase.cpp
+++ b/media/jni/android_mtp_MtpDatabase.cpp
@@ -820,7 +820,10 @@ MtpResponseCode MtpDatabase::getObjectInfo(MtpObjectHandle handle,
switch (info.mFormat) {
case MTP_FORMAT_EXIF_JPEG:
case MTP_FORMAT_HEIF:
- case MTP_FORMAT_JFIF: {
+ case MTP_FORMAT_JFIF:
+ case MTP_FORMAT_PNG:
+ case MTP_FORMAT_BMP:
+ case MTP_FORMAT_GIF: {
env = AndroidRuntime::getJNIEnv();
if (env->CallBooleanMethod(
mDatabase, method_getThumbnailInfo, (jint)handle, mLongBuffer)) {
@@ -881,7 +884,10 @@ void* MtpDatabase::getThumbnail(MtpObjectHandle handle, size_t& outThumbSize) {
switch (format) {
case MTP_FORMAT_EXIF_JPEG:
case MTP_FORMAT_HEIF:
- case MTP_FORMAT_JFIF: {
+ case MTP_FORMAT_JFIF:
+ case MTP_FORMAT_PNG:
+ case MTP_FORMAT_BMP:
+ case MTP_FORMAT_GIF: {
JNIEnv* env = AndroidRuntime::getJNIEnv();
jbyteArray thumbData = (jbyteArray) env->CallObjectMethod(
mDatabase, method_getThumbnailData, (jint)handle);
diff --git a/media/jni/audioeffect/android_media_AudioEffect.cpp b/media/jni/audioeffect/android_media_AudioEffect.cpp
index 501e47febf7f..96961ac21a2d 100644
--- a/media/jni/audioeffect/android_media_AudioEffect.cpp
+++ b/media/jni/audioeffect/android_media_AudioEffect.cpp
@@ -270,7 +270,7 @@ static jint
android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_this,
jstring type, jstring uuid, jint priority, jint sessionId,
jint deviceType, jstring deviceAddress,
- jintArray jId, jobjectArray javadesc, jstring opPackageName)
+ jintArray jId, jobjectArray javadesc, jstring opPackageName, jboolean probe)
{
ALOGV("android_media_AudioEffect_native_setup");
AudioEffectJniStorage* lpJniStorage = NULL;
@@ -350,7 +350,8 @@ android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_t
&lpJniStorage->mCallbackData,
(audio_session_t) sessionId,
AUDIO_IO_HANDLE_NONE,
- device);
+ device,
+ probe);
lStatus = AudioEffectJni::translateNativeErrorToJava(lpAudioEffect->initCheck());
if (lStatus != AUDIOEFFECT_SUCCESS && lStatus != AUDIOEFFECT_ERROR_ALREADY_EXISTS) {
ALOGE("AudioEffect initCheck failed %d", lStatus);
@@ -387,7 +388,13 @@ android_media_AudioEffect_native_setup(JNIEnv *env, jobject thiz, jobject weak_t
env->SetObjectArrayElement(javadesc, 0, jdesc);
env->DeleteLocalRef(jdesc);
- setAudioEffect(env, thiz, lpAudioEffect);
+ // In probe mode, release the native object and clear our strong reference
+ // to force all method calls from JAVA to be rejected.
+ if (probe) {
+ setAudioEffect(env, thiz, 0);
+ } else {
+ setAudioEffect(env, thiz, lpAudioEffect);
+ }
env->SetLongField(thiz, fields.fidJniData, (jlong)lpJniStorage);
@@ -766,7 +773,7 @@ android_media_AudioEffect_native_queryPreProcessings(JNIEnv *env, jclass clazz _
// Dalvik VM type signatures
static const JNINativeMethod gMethods[] = {
{"native_init", "()V", (void *)android_media_AudioEffect_native_init},
- {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;IIILjava/lang/String;[I[Ljava/lang/Object;Ljava/lang/String;)I",
+ {"native_setup", "(Ljava/lang/Object;Ljava/lang/String;Ljava/lang/String;IIILjava/lang/String;[I[Ljava/lang/Object;Ljava/lang/String;Z)I",
(void *)android_media_AudioEffect_native_setup},
{"native_finalize", "()V", (void *)android_media_AudioEffect_native_finalize},
{"native_release", "()V", (void *)android_media_AudioEffect_native_release},
diff --git a/media/jni/soundpool/Android.bp b/media/jni/soundpool/Android.bp
index 7403181a47f4..6141308a8fdb 100644
--- a/media/jni/soundpool/Android.bp
+++ b/media/jni/soundpool/Android.bp
@@ -1,17 +1,110 @@
+tidy_errors = [
+ // https://clang.llvm.org/extra/clang-tidy/checks/list.html
+ // For many categories, the checks are too many to specify individually.
+ // Feel free to disable checks as needed - since warnings are generally ignored,
+ // we treat warnings as errors.
+ "android-*",
+ "bugprone-*",
+ "cert-*",
+ "clang-analyzer-security*",
+ "google-*",
+ "misc-*",
+ //"modernize-*", // explicitly list the modernize as they can be subjective.
+ "modernize-avoid-bind",
+ //"modernize-avoid-c-arrays", // std::array<> can be verbose
+ "modernize-concat-nested-namespaces",
+ //"modernize-deprecated-headers", // C headers still ok even if there is C++ equivalent.
+ "modernize-deprecated-ios-base-aliases",
+ "modernize-loop-convert",
+ "modernize-make-shared",
+ "modernize-make-unique",
+ "modernize-pass-by-value",
+ "modernize-raw-string-literal",
+ "modernize-redundant-void-arg",
+ "modernize-replace-auto-ptr",
+ "modernize-replace-random-shuffle",
+ "modernize-return-braced-init-list",
+ "modernize-shrink-to-fit",
+ "modernize-unary-static-assert",
+ "modernize-use-auto", // debatable - auto can obscure type
+ "modernize-use-bool-literals",
+ "modernize-use-default-member-init",
+ "modernize-use-emplace",
+ "modernize-use-equals-default",
+ "modernize-use-equals-delete",
+ "modernize-use-nodiscard",
+ "modernize-use-noexcept",
+ "modernize-use-nullptr",
+ "modernize-use-override",
+ //"modernize-use-trailing-return-type", // not necessarily more readable
+ "modernize-use-transparent-functors",
+ "modernize-use-uncaught-exceptions",
+ "modernize-use-using",
+ "performance-*",
+
+ // Remove some pedantic stylistic requirements.
+ "-google-readability-casting", // C++ casts not always necessary and may be verbose
+ "-google-readability-todo", // do not require TODO(info)
+ "-google-build-using-namespace", // Reenable and fix later.
+]
+
+cc_defaults {
+ name: "soundpool_flags_defaults",
+ // https://clang.llvm.org/docs/UsersManual.html#command-line-options
+ // https://clang.llvm.org/docs/DiagnosticsReference.html
+ cflags: [
+ "-Wall",
+ "-Wdeprecated",
+ "-Werror",
+ "-Werror=implicit-fallthrough",
+ "-Werror=sometimes-uninitialized",
+ //"-Werror=conditional-uninitialized",
+ "-Wextra",
+ "-Wredundant-decls",
+ "-Wshadow",
+ "-Wstrict-aliasing",
+ "-fstrict-aliasing",
+ "-Wthread-safety",
+ //"-Wthread-safety-negative", // experimental - looks broken in R.
+ "-Wunreachable-code",
+ "-Wunreachable-code-break",
+ "-Wunreachable-code-return",
+ "-Wunused",
+ "-Wused-but-marked-unused",
+ ],
+ // https://clang.llvm.org/extra/clang-tidy/
+ tidy: true,
+ tidy_checks: tidy_errors,
+ tidy_checks_as_errors: tidy_errors,
+ tidy_flags: [
+ "-format-style='file'",
+ "--header-filter='frameworks/base/media/jni/soundpool'",
+ ],
+}
+
cc_library_shared {
name: "libsoundpool",
+ defaults: [
+ "soundpool_flags_defaults",
+ ],
srcs: [
"android_media_SoundPool.cpp",
+ "Sound.cpp",
+ "SoundDecoder.cpp",
+ "SoundManager.cpp",
"SoundPool.cpp",
- "SoundPoolThread.cpp",
+ "Stream.cpp",
+ "StreamManager.cpp",
],
header_libs: [
"libmedia_headers",
+ "libmediametrics_headers",
],
shared_libs: [
+ "libaudioutils",
"liblog",
"libcutils",
"libutils",
diff --git a/media/jni/soundpool/Sound.cpp b/media/jni/soundpool/Sound.cpp
new file mode 100644
index 000000000000..f8b4bdb1f4d5
--- /dev/null
+++ b/media/jni/soundpool/Sound.cpp
@@ -0,0 +1,241 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoundPool::Sound"
+#include <utils/Log.h>
+
+#include "Sound.h"
+
+#include <media/NdkMediaCodec.h>
+#include <media/NdkMediaExtractor.h>
+#include <media/NdkMediaFormat.h>
+
+namespace android::soundpool {
+
+constexpr uint32_t kMaxSampleRate = 192000;
+constexpr size_t kDefaultHeapSize = 1024 * 1024; // 1MB (compatible with low mem devices)
+
+Sound::Sound(int32_t soundID, int fd, int64_t offset, int64_t length)
+ : mSoundID(soundID)
+ , mFd(fcntl(fd, F_DUPFD_CLOEXEC, (int)0 /* arg */)) // dup(fd) + close on exec to prevent leaks.
+ , mOffset(offset)
+ , mLength(length)
+{
+ ALOGV("%s(soundID=%d, fd=%d, offset=%lld, length=%lld)",
+ __func__, soundID, fd, (long long)offset, (long long)length);
+ ALOGW_IF(mFd == -1, "Unable to dup descriptor %d", fd);
+}
+
+Sound::~Sound()
+{
+ ALOGV("%s(soundID=%d, fd=%d)", __func__, mSoundID, mFd.get());
+}
+
+static status_t decode(int fd, int64_t offset, int64_t length,
+ uint32_t *rate, int32_t *channelCount, audio_format_t *audioFormat,
+ audio_channel_mask_t *channelMask, const sp<MemoryHeapBase>& heap,
+ size_t *sizeInBytes) {
+ ALOGV("%s(fd=%d, offset=%lld, length=%lld, ...)",
+ __func__, fd, (long long)offset, (long long)length);
+ std::unique_ptr<AMediaExtractor, decltype(&AMediaExtractor_delete)> ex{
+ AMediaExtractor_new(), &AMediaExtractor_delete};
+ status_t err = AMediaExtractor_setDataSourceFd(ex.get(), fd, offset, length);
+
+ if (err != AMEDIA_OK) {
+ return err;
+ }
+
+ *audioFormat = AUDIO_FORMAT_PCM_16_BIT; // default format for audio codecs.
+ const size_t numTracks = AMediaExtractor_getTrackCount(ex.get());
+ for (size_t i = 0; i < numTracks; i++) {
+ std::unique_ptr<AMediaFormat, decltype(&AMediaFormat_delete)> format{
+ AMediaExtractor_getTrackFormat(ex.get(), i), &AMediaFormat_delete};
+ const char *mime;
+ if (!AMediaFormat_getString(format.get(), AMEDIAFORMAT_KEY_MIME, &mime)) {
+ return UNKNOWN_ERROR;
+ }
+ if (strncmp(mime, "audio/", 6) == 0) {
+ std::unique_ptr<AMediaCodec, decltype(&AMediaCodec_delete)> codec{
+ AMediaCodec_createDecoderByType(mime), &AMediaCodec_delete};
+ if (codec == nullptr
+ || AMediaCodec_configure(codec.get(), format.get(),
+ nullptr /* window */, nullptr /* drm */, 0 /* flags */) != AMEDIA_OK
+ || AMediaCodec_start(codec.get()) != AMEDIA_OK
+ || AMediaExtractor_selectTrack(ex.get(), i) != AMEDIA_OK) {
+ return UNKNOWN_ERROR;
+ }
+
+ bool sawInputEOS = false;
+ bool sawOutputEOS = false;
+ auto writePos = static_cast<uint8_t*>(heap->getBase());
+ size_t available = heap->getSize();
+ size_t written = 0;
+ format.reset(AMediaCodec_getOutputFormat(codec.get())); // update format.
+
+ while (!sawOutputEOS) {
+ if (!sawInputEOS) {
+ ssize_t bufidx = AMediaCodec_dequeueInputBuffer(codec.get(), 5000);
+ ALOGV("%s: input buffer %zd", __func__, bufidx);
+ if (bufidx >= 0) {
+ size_t bufsize;
+ uint8_t * const buf = AMediaCodec_getInputBuffer(
+ codec.get(), bufidx, &bufsize);
+ if (buf == nullptr) {
+ ALOGE("%s: AMediaCodec_getInputBuffer returned nullptr, short decode",
+ __func__);
+ break;
+ }
+ int sampleSize = AMediaExtractor_readSampleData(ex.get(), buf, bufsize);
+ ALOGV("%s: read %d", __func__, sampleSize);
+ if (sampleSize < 0) {
+ sampleSize = 0;
+ sawInputEOS = true;
+ ALOGV("%s: EOS", __func__);
+ }
+ const int64_t presentationTimeUs = AMediaExtractor_getSampleTime(ex.get());
+
+ const media_status_t mstatus = AMediaCodec_queueInputBuffer(
+ codec.get(), bufidx,
+ 0 /* offset */, sampleSize, presentationTimeUs,
+ sawInputEOS ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0);
+ if (mstatus != AMEDIA_OK) {
+ // AMEDIA_ERROR_UNKNOWN == { -ERANGE -EINVAL -EACCES }
+ ALOGE("%s: AMediaCodec_queueInputBuffer returned status %d,"
+ "short decode",
+ __func__, (int)mstatus);
+ break;
+ }
+ (void)AMediaExtractor_advance(ex.get());
+ }
+ }
+
+ AMediaCodecBufferInfo info;
+ const int status = AMediaCodec_dequeueOutputBuffer(codec.get(), &info, 1);
+ ALOGV("%s: dequeueoutput returned: %d", __func__, status);
+ if (status >= 0) {
+ if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
+ ALOGV("%s: output EOS", __func__);
+ sawOutputEOS = true;
+ }
+ ALOGV("%s: got decoded buffer size %d", __func__, info.size);
+
+ const uint8_t * const buf = AMediaCodec_getOutputBuffer(
+ codec.get(), status, nullptr /* out_size */);
+ if (buf == nullptr) {
+ ALOGE("%s: AMediaCodec_getOutputBuffer returned nullptr, short decode",
+ __func__);
+ break;
+ }
+ const size_t dataSize = std::min((size_t)info.size, available);
+ memcpy(writePos, buf + info.offset, dataSize);
+ writePos += dataSize;
+ written += dataSize;
+ available -= dataSize;
+ const media_status_t mstatus = AMediaCodec_releaseOutputBuffer(
+ codec.get(), status, false /* render */);
+ if (mstatus != AMEDIA_OK) {
+ // AMEDIA_ERROR_UNKNOWN == { -ERANGE -EINVAL -EACCES }
+ ALOGE("%s: AMediaCodec_releaseOutputBuffer"
+ " returned status %d, short decode",
+ __func__, (int)mstatus);
+ break;
+ }
+ if (available == 0) {
+ // there might be more data, but there's no space for it
+ sawOutputEOS = true;
+ }
+ } else if (status == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
+ ALOGV("%s: output buffers changed", __func__);
+ } else if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
+ format.reset(AMediaCodec_getOutputFormat(codec.get())); // update format
+ ALOGV("%s: format changed to: %s",
+ __func__, AMediaFormat_toString(format.get()));
+ } else if (status == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
+ ALOGV("%s: no output buffer right now", __func__);
+ } else if (status <= AMEDIA_ERROR_BASE) {
+ ALOGE("%s: decode error: %d", __func__, status);
+ break;
+ } else {
+ ALOGV("%s: unexpected info code: %d", __func__, status);
+ }
+ }
+
+ (void)AMediaCodec_stop(codec.get());
+ if (!AMediaFormat_getInt32(
+ format.get(), AMEDIAFORMAT_KEY_SAMPLE_RATE, (int32_t*) rate) ||
+ !AMediaFormat_getInt32(
+ format.get(), AMEDIAFORMAT_KEY_CHANNEL_COUNT, channelCount)) {
+ return UNKNOWN_ERROR;
+ }
+ if (!AMediaFormat_getInt32(format.get(), AMEDIAFORMAT_KEY_CHANNEL_MASK,
+ (int32_t*) channelMask)) {
+ *channelMask = AUDIO_CHANNEL_NONE;
+ }
+ *sizeInBytes = written;
+ return OK;
+ }
+ }
+ return UNKNOWN_ERROR;
+}
+
+status_t Sound::doLoad()
+{
+ ALOGV("%s()", __func__);
+ status_t status = NO_INIT;
+ if (mFd.get() != -1) {
+ mHeap = new MemoryHeapBase(kDefaultHeapSize);
+
+ ALOGV("%s: start decode", __func__);
+ uint32_t sampleRate;
+ int32_t channelCount;
+ audio_format_t format;
+ audio_channel_mask_t channelMask;
+ status = decode(mFd.get(), mOffset, mLength, &sampleRate, &channelCount, &format,
+ &channelMask, mHeap, &mSizeInBytes);
+ ALOGV("%s: close(%d)", __func__, mFd.get());
+ mFd.reset(); // close
+
+ if (status != NO_ERROR) {
+ ALOGE("%s: unable to load sound", __func__);
+ } else if (sampleRate > kMaxSampleRate) {
+ ALOGE("%s: sample rate (%u) out of range", __func__, sampleRate);
+ status = BAD_VALUE;
+ } else if (channelCount < 1 || channelCount > FCC_8) {
+ ALOGE("%s: sample channel count (%d) out of range", __func__, channelCount);
+ status = BAD_VALUE;
+ } else {
+ // Correctly loaded, proper parameters
+ ALOGV("%s: pointer = %p, sizeInBytes = %zu, sampleRate = %u, channelCount = %d",
+ __func__, mHeap->getBase(), mSizeInBytes, sampleRate, channelCount);
+ mData = new MemoryBase(mHeap, 0, mSizeInBytes);
+ mSampleRate = sampleRate;
+ mChannelCount = channelCount;
+ mFormat = format;
+ mChannelMask = channelMask;
+ mState = READY; // this should be last, as it is an atomic sync point
+ return NO_ERROR;
+ }
+ } else {
+ ALOGE("%s: uninitialized fd, dup failed", __func__);
+ }
+ // ERROR handling
+ mHeap.clear();
+ mState = DECODE_ERROR; // this should be last, as it is an atomic sync point
+ return status;
+}
+
+} // namespace android::soundpool
diff --git a/media/jni/soundpool/Sound.h b/media/jni/soundpool/Sound.h
new file mode 100644
index 000000000000..efe940a7bd68
--- /dev/null
+++ b/media/jni/soundpool/Sound.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android-base/unique_fd.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <system/audio.h>
+
+namespace android::soundpool {
+
+class SoundDecoder;
+
+/**
+ * Sound is a resource used by SoundPool, referenced by soundID.
+ *
+ * After loading, it is effectively const so no locking required.
+ * However, in order to guarantee that all the values have been
+ * written properly and read properly, we use mState as an atomic synchronization
+ * point. So if getState() shows READY, then all the other getters may
+ * be safely read.
+ *
+ * Technical details:
+ * We access the mState atomic value through memory_order_seq_cst
+ *
+ * https://en.cppreference.com/w/cpp/atomic/memory_order
+ *
+ * which provides memory barriers. So if the last value written by the SoundDecoder
+ * is mState, then the compiler ensures no other prior writes by SoundDecoder will be
+ * reordered afterwards, and a memory barrier is placed (as necessary) to ensure the
+ * cache is visible to other processors.
+ *
+ * Likewise, if the first value read by SoundPool is mState,
+ * the compiler ensures no reads for that thread will be reordered before mState is read,
+ * and a memory barrier is placed (as necessary) to ensure that the cache is properly
+ * updated with other processors' writes before reading.
+ *
+ * See https://developer.android.com/training/articles/smp for discussions about
+ * the variant load-acquire, store-release semantics.
+ */
+class Sound {
+ friend SoundDecoder; // calls doLoad().
+
+public:
+ enum sound_state : int32_t { LOADING, READY, DECODE_ERROR };
+ // A sound starts in the LOADING state and transitions only once
+ // to either READY or DECODE_ERROR when doLoad() is called.
+
+ Sound(int32_t soundID, int fd, int64_t offset, int64_t length);
+ ~Sound();
+
+ int32_t getSoundID() const { return mSoundID; }
+ int32_t getChannelCount() const { return mChannelCount; }
+ uint32_t getSampleRate() const { return mSampleRate; }
+ audio_format_t getFormat() const { return mFormat; }
+ audio_channel_mask_t getChannelMask() const { return mChannelMask; }
+ size_t getSizeInBytes() const { return mSizeInBytes; }
+ sound_state getState() const { return mState; }
+ uint8_t* getData() const { return static_cast<uint8_t*>(mData->unsecurePointer()); }
+ sp<IMemory> getIMemory() const { return mData; }
+
+private:
+ status_t doLoad(); // only SoundDecoder accesses this.
+
+ size_t mSizeInBytes = 0;
+ const int32_t mSoundID;
+ uint32_t mSampleRate = 0;
+ std::atomic<sound_state> mState = LOADING; // used as synchronization point
+ int32_t mChannelCount = 0;
+ audio_format_t mFormat = AUDIO_FORMAT_INVALID;
+ audio_channel_mask_t mChannelMask = AUDIO_CHANNEL_NONE;
+ base::unique_fd mFd; // initialized in constructor, reset to -1 after loading
+ const int64_t mOffset; // int64_t to match java long, see off64_t
+ const int64_t mLength; // int64_t to match java long, see off64_t
+ sp<IMemory> mData;
+ sp<MemoryHeapBase> mHeap;
+};
+
+} // namespace android::soundpool
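
The synchronization comment in Sound.h above describes publishing an object through a single std::atomic state that is written last by the decoder and read first by the consumer. A minimal stand-alone sketch of that pattern, using stand-in types rather than the real Sound/SoundDecoder classes, might look like this:

    #include <atomic>
    #include <cstdint>
    #include <vector>

    struct FakeSound {
        enum State : int32_t { LOADING, READY };
        std::vector<uint8_t> data;             // written only by the decoder thread
        uint32_t sampleRate = 0;               // written only by the decoder thread
        std::atomic<State> state { LOADING };  // the synchronization point
    };

    void decoderThread(FakeSound& s) {
        s.data.assign(1024, 0u);               // "decode" the PCM data
        s.sampleRate = 44100;
        s.state = FakeSound::READY;            // seq_cst store: prior writes become visible
    }

    bool playerThread(const FakeSound& s) {
        if (s.state != FakeSound::READY) {     // seq_cst load gates all subsequent reads
            return false;
        }
        return s.sampleRate == 44100 && !s.data.empty();
    }
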
diff --git a/media/jni/soundpool/SoundDecoder.cpp b/media/jni/soundpool/SoundDecoder.cpp
new file mode 100644
index 000000000000..5ed10b0d785f
--- /dev/null
+++ b/media/jni/soundpool/SoundDecoder.cpp
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoundPool::SoundDecoder"
+#include "utils/Log.h"
+
+#include "SoundDecoder.h"
+
+namespace android::soundpool {
+
+// Maximum number of Sounds that may be queued for background decoding before we block the caller.
+static constexpr size_t kMaxQueueSize = 128;
+
+// The amount of time we wait for a new Sound decode request
+// before the SoundDecoder thread closes.
+static constexpr int32_t kWaitTimeBeforeCloseMs = 1000;
+
+SoundDecoder::SoundDecoder(SoundManager* soundManager, size_t threads)
+ : mSoundManager(soundManager)
+{
+ ALOGV("%s(%p, %zu)", __func__, soundManager, threads);
+ // ThreadPool is created, but we don't launch any threads.
+ mThreadPool = std::make_unique<ThreadPool>(
+ std::min(threads, (size_t)std::thread::hardware_concurrency()),
+ "SoundDecoder_");
+}
+
+SoundDecoder::~SoundDecoder()
+{
+ ALOGV("%s()", __func__);
+ quit();
+}
+
+void SoundDecoder::quit()
+{
+ ALOGV("%s()", __func__);
+ {
+ std::lock_guard lock(mLock);
+ mQuit = true;
+ mQueueSpaceAvailable.notify_all(); // notify all load waiters
+ mQueueDataAvailable.notify_all(); // notify all worker threads
+ }
+ mThreadPool->quit();
+}
+
+void SoundDecoder::run(int32_t id)
+{
+ ALOGV("%s(%d): entering", __func__, id);
+ std::unique_lock lock(mLock);
+ while (!mQuit) {
+ if (mSoundIDs.size() == 0) {
+ ALOGV("%s(%d): waiting", __func__, id);
+ mQueueDataAvailable.wait_for(
+ lock, std::chrono::duration<int32_t, std::milli>(kWaitTimeBeforeCloseMs));
+ if (mSoundIDs.size() == 0) {
+ break; // no new sound, exit this thread.
+ }
+ continue;
+ }
+ const int32_t soundID = mSoundIDs.front();
+ mSoundIDs.pop_front();
+ mQueueSpaceAvailable.notify_one();
+ ALOGV("%s(%d): processing soundID: %d size: %zu", __func__, id, soundID, mSoundIDs.size());
+ lock.unlock();
+ std::shared_ptr<Sound> sound = mSoundManager->findSound(soundID);
+ status_t status = NO_INIT;
+ if (sound.get() != nullptr) {
+ status = sound->doLoad();
+ }
+ ALOGV("%s(%d): notifying loaded soundID:%d status:%d", __func__, id, soundID, status);
+ mSoundManager->notify(SoundPoolEvent(SoundPoolEvent::SOUND_LOADED, soundID, status));
+ lock.lock();
+ }
+ ALOGV("%s(%d): exiting", __func__, id);
+}
+
+void SoundDecoder::loadSound(int32_t soundID)
+{
+ ALOGV("%s(%d)", __func__, soundID);
+ size_t pendingSounds;
+ {
+ std::unique_lock lock(mLock);
+ while (mSoundIDs.size() == kMaxQueueSize) {
+ if (mQuit) return;
+ ALOGV("%s: waiting soundID: %d size: %zu", __func__, soundID, mSoundIDs.size());
+ mQueueSpaceAvailable.wait(lock);
+ }
+ if (mQuit) return;
+ mSoundIDs.push_back(soundID);
+ mQueueDataAvailable.notify_one();
+ ALOGV("%s: adding soundID: %d size: %zu", __func__, soundID, mSoundIDs.size());
+ pendingSounds = mSoundIDs.size();
+ }
+ // Launch threads as needed. The "as needed" is weakly consistent as we release mLock.
+ if (pendingSounds > mThreadPool->getActiveThreadCount()) {
+ const int32_t id = mThreadPool->launch([this](int32_t id) { run(id); });
+ (void)id; // avoid clang warning -Wunused-variable -Wused-but-marked-unused
+ ALOGV_IF(id != 0, "%s: launched thread %d", __func__, id);
+ }
+}
+
+} // end namespace android::soundpool
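
loadSound() and run() above form a bounded producer/consumer queue in front of a ThreadPool whose workers exit after sitting idle for kWaitTimeBeforeCloseMs. The ThreadPool class itself is declared in StreamManager.h and is not shown in this change; the sketch below is a simplified stand-in for the launch()/getActiveThreadCount() interplay the decoder relies on, not the real implementation (which guards its state with a lock):

    #include <atomic>
    #include <cstdint>
    #include <functional>
    #include <thread>
    #include <vector>

    class TinyThreadPool {  // stand-in only; assumed shape of the real ThreadPool
    public:
        explicit TinyThreadPool(size_t maxThreads) : mMaxThreads(maxThreads) {}
        ~TinyThreadPool() { for (auto& t : mThreads) t.join(); }

        size_t getActiveThreadCount() const { return mActive; }

        // Launch a worker if below the limit; returns 0 if no thread was started.
        // The worker decrements the active count when f returns (e.g. idle timeout).
        int32_t launch(std::function<void(int32_t)> f) {
            if (mActive >= mMaxThreads) return 0;
            const int32_t id = ++mNextId;
            ++mActive;
            mThreads.emplace_back([this, f, id] { f(id); --mActive; });
            return id;
        }

    private:
        const size_t mMaxThreads;
        std::atomic<size_t> mActive { 0 };
        std::atomic<int32_t> mNextId { 0 };
        std::vector<std::thread> mThreads;
    };
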
diff --git a/media/jni/soundpool/SoundDecoder.h b/media/jni/soundpool/SoundDecoder.h
new file mode 100644
index 000000000000..7b62114483cf
--- /dev/null
+++ b/media/jni/soundpool/SoundDecoder.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "SoundPool.h"
+
+#include <deque>
+#include <mutex>
+
+namespace android::soundpool {
+
+/**
+ * SoundDecoder handles background decoding tasks.
+ */
+class SoundDecoder {
+public:
+ SoundDecoder(SoundManager* soundManager, size_t threads);
+ ~SoundDecoder();
+ void loadSound(int32_t soundID) NO_THREAD_SAFETY_ANALYSIS; // uses unique_lock
+ void quit();
+
+private:
+ // The decode thread function.
+ void run(int32_t id) NO_THREAD_SAFETY_ANALYSIS; // uses unique_lock
+
+ SoundManager* const mSoundManager; // set in constructor, has own lock
+ std::unique_ptr<ThreadPool> mThreadPool; // set in constructor, has own lock
+
+ std::mutex mLock;
+ std::condition_variable mQueueSpaceAvailable GUARDED_BY(mLock);
+ std::condition_variable mQueueDataAvailable GUARDED_BY(mLock);
+
+ std::deque<int32_t> mSoundIDs GUARDED_BY(mLock);
+ bool mQuit GUARDED_BY(mLock) = false;
+};
+
+} // end namespace android::soundpool
+
diff --git a/media/jni/soundpool/SoundManager.cpp b/media/jni/soundpool/SoundManager.cpp
new file mode 100644
index 000000000000..5b16174eef2b
--- /dev/null
+++ b/media/jni/soundpool/SoundManager.cpp
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoundPool::SoundManager"
+#include <utils/Log.h>
+
+#include "SoundManager.h"
+
+#include <thread>
+
+#include "SoundDecoder.h"
+
+namespace android::soundpool {
+
+static const size_t kDecoderThreads = std::thread::hardware_concurrency() >= 4 ? 2 : 1;
+
+SoundManager::SoundManager()
+ : mDecoder{std::make_unique<SoundDecoder>(this, kDecoderThreads)}
+{
+ ALOGV("%s()", __func__);
+}
+
+SoundManager::~SoundManager()
+{
+ ALOGV("%s()", __func__);
+ mDecoder->quit();
+
+ std::lock_guard lock(mSoundManagerLock);
+ mSounds.clear();
+}
+
+int32_t SoundManager::load(int fd, int64_t offset, int64_t length, int32_t priority)
+{
+ ALOGV("%s(fd=%d, offset=%lld, length=%lld, priority=%d)",
+ __func__, fd, (long long)offset, (long long)length, priority);
+ int32_t soundID;
+ {
+ std::lock_guard lock(mSoundManagerLock);
+ // mNextSoundID is always positive; it wraps back to 1 at INT32_MAX and so never overflows.
+ do {
+ mNextSoundID = mNextSoundID == INT32_MAX ? 1 : mNextSoundID + 1;
+ } while (findSound_l(mNextSoundID) != nullptr);
+ soundID = mNextSoundID;
+ auto sound = std::make_shared<Sound>(soundID, fd, offset, length);
+ mSounds.emplace(soundID, sound);
+ }
+ // mDecoder->loadSound() must be called outside of mSoundManagerLock.
+ // mDecoder->loadSound() may block on mDecoder message queue space;
+ // the message queue emptying may block on SoundManager::findSound().
+ //
+ // It is theoretically possible that sound loads might decode out-of-order.
+ mDecoder->loadSound(soundID);
+ return soundID;
+}
+
+bool SoundManager::unload(int32_t soundID)
+{
+ ALOGV("%s(soundID=%d)", __func__, soundID);
+ std::lock_guard lock(mSoundManagerLock);
+ return mSounds.erase(soundID) > 0; // erase() returns number of sounds removed.
+}
+
+std::shared_ptr<Sound> SoundManager::findSound(int32_t soundID) const
+{
+ std::lock_guard lock(mSoundManagerLock);
+ return findSound_l(soundID);
+}
+
+std::shared_ptr<Sound> SoundManager::findSound_l(int32_t soundID) const
+{
+ auto it = mSounds.find(soundID);
+ return it != mSounds.end() ? it->second : nullptr;
+}
+
+void SoundManager::setCallback(SoundPool *soundPool, SoundPoolCallback* callback, void* user)
+{
+ mCallbackHandler.setCallback(soundPool, callback, user);
+}
+
+void SoundManager::notify(SoundPoolEvent event)
+{
+ mCallbackHandler.notify(event);
+}
+
+void* SoundManager::getUserData() const
+{
+ return mCallbackHandler.getUserData();
+}
+
+} // namespace android::soundpool
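
load() above allocates a new soundID by advancing mNextSoundID under mSoundManagerLock, wrapping from INT32_MAX back to 1 and skipping IDs still present in mSounds, so an ID is always positive and never collides with a currently loaded sound. A stand-alone illustration of that allocation loop (hypothetical helper, not the actual SoundManager code):

    #include <cstdint>
    #include <limits>
    #include <unordered_set>

    // Advances nextId and returns the first positive ID not currently in use.
    int32_t allocateSoundId(const std::unordered_set<int32_t>& inUse, int32_t& nextId) {
        do {
            nextId = (nextId == std::numeric_limits<int32_t>::max()) ? 1 : nextId + 1;
        } while (inUse.count(nextId) != 0);  // skip IDs that are still loaded
        return nextId;
    }

    // Starting from nextId == INT32_MAX the next allocation wraps around to 1
    // (or to the next free positive ID), so the counter never overflows.
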
diff --git a/media/jni/soundpool/SoundManager.h b/media/jni/soundpool/SoundManager.h
new file mode 100644
index 000000000000..4a4e3b87be26
--- /dev/null
+++ b/media/jni/soundpool/SoundManager.h
@@ -0,0 +1,113 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "Sound.h"
+
+#include <mutex>
+#include <unordered_map>
+
+#include <android-base/thread_annotations.h>
+
+namespace android {
+
+class SoundPool;
+
+// for queued events
+class SoundPoolEvent {
+public:
+ explicit SoundPoolEvent(int msg, int arg1 = 0, int arg2 = 0) :
+ mMsg(msg), mArg1(arg1), mArg2(arg2) {}
+ const int mMsg; // MessageType
+ const int mArg1; // soundID
+ const int mArg2; // status
+ enum MessageType { INVALID, SOUND_LOADED };
+};
+
+// callback function prototype
+typedef void SoundPoolCallback(SoundPoolEvent event, SoundPool* soundPool, void* user);
+
+} // namespace android
+
+namespace android::soundpool {
+
+// This class manages Sounds for the SoundPool.
+class SoundManager {
+public:
+ SoundManager();
+ ~SoundManager();
+
+ // Matches corresponding SoundPool API functions
+ int32_t load(int fd, int64_t offset, int64_t length, int32_t priority);
+ bool unload(int32_t soundID);
+ void setCallback(SoundPool* soundPool, SoundPoolCallback* callback, void* user);
+ void* getUserData() const;
+
+ // SoundPool and SoundDecoder access
+ std::shared_ptr<Sound> findSound(int32_t soundID) const;
+
+ // from the SoundDecoder
+ void notify(SoundPoolEvent event);
+
+private:
+
+ // CallbackHandler is used to manage notifications back to the app when a sound
+ // has been loaded. It uses a recursive lock to allow setting the callback
+ // during the callback.
+ class CallbackHandler {
+ public:
+ void setCallback(SoundPool *soundPool, SoundPoolCallback* callback, void* userData)
+ {
+ std::lock_guard<std::recursive_mutex> lock(mCallbackLock);
+ mSoundPool = soundPool;
+ mCallback = callback;
+ mUserData = userData;
+ }
+ void notify(SoundPoolEvent event) const
+ {
+ std::lock_guard<std::recursive_mutex> lock(mCallbackLock);
+ if (mCallback != nullptr) {
+ mCallback(event, mSoundPool, mUserData);
+ // Note: mCallback may call setCallback().
+ // so mCallback, mUserData may have changed.
+ }
+ }
+ void* getUserData() const
+ {
+ std::lock_guard<std::recursive_mutex> lock(mCallbackLock);
+ return mUserData;
+ }
+ private:
+ mutable std::recursive_mutex mCallbackLock; // allow mCallback to setCallback().
+ // No thread-safety checks in R for recursive_mutex.
+ SoundPool* mSoundPool = nullptr; // GUARDED_BY(mCallbackLock)
+ SoundPoolCallback* mCallback = nullptr; // GUARDED_BY(mCallbackLock)
+ void* mUserData = nullptr; // GUARDED_BY(mCallbackLock)
+ };
+
+ std::shared_ptr<Sound> findSound_l(int32_t soundID) const REQUIRES(mSoundManagerLock);
+
+ // The following variables are initialized in constructor and can be accessed anytime.
+ CallbackHandler mCallbackHandler; // has its own lock
+ const std::unique_ptr<SoundDecoder> mDecoder; // has its own lock
+
+ mutable std::mutex mSoundManagerLock;
+ std::unordered_map<int, std::shared_ptr<Sound>> mSounds GUARDED_BY(mSoundManagerLock);
+ int32_t mNextSoundID GUARDED_BY(mSoundManagerLock) = 0;
+};
+
+} // namespace android::soundpool
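
CallbackHandler above takes a std::recursive_mutex because notify() invokes the app callback while holding mCallbackLock, and that callback is explicitly allowed to call setCallback() again. A stand-alone sketch of that re-entrancy (hypothetical names, using a raw function pointer like SoundPoolCallback):

    #include <mutex>

    using Callback = void (*)(void* user);  // raw pointer, like SoundPoolCallback

    class Notifier {
    public:
        void setCallback(Callback cb, void* user) {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            mCallback = cb;
            mUserData = user;
        }
        void notify() {
            std::lock_guard<std::recursive_mutex> lock(mLock);
            if (mCallback != nullptr) {
                // The callback may call setCallback(); the same thread then re-locks
                // mLock, which recursive_mutex allows (a plain std::mutex would deadlock).
                mCallback(mUserData);
            }
        }
    private:
        std::recursive_mutex mLock;
        Callback mCallback = nullptr;
        void* mUserData = nullptr;
    };

    // Example: a callback that clears itself after the first notification.
    //     void once(void* user) { static_cast<Notifier*>(user)->setCallback(nullptr, nullptr); }
    //     notifier.setCallback(once, &notifier);
    //     notifier.notify();
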
diff --git a/media/jni/soundpool/SoundPool.cpp b/media/jni/soundpool/SoundPool.cpp
index 102bbf0e5931..ac44843859f6 100644
--- a/media/jni/soundpool/SoundPool.cpp
+++ b/media/jni/soundpool/SoundPool.cpp
@@ -16,1124 +16,230 @@
//#define LOG_NDEBUG 0
#define LOG_TAG "SoundPool"
-
-#include <chrono>
-#include <inttypes.h>
-#include <thread>
#include <utils/Log.h>
-#define USE_SHARED_MEM_BUFFER
+#include <algorithm>
+#include <thread>
-#include <media/AudioTrack.h>
#include "SoundPool.h"
-#include "SoundPoolThread.h"
-#include <media/NdkMediaCodec.h>
-#include <media/NdkMediaExtractor.h>
-#include <media/NdkMediaFormat.h>
namespace android
{
-int kDefaultBufferCount = 4;
-uint32_t kMaxSampleRate = 48000;
-uint32_t kDefaultSampleRate = 44100;
-uint32_t kDefaultFrameCount = 1200;
-size_t kDefaultHeapSize = 1024 * 1024; // 1MB
-
-
-SoundPool::SoundPool(int maxChannels, const audio_attributes_t* pAttributes)
-{
- ALOGV("SoundPool constructor: maxChannels=%d, attr.usage=%d, attr.flags=0x%x, attr.tags=%s",
- maxChannels, pAttributes->usage, pAttributes->flags, pAttributes->tags);
-
- // check limits
- mMaxChannels = maxChannels;
- if (mMaxChannels < 1) {
- mMaxChannels = 1;
- }
- else if (mMaxChannels > 32) {
- mMaxChannels = 32;
- }
- ALOGW_IF(maxChannels != mMaxChannels, "App requested %d channels", maxChannels);
-
- mQuit = false;
- mMuted = false;
- mDecodeThread = 0;
- memcpy(&mAttributes, pAttributes, sizeof(audio_attributes_t));
- mAllocated = 0;
- mNextSampleID = 0;
- mNextChannelID = 0;
-
- mCallback = 0;
- mUserData = 0;
-
- mChannelPool = new SoundChannel[mMaxChannels];
- for (int i = 0; i < mMaxChannels; ++i) {
- mChannelPool[i].init(this);
- mChannels.push_back(&mChannelPool[i]);
- }
-
- // start decode thread
- startThreads();
-}
-
-SoundPool::~SoundPool()
-{
- ALOGV("SoundPool destructor");
- mDecodeThread->quit();
- quit();
-
- Mutex::Autolock lock(&mLock);
+// kManagerThreads = 1 historically.
+// Not really necessary to have more than one, but it does speed things up by about
+// 25% having 2 threads instead of 1 when playing many sounds. Having many threads
+// could starve other AudioFlinger clients with SoundPool activity. It may also cause
+// issues with app loading, e.g. Camera.
+static const size_t kStreamManagerThreads = std::thread::hardware_concurrency() >= 4 ? 2 : 1;
- mChannels.clear();
- if (mChannelPool)
- delete [] mChannelPool;
- // clean up samples
- ALOGV("clear samples");
- mSamples.clear();
+// kUseApiLock = true prior to R.
+// Set to true to prevent multiple users from concurrently accessing the SoundPool API internals.
+// Set to false to make the SoundPool methods weakly consistent. When set to false,
+// only AutoPause and AutoResume are locked, which are the only two methods that
+// require API level locking for consistency.
+static constexpr bool kUseApiLock = false;
- if (mDecodeThread)
- delete mDecodeThread;
-}
+namespace {
+// Check input arguments to SoundPool - return "true" to reject request.
-void SoundPool::addToRestartList(SoundChannel* channel)
+bool checkVolume(float *leftVolume, float *rightVolume)
{
- Mutex::Autolock lock(&mRestartLock);
- if (!mQuit) {
- mRestart.push_back(channel);
- mCondition.signal();
+ if (*leftVolume != std::clamp(*leftVolume, 0.f, 1.f) ||
+ *rightVolume != std::clamp(*rightVolume, 0.f, 1.f)) {
+ ALOGI("volume l=%f r=%f out of (0.f, 1.f) bounds, using 1.f", *leftVolume, *rightVolume);
+ // for backward compatibility use 1.f.
+ *leftVolume = *rightVolume = 1.f;
}
+ return false;
}
-void SoundPool::addToStopList(SoundChannel* channel)
+bool checkRate(float *rate)
{
- Mutex::Autolock lock(&mRestartLock);
- if (!mQuit) {
- mStop.push_back(channel);
- mCondition.signal();
+ if (*rate != std::clamp(*rate, 0.125f, 8.f)) {
+ ALOGI("rate %f out of (0.125f, 8.f) bounds, clamping", *rate);
+ // for backward compatibility just clamp
+ *rate = std::clamp(*rate, 0.125f, 8.f);
}
+ return false;
}
-int SoundPool::beginThread(void* arg)
-{
- SoundPool* p = (SoundPool*)arg;
- return p->run();
-}
-
-int SoundPool::run()
+bool checkPriority(int32_t *priority)
{
- mRestartLock.lock();
- while (!mQuit) {
- mCondition.wait(mRestartLock);
- ALOGV("awake");
- if (mQuit) break;
-
- while (!mStop.empty()) {
- SoundChannel* channel;
- ALOGV("Getting channel from stop list");
- List<SoundChannel* >::iterator iter = mStop.begin();
- channel = *iter;
- mStop.erase(iter);
- mRestartLock.unlock();
- if (channel != 0) {
- Mutex::Autolock lock(&mLock);
- channel->stop();
- }
- mRestartLock.lock();
- if (mQuit) break;
- }
-
- while (!mRestart.empty()) {
- SoundChannel* channel;
- ALOGV("Getting channel from list");
- List<SoundChannel*>::iterator iter = mRestart.begin();
- channel = *iter;
- mRestart.erase(iter);
- mRestartLock.unlock();
- if (channel != 0) {
- Mutex::Autolock lock(&mLock);
- channel->nextEvent();
- }
- mRestartLock.lock();
- if (mQuit) break;
- }
+ if (*priority < 0) {
+ ALOGI("negative priority %d, should be >= 0.", *priority);
+ // for backward compatibility, ignore.
}
-
- mStop.clear();
- mRestart.clear();
- mCondition.signal();
- mRestartLock.unlock();
- ALOGV("goodbye");
- return 0;
-}
-
-void SoundPool::quit()
-{
- mRestartLock.lock();
- mQuit = true;
- mCondition.signal();
- mCondition.wait(mRestartLock);
- ALOGV("return from quit");
- mRestartLock.unlock();
-}
-
-bool SoundPool::startThreads()
-{
- createThreadEtc(beginThread, this, "SoundPool");
- if (mDecodeThread == NULL)
- mDecodeThread = new SoundPoolThread(this);
- return mDecodeThread != NULL;
+ return false;
}
-sp<Sample> SoundPool::findSample(int sampleID)
+bool checkLoop(int32_t *loop)
{
- Mutex::Autolock lock(&mLock);
- return findSample_l(sampleID);
+ if (*loop < -1) {
+ ALOGI("loop %d, should be >= -1", *loop);
+ *loop = -1;
+ }
+ return false;
}
-sp<Sample> SoundPool::findSample_l(int sampleID)
-{
- return mSamples.valueFor(sampleID);
-}
+} // namespace
-SoundChannel* SoundPool::findChannel(int channelID)
+SoundPool::SoundPool(int32_t maxStreams, const audio_attributes_t* attributes)
+ : mStreamManager(maxStreams, kStreamManagerThreads, attributes)
{
- for (int i = 0; i < mMaxChannels; ++i) {
- if (mChannelPool[i].channelID() == channelID) {
- return &mChannelPool[i];
- }
- }
- return NULL;
+ ALOGV("%s(maxStreams=%d, attr={ content_type=%d, usage=%d, flags=0x%x, tags=%s })",
+ __func__, maxStreams,
+ attributes->content_type, attributes->usage, attributes->flags, attributes->tags);
}
-SoundChannel* SoundPool::findNextChannel(int channelID)
+SoundPool::~SoundPool()
{
- for (int i = 0; i < mMaxChannels; ++i) {
- if (mChannelPool[i].nextChannelID() == channelID) {
- return &mChannelPool[i];
- }
- }
- return NULL;
+ ALOGV("%s()", __func__);
}
-int SoundPool::load(int fd, int64_t offset, int64_t length, int priority __unused)
+int32_t SoundPool::load(int fd, int64_t offset, int64_t length, int32_t priority)
{
- ALOGV("load: fd=%d, offset=%" PRId64 ", length=%" PRId64 ", priority=%d",
- fd, offset, length, priority);
- int sampleID;
- {
- Mutex::Autolock lock(&mLock);
- sampleID = ++mNextSampleID;
- sp<Sample> sample = new Sample(sampleID, fd, offset, length);
- mSamples.add(sampleID, sample);
- sample->startLoad();
- }
- // mDecodeThread->loadSample() must be called outside of mLock.
- // mDecodeThread->loadSample() may block on mDecodeThread message queue space;
- // the message queue emptying may block on SoundPool::findSample().
- //
- // It theoretically possible that sample loads might decode out-of-order.
- mDecodeThread->loadSample(sampleID);
- return sampleID;
+ ALOGV("%s(fd=%d, offset=%lld, length=%lld, priority=%d)",
+ __func__, fd, (long long)offset, (long long)length, priority);
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ return mSoundManager.load(fd, offset, length, priority);
}
-bool SoundPool::unload(int sampleID)
+bool SoundPool::unload(int32_t soundID)
{
- ALOGV("unload: sampleID=%d", sampleID);
- Mutex::Autolock lock(&mLock);
- return mSamples.removeItem(sampleID) >= 0; // removeItem() returns index or BAD_VALUE
+ ALOGV("%s(%d)", __func__, soundID);
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ return mSoundManager.unload(soundID);
}
-int SoundPool::play(int sampleID, float leftVolume, float rightVolume,
- int priority, int loop, float rate)
+int32_t SoundPool::play(int32_t soundID, float leftVolume, float rightVolume,
+ int32_t priority, int32_t loop, float rate)
{
- ALOGV("play sampleID=%d, leftVolume=%f, rightVolume=%f, priority=%d, loop=%d, rate=%f",
- sampleID, leftVolume, rightVolume, priority, loop, rate);
- SoundChannel* channel;
- int channelID;
-
- Mutex::Autolock lock(&mLock);
-
- if (mQuit) {
- return 0;
- }
- // is sample ready?
- sp<Sample> sample(findSample_l(sampleID));
- if ((sample == 0) || (sample->state() != Sample::READY)) {
- ALOGW(" sample %d not READY", sampleID);
- return 0;
- }
+ ALOGV("%s(soundID=%d, leftVolume=%f, rightVolume=%f, priority=%d, loop=%d, rate=%f)",
+ __func__, soundID, leftVolume, rightVolume, priority, loop, rate);
- dump();
+ // New for R: check arguments to ensure track can be created.
+ // If SoundPool defers the creation of the AudioTrack to the StreamManager thread,
+ // the failure to create may not be visible to the caller, so this precheck is needed.
+ if (checkVolume(&leftVolume, &rightVolume)
+ || checkPriority(&priority)
+ || checkLoop(&loop)
+ || checkRate(&rate)) return 0;
- // allocate a channel
- channel = allocateChannel_l(priority, sampleID);
-
- // no channel allocated - return 0
- if (!channel) {
- ALOGV("No channel allocated");
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ const std::shared_ptr<soundpool::Sound> sound = mSoundManager.findSound(soundID);
+ if (sound == nullptr || sound->getState() != soundpool::Sound::READY) {
+ ALOGW("%s soundID %d not READY", __func__, soundID);
return 0;
}
- channelID = ++mNextChannelID;
-
- ALOGV("play channel %p state = %d", channel, channel->state());
- channel->play(sample, channelID, leftVolume, rightVolume, priority, loop, rate);
- return channelID;
-}
-
-SoundChannel* SoundPool::allocateChannel_l(int priority, int sampleID)
-{
- List<SoundChannel*>::iterator iter;
- SoundChannel* channel = NULL;
-
- // check if channel for given sampleID still available
- if (!mChannels.empty()) {
- for (iter = mChannels.begin(); iter != mChannels.end(); ++iter) {
- if (sampleID == (*iter)->getPrevSampleID() && (*iter)->state() == SoundChannel::IDLE) {
- channel = *iter;
- mChannels.erase(iter);
- ALOGV("Allocated recycled channel for same sampleID");
- break;
- }
- }
- }
-
- // allocate any channel
- if (!channel && !mChannels.empty()) {
- iter = mChannels.begin();
- if (priority >= (*iter)->priority()) {
- channel = *iter;
- mChannels.erase(iter);
- ALOGV("Allocated active channel");
- }
- }
-
- // update priority and put it back in the list
- if (channel) {
- channel->setPriority(priority);
- for (iter = mChannels.begin(); iter != mChannels.end(); ++iter) {
- if (priority < (*iter)->priority()) {
- break;
- }
- }
- mChannels.insert(iter, channel);
- }
- return channel;
-}
-
-// move a channel from its current position to the front of the list
-void SoundPool::moveToFront_l(SoundChannel* channel)
-{
- for (List<SoundChannel*>::iterator iter = mChannels.begin(); iter != mChannels.end(); ++iter) {
- if (*iter == channel) {
- mChannels.erase(iter);
- mChannels.push_front(channel);
- break;
- }
- }
-}
-
-void SoundPool::pause(int channelID)
-{
- ALOGV("pause(%d)", channelID);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->pause();
- }
+ const int32_t streamID = mStreamManager.queueForPlay(
+ sound, soundID, leftVolume, rightVolume, priority, loop, rate);
+ ALOGV("%s returned %d", __func__, streamID);
+ return streamID;
}
void SoundPool::autoPause()
{
- ALOGV("autoPause()");
- Mutex::Autolock lock(&mLock);
- for (int i = 0; i < mMaxChannels; ++i) {
- SoundChannel* channel = &mChannelPool[i];
- channel->autoPause();
- }
+ ALOGV("%s()", __func__);
+ auto apiLock = std::make_unique<std::lock_guard<std::mutex>>(mApiLock);
+ mStreamManager.forEach([](soundpool::Stream *stream) { stream->autoPause(); });
}
-void SoundPool::resume(int channelID)
+void SoundPool::autoResume()
{
- ALOGV("resume(%d)", channelID);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->resume();
- }
+ ALOGV("%s()", __func__);
+ auto apiLock = std::make_unique<std::lock_guard<std::mutex>>(mApiLock);
+ mStreamManager.forEach([](soundpool::Stream *stream) { stream->autoResume(); });
}
void SoundPool::mute(bool muting)
{
- ALOGV("mute(%d)", muting);
- Mutex::Autolock lock(&mLock);
- mMuted = muting;
- if (!mChannels.empty()) {
- for (List<SoundChannel*>::iterator iter = mChannels.begin();
- iter != mChannels.end(); ++iter) {
- (*iter)->mute(muting);
- }
- }
+ ALOGV("%s(%d)", __func__, muting);
+ auto apiLock = std::make_unique<std::lock_guard<std::mutex>>(mApiLock);
+ mStreamManager.forEach([=](soundpool::Stream *stream) { stream->mute(muting); });
}
-void SoundPool::autoResume()
+void SoundPool::pause(int32_t streamID)
{
- ALOGV("autoResume()");
- Mutex::Autolock lock(&mLock);
- for (int i = 0; i < mMaxChannels; ++i) {
- SoundChannel* channel = &mChannelPool[i];
- channel->autoResume();
+ ALOGV("%s(%d)", __func__, streamID);
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ if (soundpool::Stream* stream = mStreamManager.findStream(streamID)) {
+ stream->pause(streamID);
}
}
-void SoundPool::stop(int channelID)
+void SoundPool::resume(int32_t streamID)
{
- ALOGV("stop(%d)", channelID);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->stop();
- } else {
- channel = findNextChannel(channelID);
- if (channel)
- channel->clearNextEvent();
+ ALOGV("%s(%d)", __func__, streamID);
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ if (soundpool::Stream* stream = mStreamManager.findStream(streamID)) {
+ stream->resume(streamID);
}
}
-void SoundPool::setVolume(int channelID, float leftVolume, float rightVolume)
+void SoundPool::stop(int32_t streamID)
{
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->setVolume(leftVolume, rightVolume);
+ ALOGV("%s(%d)", __func__, streamID);
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ soundpool::Stream* stream = mStreamManager.findStream(streamID);
+ if (stream != nullptr && stream->requestStop(streamID)) {
+ mStreamManager.moveToRestartQueue(stream);
}
}
-void SoundPool::setPriority(int channelID, int priority)
+void SoundPool::setVolume(int32_t streamID, float leftVolume, float rightVolume)
{
- ALOGV("setPriority(%d, %d)", channelID, priority);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->setPriority(priority);
+ ALOGV("%s(%d, %f %f)", __func__, streamID, leftVolume, rightVolume);
+ if (checkVolume(&leftVolume, &rightVolume)) return;
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ if (soundpool::Stream* stream = mStreamManager.findStream(streamID)) {
+ stream->setVolume(streamID, leftVolume, rightVolume);
}
}
-void SoundPool::setLoop(int channelID, int loop)
+void SoundPool::setPriority(int32_t streamID, int32_t priority)
{
- ALOGV("setLoop(%d, %d)", channelID, loop);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->setLoop(loop);
+ ALOGV("%s(%d, %d)", __func__, streamID, priority);
+ if (checkPriority(&priority)) return;
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ if (soundpool::Stream* stream = mStreamManager.findStream(streamID)) {
+ stream->setPriority(streamID, priority);
}
}
-void SoundPool::setRate(int channelID, float rate)
+void SoundPool::setLoop(int32_t streamID, int32_t loop)
{
- ALOGV("setRate(%d, %f)", channelID, rate);
- Mutex::Autolock lock(&mLock);
- SoundChannel* channel = findChannel(channelID);
- if (channel) {
- channel->setRate(rate);
+ ALOGV("%s(%d, %d)", __func__, streamID, loop);
+ if (checkLoop(&loop)) return;
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ if (soundpool::Stream* stream = mStreamManager.findStream(streamID)) {
+ stream->setLoop(streamID, loop);
}
}
-// call with lock held
-void SoundPool::done_l(SoundChannel* channel)
+void SoundPool::setRate(int32_t streamID, float rate)
{
- ALOGV("done_l(%d)", channel->channelID());
- // if "stolen", play next event
- if (channel->nextChannelID() != 0) {
- ALOGV("add to restart list");
- addToRestartList(channel);
- }
-
- // return to idle state
- else {
- ALOGV("move to front");
- moveToFront_l(channel);
+ ALOGV("%s(%d, %f)", __func__, streamID, rate);
+ if (checkRate(&rate)) return;
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ if (soundpool::Stream* stream = mStreamManager.findStream(streamID)) {
+ stream->setRate(streamID, rate);
}
}
void SoundPool::setCallback(SoundPoolCallback* callback, void* user)
{
- Mutex::Autolock lock(&mCallbackLock);
- mCallback = callback;
- mUserData = user;
-}
-
-void SoundPool::notify(SoundPoolEvent event)
-{
- Mutex::Autolock lock(&mCallbackLock);
- if (mCallback != NULL) {
- mCallback(event, this, mUserData);
- }
-}
-
-void SoundPool::dump()
-{
- for (int i = 0; i < mMaxChannels; ++i) {
- mChannelPool[i].dump();
- }
-}
-
-
-Sample::Sample(int sampleID, int fd, int64_t offset, int64_t length)
-{
- init();
- mSampleID = sampleID;
- mFd = dup(fd);
- mOffset = offset;
- mLength = length;
- ALOGV("create sampleID=%d, fd=%d, offset=%" PRId64 " length=%" PRId64,
- mSampleID, mFd, mLength, mOffset);
-}
-
-void Sample::init()
-{
- mSize = 0;
- mRefCount = 0;
- mSampleID = 0;
- mState = UNLOADED;
- mFd = -1;
- mOffset = 0;
- mLength = 0;
-}
-
-Sample::~Sample()
-{
- ALOGV("Sample::destructor sampleID=%d, fd=%d", mSampleID, mFd);
- if (mFd > 0) {
- ALOGV("close(%d)", mFd);
- ::close(mFd);
- }
-}
-
-static status_t decode(int fd, int64_t offset, int64_t length,
- uint32_t *rate, int *numChannels, audio_format_t *audioFormat,
- audio_channel_mask_t *channelMask, sp<MemoryHeapBase> heap,
- size_t *memsize) {
-
- ALOGV("fd %d, offset %" PRId64 ", size %" PRId64, fd, offset, length);
- AMediaExtractor *ex = AMediaExtractor_new();
- status_t err = AMediaExtractor_setDataSourceFd(ex, fd, offset, length);
-
- if (err != AMEDIA_OK) {
- AMediaExtractor_delete(ex);
- return err;
- }
-
- *audioFormat = AUDIO_FORMAT_PCM_16_BIT;
-
- size_t numTracks = AMediaExtractor_getTrackCount(ex);
- for (size_t i = 0; i < numTracks; i++) {
- AMediaFormat *format = AMediaExtractor_getTrackFormat(ex, i);
- const char *mime;
- if (!AMediaFormat_getString(format, AMEDIAFORMAT_KEY_MIME, &mime)) {
- AMediaExtractor_delete(ex);
- AMediaFormat_delete(format);
- return UNKNOWN_ERROR;
- }
- if (strncmp(mime, "audio/", 6) == 0) {
-
- AMediaCodec *codec = AMediaCodec_createDecoderByType(mime);
- if (codec == NULL
- || AMediaCodec_configure(codec, format,
- NULL /* window */, NULL /* drm */, 0 /* flags */) != AMEDIA_OK
- || AMediaCodec_start(codec) != AMEDIA_OK
- || AMediaExtractor_selectTrack(ex, i) != AMEDIA_OK) {
- AMediaExtractor_delete(ex);
- AMediaCodec_delete(codec);
- AMediaFormat_delete(format);
- return UNKNOWN_ERROR;
- }
-
- bool sawInputEOS = false;
- bool sawOutputEOS = false;
- uint8_t* writePos = static_cast<uint8_t*>(heap->getBase());
- size_t available = heap->getSize();
- size_t written = 0;
-
- AMediaFormat_delete(format);
- format = AMediaCodec_getOutputFormat(codec);
-
- while (!sawOutputEOS) {
- if (!sawInputEOS) {
- ssize_t bufidx = AMediaCodec_dequeueInputBuffer(codec, 5000);
- ALOGV("input buffer %zd", bufidx);
- if (bufidx >= 0) {
- size_t bufsize;
- uint8_t *buf = AMediaCodec_getInputBuffer(codec, bufidx, &bufsize);
- if (buf == nullptr) {
- ALOGE("AMediaCodec_getInputBuffer returned nullptr, short decode");
- break;
- }
- int sampleSize = AMediaExtractor_readSampleData(ex, buf, bufsize);
- ALOGV("read %d", sampleSize);
- if (sampleSize < 0) {
- sampleSize = 0;
- sawInputEOS = true;
- ALOGV("EOS");
- }
- int64_t presentationTimeUs = AMediaExtractor_getSampleTime(ex);
-
- media_status_t mstatus = AMediaCodec_queueInputBuffer(codec, bufidx,
- 0 /* offset */, sampleSize, presentationTimeUs,
- sawInputEOS ? AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM : 0);
- if (mstatus != AMEDIA_OK) {
- // AMEDIA_ERROR_UNKNOWN == { -ERANGE -EINVAL -EACCES }
- ALOGE("AMediaCodec_queueInputBuffer returned status %d, short decode",
- (int)mstatus);
- break;
- }
- (void)AMediaExtractor_advance(ex);
- }
- }
-
- AMediaCodecBufferInfo info;
- int status = AMediaCodec_dequeueOutputBuffer(codec, &info, 1);
- ALOGV("dequeueoutput returned: %d", status);
- if (status >= 0) {
- if (info.flags & AMEDIACODEC_BUFFER_FLAG_END_OF_STREAM) {
- ALOGV("output EOS");
- sawOutputEOS = true;
- }
- ALOGV("got decoded buffer size %d", info.size);
-
- uint8_t *buf = AMediaCodec_getOutputBuffer(codec, status, NULL /* out_size */);
- if (buf == nullptr) {
- ALOGE("AMediaCodec_getOutputBuffer returned nullptr, short decode");
- break;
- }
- size_t dataSize = info.size;
- if (dataSize > available) {
- dataSize = available;
- }
- memcpy(writePos, buf + info.offset, dataSize);
- writePos += dataSize;
- written += dataSize;
- available -= dataSize;
- media_status_t mstatus = AMediaCodec_releaseOutputBuffer(
- codec, status, false /* render */);
- if (mstatus != AMEDIA_OK) {
- // AMEDIA_ERROR_UNKNOWN == { -ERANGE -EINVAL -EACCES }
- ALOGE("AMediaCodec_releaseOutputBuffer returned status %d, short decode",
- (int)mstatus);
- break;
- }
- if (available == 0) {
- // there might be more data, but there's no space for it
- sawOutputEOS = true;
- }
- } else if (status == AMEDIACODEC_INFO_OUTPUT_BUFFERS_CHANGED) {
- ALOGV("output buffers changed");
- } else if (status == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
- AMediaFormat_delete(format);
- format = AMediaCodec_getOutputFormat(codec);
- ALOGV("format changed to: %s", AMediaFormat_toString(format));
- } else if (status == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
- ALOGV("no output buffer right now");
- } else if (status <= AMEDIA_ERROR_BASE) {
- ALOGE("decode error: %d", status);
- break;
- } else {
- ALOGV("unexpected info code: %d", status);
- }
- }
-
- (void)AMediaCodec_stop(codec);
- (void)AMediaCodec_delete(codec);
- (void)AMediaExtractor_delete(ex);
- if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_SAMPLE_RATE, (int32_t*) rate) ||
- !AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_COUNT, numChannels)) {
- (void)AMediaFormat_delete(format);
- return UNKNOWN_ERROR;
- }
- if (!AMediaFormat_getInt32(format, AMEDIAFORMAT_KEY_CHANNEL_MASK,
- (int32_t*) channelMask)) {
- *channelMask = AUDIO_CHANNEL_NONE;
- }
- (void)AMediaFormat_delete(format);
- *memsize = written;
- return OK;
- }
- (void)AMediaFormat_delete(format);
- }
- (void)AMediaExtractor_delete(ex);
- return UNKNOWN_ERROR;
-}
-
-status_t Sample::doLoad()
-{
- uint32_t sampleRate;
- int numChannels;
- audio_format_t format;
- audio_channel_mask_t channelMask;
- status_t status;
- mHeap = new MemoryHeapBase(kDefaultHeapSize);
-
- ALOGV("Start decode");
- status = decode(mFd, mOffset, mLength, &sampleRate, &numChannels, &format,
- &channelMask, mHeap, &mSize);
- ALOGV("close(%d)", mFd);
- ::close(mFd);
- mFd = -1;
- if (status != NO_ERROR) {
- ALOGE("Unable to load sample");
- goto error;
- }
- ALOGV("pointer = %p, size = %zu, sampleRate = %u, numChannels = %d",
- mHeap->getBase(), mSize, sampleRate, numChannels);
-
- if (sampleRate > kMaxSampleRate) {
- ALOGE("Sample rate (%u) out of range", sampleRate);
- status = BAD_VALUE;
- goto error;
- }
-
- if ((numChannels < 1) || (numChannels > FCC_8)) {
- ALOGE("Sample channel count (%d) out of range", numChannels);
- status = BAD_VALUE;
- goto error;
- }
-
- mData = new MemoryBase(mHeap, 0, mSize);
- mSampleRate = sampleRate;
- mNumChannels = numChannels;
- mFormat = format;
- mChannelMask = channelMask;
- mState = READY;
- return NO_ERROR;
-
-error:
- mHeap.clear();
- return status;
-}
-
-
-void SoundChannel::init(SoundPool* soundPool)
-{
- mSoundPool = soundPool;
- mPrevSampleID = -1;
-}
-
-// call with sound pool lock held
-void SoundChannel::play(const sp<Sample>& sample, int nextChannelID, float leftVolume,
- float rightVolume, int priority, int loop, float rate)
-{
- sp<AudioTrack> oldTrack;
- sp<AudioTrack> newTrack;
- status_t status = NO_ERROR;
-
- { // scope for the lock
- Mutex::Autolock lock(&mLock);
-
- ALOGV("SoundChannel::play %p: sampleID=%d, channelID=%d, leftVolume=%f, rightVolume=%f,"
- " priority=%d, loop=%d, rate=%f",
- this, sample->sampleID(), nextChannelID, leftVolume, rightVolume,
- priority, loop, rate);
-
- // if not idle, this voice is being stolen
- if (mState != IDLE) {
- ALOGV("channel %d stolen - event queued for channel %d", channelID(), nextChannelID);
- mNextEvent.set(sample, nextChannelID, leftVolume, rightVolume, priority, loop, rate);
- stop_l();
- return;
- }
-
- // initialize track
- size_t afFrameCount;
- uint32_t afSampleRate;
- audio_stream_type_t streamType =
- AudioSystem::attributesToStreamType(*mSoundPool->attributes());
- if (AudioSystem::getOutputFrameCount(&afFrameCount, streamType) != NO_ERROR) {
- afFrameCount = kDefaultFrameCount;
- }
- if (AudioSystem::getOutputSamplingRate(&afSampleRate, streamType) != NO_ERROR) {
- afSampleRate = kDefaultSampleRate;
- }
- int numChannels = sample->numChannels();
- uint32_t sampleRate = uint32_t(float(sample->sampleRate()) * rate + 0.5);
- size_t frameCount = 0;
-
- if (loop) {
- const audio_format_t format = sample->format();
- const size_t frameSize = audio_is_linear_pcm(format)
- ? numChannels * audio_bytes_per_sample(format) : 1;
- frameCount = sample->size() / frameSize;
- }
-
-#ifndef USE_SHARED_MEM_BUFFER
- uint32_t totalFrames = (kDefaultBufferCount * afFrameCount * sampleRate) / afSampleRate;
- // Ensure minimum audio buffer size in case of short looped sample
- if(frameCount < totalFrames) {
- frameCount = totalFrames;
- }
-#endif
-
- // check if the existing track has the same sample id.
- if (mAudioTrack != 0 && mPrevSampleID == sample->sampleID()) {
- // the sample rate may fail to change if the audio track is a fast track.
- if (mAudioTrack->setSampleRate(sampleRate) == NO_ERROR) {
- newTrack = mAudioTrack;
- ALOGV("reusing track %p for sample %d", mAudioTrack.get(), sample->sampleID());
- }
- }
- if (newTrack == 0) {
- // mToggle toggles each time a track is started on a given channel.
- // The toggle is concatenated with the SoundChannel address and passed to AudioTrack
- // as callback user data. This enables the detection of callbacks received from the old
- // audio track while the new one is being started and avoids processing them with
- // wrong audio audio buffer size (mAudioBufferSize)
- unsigned long toggle = mToggle ^ 1;
- void *userData = (void *)((unsigned long)this | toggle);
- audio_channel_mask_t sampleChannelMask = sample->channelMask();
- // When sample contains a not none channel mask, use it as is.
- // Otherwise, use channel count to calculate channel mask.
- audio_channel_mask_t channelMask = sampleChannelMask != AUDIO_CHANNEL_NONE
- ? sampleChannelMask : audio_channel_out_mask_from_count(numChannels);
-
- // do not create a new audio track if current track is compatible with sample parameters
- #ifdef USE_SHARED_MEM_BUFFER
- newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channelMask, sample->getIMemory(), AUDIO_OUTPUT_FLAG_FAST, callback, userData,
- 0 /*default notification frames*/, AUDIO_SESSION_ALLOCATE,
- AudioTrack::TRANSFER_DEFAULT,
- NULL /*offloadInfo*/, -1 /*uid*/, -1 /*pid*/, mSoundPool->attributes());
- #else
- uint32_t bufferFrames = (totalFrames + (kDefaultBufferCount - 1)) / kDefaultBufferCount;
- newTrack = new AudioTrack(streamType, sampleRate, sample->format(),
- channelMask, frameCount, AUDIO_OUTPUT_FLAG_FAST, callback, userData,
- bufferFrames, AUDIO_SESSION_ALLOCATE, AudioTrack::TRANSFER_DEFAULT,
- NULL /*offloadInfo*/, -1 /*uid*/, -1 /*pid*/, mSoundPool->attributes());
- #endif
- oldTrack = mAudioTrack;
- status = newTrack->initCheck();
- if (status != NO_ERROR) {
- ALOGE("Error creating AudioTrack");
- // newTrack goes out of scope, so reference count drops to zero
- goto exit;
- }
- // From now on, AudioTrack callbacks received with previous toggle value will be ignored.
- mToggle = toggle;
- mAudioTrack = newTrack;
- ALOGV("using new track %p for sample %d", newTrack.get(), sample->sampleID());
- }
- if (mMuted) {
- newTrack->setVolume(0.0f, 0.0f);
- } else {
- newTrack->setVolume(leftVolume, rightVolume);
- }
- newTrack->setLoop(0, frameCount, loop);
- mPos = 0;
- mSample = sample;
- mChannelID = nextChannelID;
- mPriority = priority;
- mLoop = loop;
- mLeftVolume = leftVolume;
- mRightVolume = rightVolume;
- mNumChannels = numChannels;
- mRate = rate;
- clearNextEvent();
- mState = PLAYING;
- mAudioTrack->start();
- mAudioBufferSize = newTrack->frameCount()*newTrack->frameSize();
- }
-
-exit:
- ALOGV("delete oldTrack %p", oldTrack.get());
- if (status != NO_ERROR) {
- mAudioTrack.clear();
- }
-}
-
-void SoundChannel::nextEvent()
-{
- sp<Sample> sample;
- int nextChannelID;
- float leftVolume;
- float rightVolume;
- int priority;
- int loop;
- float rate;
-
- // check for valid event
- {
- Mutex::Autolock lock(&mLock);
- nextChannelID = mNextEvent.channelID();
- if (nextChannelID == 0) {
- ALOGV("stolen channel has no event");
- return;
- }
-
- sample = mNextEvent.sample();
- leftVolume = mNextEvent.leftVolume();
- rightVolume = mNextEvent.rightVolume();
- priority = mNextEvent.priority();
- loop = mNextEvent.loop();
- rate = mNextEvent.rate();
- }
-
- ALOGV("Starting stolen channel %d -> %d", channelID(), nextChannelID);
- play(sample, nextChannelID, leftVolume, rightVolume, priority, loop, rate);
-}
-
-void SoundChannel::callback(int event, void* user, void *info)
-{
- SoundChannel* channel = static_cast<SoundChannel*>((void *)((unsigned long)user & ~1));
-
- channel->process(event, info, (unsigned long)user & 1);
-}
-
-void SoundChannel::process(int event, void *info, unsigned long toggle)
-{
- //ALOGV("process(%d)", mChannelID);
-
- Mutex::Autolock lock(&mLock);
-
- AudioTrack::Buffer* b = NULL;
- if (event == AudioTrack::EVENT_MORE_DATA) {
- b = static_cast<AudioTrack::Buffer *>(info);
- }
-
- if (mToggle != toggle) {
- ALOGV("process wrong toggle %p channel %d", this, mChannelID);
- if (b != NULL) {
- b->size = 0;
- }
- return;
- }
-
- sp<Sample> sample = mSample;
-
-// ALOGV("SoundChannel::process event %d", event);
-
- if (event == AudioTrack::EVENT_MORE_DATA) {
-
- // check for stop state
- if (b->size == 0) return;
-
- if (mState == IDLE) {
- b->size = 0;
- return;
- }
-
- if (sample != 0) {
- // fill buffer
- uint8_t* q = (uint8_t*) b->i8;
- size_t count = 0;
-
- if (mPos < (int)sample->size()) {
- uint8_t* p = sample->data() + mPos;
- count = sample->size() - mPos;
- if (count > b->size) {
- count = b->size;
- }
- memcpy(q, p, count);
-// ALOGV("fill: q=%p, p=%p, mPos=%u, b->size=%u, count=%d", q, p, mPos, b->size,
-// count);
- } else if (mPos < mAudioBufferSize) {
- count = mAudioBufferSize - mPos;
- if (count > b->size) {
- count = b->size;
- }
- memset(q, 0, count);
-// ALOGV("fill extra: q=%p, mPos=%u, b->size=%u, count=%d", q, mPos, b->size, count);
- }
-
- mPos += count;
- b->size = count;
- //ALOGV("buffer=%p, [0]=%d", b->i16, b->i16[0]);
- }
- } else if (event == AudioTrack::EVENT_UNDERRUN || event == AudioTrack::EVENT_BUFFER_END) {
- ALOGV("process %p channel %d event %s",
- this, mChannelID, (event == AudioTrack::EVENT_UNDERRUN) ? "UNDERRUN" :
- "BUFFER_END");
- // Only BUFFER_END should happen as we use static tracks.
- setVolume_l(0.f, 0.f); // set volume to 0 to indicate no need to ramp volume down.
- mSoundPool->addToStopList(this);
- } else if (event == AudioTrack::EVENT_LOOP_END) {
- ALOGV("End loop %p channel %d", this, mChannelID);
- } else if (event == AudioTrack::EVENT_NEW_IAUDIOTRACK) {
- ALOGV("process %p channel %d NEW_IAUDIOTRACK", this, mChannelID);
- } else {
- ALOGW("SoundChannel::process unexpected event %d", event);
- }
-}
-
-
-// call with lock held
-bool SoundChannel::doStop_l()
-{
- if (mState != IDLE) {
- ALOGV("stop");
- if (mLeftVolume != 0.f || mRightVolume != 0.f) {
- setVolume_l(0.f, 0.f);
- if (mSoundPool->attributes()->usage != AUDIO_USAGE_GAME) {
- // Since we're forcibly halting the previously playing content,
- // we sleep here to ensure the volume is ramped down before we stop the track.
- // Ideally the sleep time is the mixer period, or an approximation thereof
- // (Fast vs Normal tracks are different).
- ALOGV("sleeping: ChannelID:%d SampleID:%d", mChannelID, mSample->sampleID());
- std::this_thread::sleep_for(std::chrono::milliseconds(20));
- }
- }
- mAudioTrack->stop();
- mPrevSampleID = mSample->sampleID();
- mSample.clear();
- mState = IDLE;
- mPriority = IDLE_PRIORITY;
- return true;
- }
- return false;
-}
-
-// call with lock held and sound pool lock held
-void SoundChannel::stop_l()
-{
- if (doStop_l()) {
- mSoundPool->done_l(this);
- }
-}
-
-// call with sound pool lock held
-void SoundChannel::stop()
-{
- bool stopped;
- {
- Mutex::Autolock lock(&mLock);
- stopped = doStop_l();
- }
-
- if (stopped) {
- mSoundPool->done_l(this);
- }
-}
-
-//FIXME: Pause is a little broken right now
-void SoundChannel::pause()
-{
- Mutex::Autolock lock(&mLock);
- if (mState == PLAYING) {
- ALOGV("pause track");
- mState = PAUSED;
- mAudioTrack->pause();
- }
-}
-
-void SoundChannel::autoPause()
-{
- Mutex::Autolock lock(&mLock);
- if (mState == PLAYING) {
- ALOGV("pause track");
- mState = PAUSED;
- mAutoPaused = true;
- mAudioTrack->pause();
- }
-}
-
-void SoundChannel::resume()
-{
- Mutex::Autolock lock(&mLock);
- if (mState == PAUSED) {
- ALOGV("resume track");
- mState = PLAYING;
- mAutoPaused = false;
- mAudioTrack->start();
- }
-}
-
-void SoundChannel::autoResume()
-{
- Mutex::Autolock lock(&mLock);
- if (mAutoPaused && (mState == PAUSED)) {
- ALOGV("resume track");
- mState = PLAYING;
- mAutoPaused = false;
- mAudioTrack->start();
- }
-}
-
-void SoundChannel::setRate(float rate)
-{
- Mutex::Autolock lock(&mLock);
- if (mAudioTrack != NULL && mSample != 0) {
- uint32_t sampleRate = uint32_t(float(mSample->sampleRate()) * rate + 0.5);
- mAudioTrack->setSampleRate(sampleRate);
- mRate = rate;
- }
-}
-
-// call with lock held
-void SoundChannel::setVolume_l(float leftVolume, float rightVolume)
-{
- mLeftVolume = leftVolume;
- mRightVolume = rightVolume;
- if (mAudioTrack != NULL && !mMuted)
- mAudioTrack->setVolume(leftVolume, rightVolume);
-}
-
-void SoundChannel::setVolume(float leftVolume, float rightVolume)
-{
- Mutex::Autolock lock(&mLock);
- setVolume_l(leftVolume, rightVolume);
-}
-
-void SoundChannel::mute(bool muting)
-{
- Mutex::Autolock lock(&mLock);
- mMuted = muting;
- if (mAudioTrack != NULL) {
- if (mMuted) {
- mAudioTrack->setVolume(0.0f, 0.0f);
- } else {
- mAudioTrack->setVolume(mLeftVolume, mRightVolume);
- }
- }
-}
-
-void SoundChannel::setLoop(int loop)
-{
- Mutex::Autolock lock(&mLock);
- if (mAudioTrack != NULL && mSample != 0) {
- uint32_t loopEnd = mSample->size()/mNumChannels/
- ((mSample->format() == AUDIO_FORMAT_PCM_16_BIT) ? sizeof(int16_t) : sizeof(uint8_t));
- mAudioTrack->setLoop(0, loopEnd, loop);
- mLoop = loop;
- }
-}
-
-SoundChannel::~SoundChannel()
-{
- ALOGV("SoundChannel destructor %p", this);
- {
- Mutex::Autolock lock(&mLock);
- clearNextEvent();
- doStop_l();
- }
- // do not call AudioTrack destructor with mLock held as it will wait for the AudioTrack
- // callback thread to exit which may need to execute process() and acquire the mLock.
- mAudioTrack.clear();
-}
-
-void SoundChannel::dump()
-{
- ALOGV("mState = %d mChannelID=%d, mNumChannels=%d, mPos = %d, mPriority=%d, mLoop=%d",
- mState, mChannelID, mNumChannels, mPos, mPriority, mLoop);
+ ALOGV("%s(%p, %p)", __func__, callback, user);
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ mSoundManager.setCallback(this, callback, user);
}
-void SoundEvent::set(const sp<Sample>& sample, int channelID, float leftVolume,
- float rightVolume, int priority, int loop, float rate)
+void* SoundPool::getUserData() const
{
- mSample = sample;
- mChannelID = channelID;
- mLeftVolume = leftVolume;
- mRightVolume = rightVolume;
- mPriority = priority;
- mLoop = loop;
- mRate =rate;
+ ALOGV("%s()", __func__);
+ auto apiLock = kUseApiLock ? std::make_unique<std::lock_guard<std::mutex>>(mApiLock) : nullptr;
+ return mSoundManager.getUserData();
}
} // end namespace android
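
A minimal usage sketch of the rewritten native API above, for orientation only; the
attribute values, stream count, and call ordering are illustrative and are not part of
this change:

    #include <cstdint>
    #include <system/audio.h>   // audio_attributes_t, AUDIO_USAGE_GAME
    #include "SoundPool.h"

    void exampleNativeUse(int fd, int64_t offset, int64_t length) {
        audio_attributes_t attributes = {};
        attributes.usage = AUDIO_USAGE_GAME;
        android::SoundPool pool(8 /* maxStreams */, &attributes);

        // load() returns a soundID; decoding is asynchronous, so a real caller
        // waits for the load callback before calling play().
        const int32_t soundID = pool.load(fd, offset, length, 0 /* priority */);
        const int32_t streamID = pool.play(soundID, 1.f /* left */, 1.f /* right */,
                                           0 /* priority */, 0 /* loop */, 1.f /* rate */);
        pool.setVolume(streamID, 0.5f, 0.5f);
        pool.stop(streamID);
        pool.unload(soundID);
    }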
diff --git a/media/jni/soundpool/SoundPool.h b/media/jni/soundpool/SoundPool.h
index 9d7410305c2c..d5b16ef629cd 100644
--- a/media/jni/soundpool/SoundPool.h
+++ b/media/jni/soundpool/SoundPool.h
@@ -14,227 +14,59 @@
* limitations under the License.
*/
-#ifndef SOUNDPOOL_H_
-#define SOUNDPOOL_H_
+#pragma once
-#include <utils/threads.h>
-#include <utils/List.h>
-#include <utils/Vector.h>
-#include <utils/KeyedVector.h>
-#include <media/AudioTrack.h>
-#include <binder/MemoryHeapBase.h>
-#include <binder/MemoryBase.h>
+#include "SoundManager.h"
+#include "StreamManager.h"
namespace android {
-static const int IDLE_PRIORITY = -1;
-
-// forward declarations
-class SoundEvent;
-class SoundPoolThread;
-class SoundPool;
-
-// for queued events
-class SoundPoolEvent {
-public:
- explicit SoundPoolEvent(int msg, int arg1=0, int arg2=0) :
- mMsg(msg), mArg1(arg1), mArg2(arg2) {}
- int mMsg;
- int mArg1;
- int mArg2;
- enum MessageType { INVALID, SAMPLE_LOADED };
-};
-
-// callback function prototype
-typedef void SoundPoolCallback(SoundPoolEvent event, SoundPool* soundPool, void* user);
-
-// tracks samples used by application
-class Sample : public RefBase {
-public:
- enum sample_state { UNLOADED, LOADING, READY, UNLOADING };
- Sample(int sampleID, int fd, int64_t offset, int64_t length);
- ~Sample();
- int sampleID() { return mSampleID; }
- int numChannels() { return mNumChannels; }
- int sampleRate() { return mSampleRate; }
- audio_format_t format() { return mFormat; }
- audio_channel_mask_t channelMask() { return mChannelMask; }
- size_t size() { return mSize; }
- int state() { return mState; }
- uint8_t* data() { return static_cast<uint8_t*>(mData->pointer()); }
- status_t doLoad();
- void startLoad() { mState = LOADING; }
- sp<IMemory> getIMemory() { return mData; }
-
-private:
- void init();
-
- size_t mSize;
- volatile int32_t mRefCount;
- uint16_t mSampleID;
- uint16_t mSampleRate;
- uint8_t mState;
- uint8_t mNumChannels;
- audio_format_t mFormat;
- audio_channel_mask_t mChannelMask;
- int mFd;
- int64_t mOffset;
- int64_t mLength;
- sp<IMemory> mData;
- sp<MemoryHeapBase> mHeap;
-};
-
-// stores pending events for stolen channels
-class SoundEvent
-{
-public:
- SoundEvent() : mChannelID(0), mLeftVolume(0), mRightVolume(0),
- mPriority(IDLE_PRIORITY), mLoop(0), mRate(0) {}
- void set(const sp<Sample>& sample, int channelID, float leftVolume,
- float rightVolume, int priority, int loop, float rate);
- sp<Sample> sample() { return mSample; }
- int channelID() { return mChannelID; }
- float leftVolume() { return mLeftVolume; }
- float rightVolume() { return mRightVolume; }
- int priority() { return mPriority; }
- int loop() { return mLoop; }
- float rate() { return mRate; }
- void clear() { mChannelID = 0; mSample.clear(); }
-
-protected:
- sp<Sample> mSample;
- int mChannelID;
- float mLeftVolume;
- float mRightVolume;
- int mPriority;
- int mLoop;
- float mRate;
-};
-
-// for channels aka AudioTracks
-class SoundChannel : public SoundEvent {
-public:
- enum state { IDLE, RESUMING, STOPPING, PAUSED, PLAYING };
- SoundChannel() : mState(IDLE), mNumChannels(1),
- mPos(0), mToggle(0), mAutoPaused(false), mMuted(false) {}
- ~SoundChannel();
- void init(SoundPool* soundPool);
- void play(const sp<Sample>& sample, int channelID, float leftVolume, float rightVolume,
- int priority, int loop, float rate);
- void setVolume_l(float leftVolume, float rightVolume);
- void setVolume(float leftVolume, float rightVolume);
- void mute(bool muting);
- void stop_l();
- void stop();
- void pause();
- void autoPause();
- void resume();
- void autoResume();
- void setRate(float rate);
- int state() { return mState; }
- void setPriority(int priority) { mPriority = priority; }
- void setLoop(int loop);
- int numChannels() { return mNumChannels; }
- void clearNextEvent() { mNextEvent.clear(); }
- void nextEvent();
- int nextChannelID() { return mNextEvent.channelID(); }
- void dump();
- int getPrevSampleID(void) { return mPrevSampleID; }
-
-private:
- static void callback(int event, void* user, void *info);
- void process(int event, void *info, unsigned long toggle);
- bool doStop_l();
-
- SoundPool* mSoundPool;
- sp<AudioTrack> mAudioTrack;
- SoundEvent mNextEvent;
- Mutex mLock;
- int mState;
- int mNumChannels;
- int mPos;
- int mAudioBufferSize;
- unsigned long mToggle;
- bool mAutoPaused;
- int mPrevSampleID;
- bool mMuted;
-};
-
-// application object for managing a pool of sounds
+/**
+ * Native class backing the Java SoundPool; it manages a pool of sounds.
+ *
+ * See the Android SoundPool Java documentation for a description of valid values.
+ * https://developer.android.com/reference/android/media/SoundPool
+ */
class SoundPool {
- friend class SoundPoolThread;
- friend class SoundChannel;
public:
- SoundPool(int maxChannels, const audio_attributes_t* pAttributes);
+ SoundPool(int32_t maxStreams, const audio_attributes_t* attributes);
~SoundPool();
- int load(int fd, int64_t offset, int64_t length, int priority);
- bool unload(int sampleID);
- int play(int sampleID, float leftVolume, float rightVolume, int priority,
- int loop, float rate);
- void pause(int channelID);
- void mute(bool muting);
+
+ // SoundPool Java API support
+ int32_t load(int fd, int64_t offset, int64_t length, int32_t priority);
+ bool unload(int32_t soundID);
+ int32_t play(int32_t soundID, float leftVolume, float rightVolume, int32_t priority,
+ int32_t loop, float rate);
+ void pause(int32_t streamID);
void autoPause();
- void resume(int channelID);
+ void resume(int32_t streamID);
void autoResume();
- void stop(int channelID);
- void setVolume(int channelID, float leftVolume, float rightVolume);
- void setPriority(int channelID, int priority);
- void setLoop(int channelID, int loop);
- void setRate(int channelID, float rate);
- const audio_attributes_t* attributes() { return &mAttributes; }
-
- // called from SoundPoolThread
- void sampleLoaded(int sampleID);
- sp<Sample> findSample(int sampleID);
-
- // called from AudioTrack thread
- void done_l(SoundChannel* channel);
-
- // callback function
+ void stop(int32_t streamID);
+ void setVolume(int32_t streamID, float leftVolume, float rightVolume);
+ void setPriority(int32_t streamID, int32_t priority);
+ void setLoop(int32_t streamID, int32_t loop);
+ void setRate(int32_t streamID, float rate);
void setCallback(SoundPoolCallback* callback, void* user);
- void* getUserData() { return mUserData; }
+ void* getUserData() const;
-private:
- SoundPool() {} // no default constructor
- bool startThreads();
- sp<Sample> findSample_l(int sampleID);
- SoundChannel* findChannel (int channelID);
- SoundChannel* findNextChannel (int channelID);
- SoundChannel* allocateChannel_l(int priority, int sampleID);
- void moveToFront_l(SoundChannel* channel);
- void notify(SoundPoolEvent event);
- void dump();
-
- // restart thread
- void addToRestartList(SoundChannel* channel);
- void addToStopList(SoundChannel* channel);
- static int beginThread(void* arg);
- int run();
- void quit();
+ // not exposed in the public Java API, used for internal playerSetVolume() muting.
+ void mute(bool muting);
- Mutex mLock;
- Mutex mRestartLock;
- Condition mCondition;
- SoundPoolThread* mDecodeThread;
- SoundChannel* mChannelPool;
- List<SoundChannel*> mChannels;
- List<SoundChannel*> mRestart;
- List<SoundChannel*> mStop;
- DefaultKeyedVector< int, sp<Sample> > mSamples;
- int mMaxChannels;
- audio_attributes_t mAttributes;
- int mAllocated;
- int mNextSampleID;
- int mNextChannelID;
- bool mQuit;
- bool mMuted;
+private:
- // callback
- Mutex mCallbackLock;
- SoundPoolCallback* mCallback;
- void* mUserData;
+ // Constructor-initialized variables.
+ // These can be accessed without a lock because they are internally locked;
+ // even so, care must be taken that a final result composed of individually
+ // consistent actions is itself consistent.
+ soundpool::SoundManager mSoundManager;
+ soundpool::StreamManager mStreamManager;
+
+ // mApiLock serializes SoundPool application calls (configurable by kUseApiLock).
+ // It only locks at the SoundPool layer and not below. At this level,
+ // mApiLock is only required for autoPause() and autoResume() to prevent zippering
+ // of the individual pauses and resumes, and for mute() to avoid interleaving with itself.
+ // It is optional for all other apis.
+ mutable std::mutex mApiLock;
};
} // end namespace android
-
-#endif /*SOUNDPOOL_H_*/
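
The optional api-lock idiom used by the SoundPool methods above (auto apiLock =
kUseApiLock ? make_unique<lock_guard> : nullptr) can be read as a scoped lock that only
exists when a compile-time flag is set. A small standalone sketch; the names below are
illustrative and only the idiom matches the change:

    #include <memory>
    #include <mutex>

    constexpr bool kUseApiLock = true;  // compile-time switch, as in the change
    std::mutex gApiLock;                // stand-in for SoundPool::mApiLock

    void serializedApiCall() {
        // The lock_guard lives exactly as long as the unique_ptr; when kUseApiLock
        // is false the pointer stays null and the call runs unserialized.
        auto apiLock = kUseApiLock
                ? std::make_unique<std::lock_guard<std::mutex>>(gApiLock)
                : nullptr;
        // ... body protected by gApiLock only when kUseApiLock is true ...
    }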
diff --git a/media/jni/soundpool/SoundPoolThread.cpp b/media/jni/soundpool/SoundPoolThread.cpp
deleted file mode 100644
index ba3b482935dd..000000000000
--- a/media/jni/soundpool/SoundPoolThread.cpp
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright (C) 2007 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoundPoolThread"
-#include "utils/Log.h"
-
-#include "SoundPoolThread.h"
-
-namespace android {
-
-void SoundPoolThread::write(SoundPoolMsg msg) {
- Mutex::Autolock lock(&mLock);
- while (mMsgQueue.size() >= maxMessages) {
- mCondition.wait(mLock);
- }
-
- // if thread is quitting, don't add to queue
- if (mRunning) {
- mMsgQueue.push(msg);
- mCondition.signal();
- }
-}
-
-const SoundPoolMsg SoundPoolThread::read() {
- Mutex::Autolock lock(&mLock);
- while (mMsgQueue.size() == 0) {
- mCondition.wait(mLock);
- }
- SoundPoolMsg msg = mMsgQueue[0];
- mMsgQueue.removeAt(0);
- mCondition.signal();
- return msg;
-}
-
-void SoundPoolThread::quit() {
- Mutex::Autolock lock(&mLock);
- if (mRunning) {
- mRunning = false;
- mMsgQueue.clear();
- mMsgQueue.push(SoundPoolMsg(SoundPoolMsg::KILL, 0));
- mCondition.signal();
- mCondition.wait(mLock);
- }
- ALOGV("return from quit");
-}
-
-SoundPoolThread::SoundPoolThread(SoundPool* soundPool) :
- mSoundPool(soundPool)
-{
- mMsgQueue.setCapacity(maxMessages);
- if (createThreadEtc(beginThread, this, "SoundPoolThread")) {
- mRunning = true;
- }
-}
-
-SoundPoolThread::~SoundPoolThread()
-{
- quit();
-}
-
-int SoundPoolThread::beginThread(void* arg) {
- ALOGV("beginThread");
- SoundPoolThread* soundPoolThread = (SoundPoolThread*)arg;
- return soundPoolThread->run();
-}
-
-int SoundPoolThread::run() {
- ALOGV("run");
- for (;;) {
- SoundPoolMsg msg = read();
- ALOGV("Got message m=%d, mData=%d", msg.mMessageType, msg.mData);
- switch (msg.mMessageType) {
- case SoundPoolMsg::KILL:
- ALOGV("goodbye");
- return NO_ERROR;
- case SoundPoolMsg::LOAD_SAMPLE:
- doLoadSample(msg.mData);
- break;
- default:
- ALOGW("run: Unrecognized message %d\n",
- msg.mMessageType);
- break;
- }
- }
-}
-
-void SoundPoolThread::loadSample(int sampleID) {
- write(SoundPoolMsg(SoundPoolMsg::LOAD_SAMPLE, sampleID));
-}
-
-void SoundPoolThread::doLoadSample(int sampleID) {
- sp <Sample> sample = mSoundPool->findSample(sampleID);
- status_t status = -1;
- if (sample != 0) {
- status = sample->doLoad();
- }
- mSoundPool->notify(SoundPoolEvent(SoundPoolEvent::SAMPLE_LOADED, sampleID, status));
-}
-
-} // end namespace android
diff --git a/media/jni/soundpool/SoundPoolThread.h b/media/jni/soundpool/SoundPoolThread.h
deleted file mode 100644
index 7b3e1dda0a23..000000000000
--- a/media/jni/soundpool/SoundPoolThread.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Copyright (C) 2007 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOUNDPOOLTHREAD_H_
-#define SOUNDPOOLTHREAD_H_
-
-#include <utils/threads.h>
-#include <utils/Vector.h>
-#include <media/AudioTrack.h>
-
-#include "SoundPool.h"
-
-namespace android {
-
-class SoundPoolMsg {
-public:
- enum MessageType { INVALID, KILL, LOAD_SAMPLE };
- SoundPoolMsg() : mMessageType(INVALID), mData(0) {}
- SoundPoolMsg(MessageType MessageType, int data) :
- mMessageType(MessageType), mData(data) {}
- uint16_t mMessageType;
- uint16_t mData;
-};
-
-/*
- * This class handles background requests from the SoundPool
- */
-class SoundPoolThread {
-public:
- explicit SoundPoolThread(SoundPool* SoundPool);
- ~SoundPoolThread();
- void loadSample(int sampleID);
- void quit();
- void write(SoundPoolMsg msg);
-
-private:
- static const size_t maxMessages = 128;
-
- static int beginThread(void* arg);
- int run();
- void doLoadSample(int sampleID);
- const SoundPoolMsg read();
-
- Mutex mLock;
- Condition mCondition;
- Vector<SoundPoolMsg> mMsgQueue;
- SoundPool* mSoundPool;
- bool mRunning;
-};
-
-} // end namespace android
-
-#endif /*SOUNDPOOLTHREAD_H_*/
diff --git a/media/jni/soundpool/Stream.cpp b/media/jni/soundpool/Stream.cpp
new file mode 100644
index 000000000000..e7042d0562a4
--- /dev/null
+++ b/media/jni/soundpool/Stream.cpp
@@ -0,0 +1,458 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoundPool::Stream"
+#include <utils/Log.h>
+
+#include "Stream.h"
+
+#include "StreamManager.h"
+
+namespace android::soundpool {
+
+Stream::~Stream()
+{
+ ALOGV("%s(%p)", __func__, this);
+}
+
+void Stream::autoPause()
+{
+ std::lock_guard lock(mLock);
+ if (mState == PLAYING) {
+ ALOGV("%s: track streamID: %d", __func__, (int)mStreamID);
+ mState = PAUSED;
+ mAutoPaused = true;
+ if (mAudioTrack != nullptr) {
+ mAudioTrack->pause();
+ }
+ }
+}
+
+void Stream::autoResume()
+{
+ std::lock_guard lock(mLock);
+ if (mAutoPaused) {
+ if (mState == PAUSED) {
+ ALOGV("%s: track streamID: %d", __func__, (int)mStreamID);
+ mState = PLAYING;
+ if (mAudioTrack != nullptr) {
+ mAudioTrack->start();
+ }
+ }
+ mAutoPaused = false; // New for R: always reset autopause (consistent with API spec).
+ }
+}
+
+void Stream::mute(bool muting)
+{
+ std::lock_guard lock(mLock);
+ mMuted = muting;
+ if (mAudioTrack != nullptr) {
+ if (mMuted) {
+ mAudioTrack->setVolume(0.0f, 0.0f);
+ } else {
+ mAudioTrack->setVolume(mLeftVolume, mRightVolume);
+ }
+ }
+}
+
+void Stream::pause(int32_t streamID)
+{
+ std::lock_guard lock(mLock);
+ if (streamID == mStreamID) {
+ if (mState == PLAYING) {
+ ALOGV("%s: track streamID: %d", __func__, streamID);
+ mState = PAUSED;
+ if (mAudioTrack != nullptr) {
+ mAudioTrack->pause();
+ }
+ }
+ }
+}
+
+void Stream::resume(int32_t streamID)
+{
+ std::lock_guard lock(mLock);
+ if (streamID == mStreamID) {
+ if (mState == PAUSED) {
+ ALOGV("%s: track streamID: %d", __func__, streamID);
+ mState = PLAYING;
+ if (mAudioTrack != nullptr) {
+ mAudioTrack->start();
+ }
+ mAutoPaused = false; // TODO: is this right? (ambiguous per spec), move outside?
+ }
+ }
+}
+
+void Stream::setRate(int32_t streamID, float rate)
+{
+ std::lock_guard lock(mLock);
+ if (streamID == mStreamID) {
+ mRate = rate;
+ if (mAudioTrack != nullptr && mSound != nullptr) {
+ const auto sampleRate = (uint32_t)lround(double(mSound->getSampleRate()) * rate);
+ mAudioTrack->setSampleRate(sampleRate);
+ }
+ }
+}
+
+void Stream::setVolume_l(float leftVolume, float rightVolume)
+{
+ mLeftVolume = leftVolume;
+ mRightVolume = rightVolume;
+ if (mAudioTrack != nullptr && !mMuted) {
+ mAudioTrack->setVolume(leftVolume, rightVolume);
+ }
+}
+
+void Stream::setVolume(int32_t streamID, float leftVolume, float rightVolume)
+{
+ std::lock_guard lock(mLock);
+ if (streamID == mStreamID) {
+ setVolume_l(leftVolume, rightVolume);
+ }
+}
+
+void Stream::setPriority(int32_t streamID, int32_t priority)
+{
+ std::lock_guard lock(mLock);
+ if (streamID == mStreamID) {
+ mPriority = priority;
+ }
+}
+
+void Stream::setLoop(int32_t streamID, int32_t loop)
+{
+ std::lock_guard lock(mLock);
+ if (streamID == mStreamID) {
+ if (mAudioTrack != nullptr && mSound != nullptr) {
+ const uint32_t loopEnd = mSound->getSizeInBytes() / mSound->getChannelCount() /
+ (mSound->getFormat() == AUDIO_FORMAT_PCM_16_BIT
+ ? sizeof(int16_t) : sizeof(uint8_t));
+ mAudioTrack->setLoop(0, loopEnd, loop);
+ }
+ mLoop = loop;
+ }
+}
+
+void Stream::setPlay(
+ int32_t streamID, const std::shared_ptr<Sound> &sound, int32_t soundID,
+ float leftVolume, float rightVolume, int32_t priority, int32_t loop, float rate)
+{
+ std::lock_guard lock(mLock);
+ // We must be idle, or we must be repurposing a pending Stream.
+ LOG_ALWAYS_FATAL_IF(mState != IDLE && mAudioTrack != nullptr, "State %d must be IDLE", mState);
+ mSound = sound;
+ mSoundID = soundID;
+ mLeftVolume = leftVolume;
+ mRightVolume = rightVolume;
+ mPriority = priority;
+ mLoop = loop;
+ mRate = rate;
+ mState = PLAYING;
+ mAutoPaused = false; // New for R (consistent with Java API spec).
+ mStreamID = streamID; // prefer this to be the last, as it is an atomic sync point
+}
+
+void Stream::setStopTimeNs(int64_t stopTimeNs)
+{
+ std::lock_guard lock(mLock);
+ mStopTimeNs = stopTimeNs;
+}
+
+bool Stream::requestStop(int32_t streamID)
+{
+ std::lock_guard lock(mLock);
+ if (streamID == mStreamID) {
+ ALOGV("%s: track streamID: %d", __func__, streamID);
+ if (mAudioTrack != nullptr) {
+ if (mState == PLAYING && !mMuted && (mLeftVolume != 0.f || mRightVolume != 0.f)) {
+ setVolume_l(0.f, 0.f);
+ mStopTimeNs = systemTime() + kStopWaitTimeNs;
+ } else {
+ mStopTimeNs = systemTime();
+ }
+ return true; // must be queued on the restart list.
+ }
+ stop_l();
+ }
+ return false;
+}
+
+void Stream::stop()
+{
+ std::lock_guard lock(mLock);
+ stop_l();
+}
+
+void Stream::stop_l()
+{
+ if (mState != IDLE) {
+ ALOGV("%s: track(%p) streamID: %d", __func__, mAudioTrack.get(), (int)mStreamID);
+ if (mAudioTrack != nullptr) {
+ mAudioTrack->stop();
+ }
+ mSound.reset();
+ mState = IDLE;
+ }
+}
+
+void Stream::clearAudioTrack()
+{
+ sp<AudioTrack> release; // release outside of lock.
+ std::lock_guard lock(mLock);
+ // This will invoke the destructor which waits for the AudioTrack thread to join,
+ // and is currently the only safe way to ensure there are no callbacks afterwards.
+ release = mAudioTrack; // or std::swap if we had move semantics.
+ mAudioTrack.clear();
+}
+
+Stream* Stream::getPairStream() const
+{
+ return mStreamManager->getPairStream(this);
+}
+
+Stream* Stream::playPairStream() {
+ Stream* pairStream = getPairStream();
+ LOG_ALWAYS_FATAL_IF(pairStream == nullptr, "No pair stream!");
+ sp<AudioTrack> releaseTracks[2];
+ {
+ ALOGV("%s: track streamID: %d", __func__, (int)getStreamID());
+ // TODO: Do we really want to force a simultaneous synchronization between
+ // the stream and its pair?
+
+ // note locking order - the paired stream is obtained before the queued stream.
+ // we could invert the locking order, but it is slightly more efficient to do it this way.
+ std::lock_guard lockp(pairStream->mLock);
+ if (pairStream->mSound == nullptr) {
+ return nullptr; // no pair sound
+ }
+ {
+ std::lock_guard lock(mLock);
+ LOG_ALWAYS_FATAL_IF(mState != IDLE, "State: %d must be IDLE", mState);
+ // TODO: do we want a specific set() here?
+ pairStream->mAudioTrack = mAudioTrack;
+ pairStream->mSoundID = mSoundID; // optimization to reuse AudioTrack.
+ pairStream->mToggle = mToggle;
+ pairStream->mAutoPaused = mAutoPaused; // save autopause state
+ pairStream->mMuted = mMuted;
+ mAudioTrack.clear(); // the pair owns the audiotrack.
+ mSound.reset();
+ mSoundID = 0;
+ }
+ // TODO: do we need a specific play_l() anymore?
+ const int pairState = pairStream->mState;
+ pairStream->play_l(pairStream->mSound, pairStream->mStreamID,
+ pairStream->mLeftVolume, pairStream->mRightVolume, pairStream->mPriority,
+ pairStream->mLoop, pairStream->mRate, releaseTracks);
+ if (pairStream->mState == IDLE) {
+ return nullptr; // AudioTrack error
+ }
+ if (pairState == PAUSED) { // reestablish pause
+ pairStream->mState = PAUSED;
+ pairStream->mAudioTrack->pause();
+ }
+ }
+ // release tracks outside of Stream lock
+ return pairStream;
+}
+
+void Stream::play_l(const std::shared_ptr<Sound>& sound, int32_t nextStreamID,
+ float leftVolume, float rightVolume, int32_t priority, int32_t loop, float rate,
+ sp<AudioTrack> releaseTracks[2])
+{
+ // These tracks are released without the lock.
+ sp<AudioTrack> &oldTrack = releaseTracks[0];
+ sp<AudioTrack> &newTrack = releaseTracks[1];
+ status_t status = NO_ERROR;
+
+ {
+ ALOGV("%s(%p)(soundID=%d, streamID=%d, leftVolume=%f, rightVolume=%f,"
+ " priority=%d, loop=%d, rate=%f)",
+ __func__, this, sound->getSoundID(), nextStreamID, leftVolume, rightVolume,
+ priority, loop, rate);
+
+ // initialize track
+ const audio_stream_type_t streamType =
+ AudioSystem::attributesToStreamType(*mStreamManager->getAttributes());
+ const int32_t channelCount = sound->getChannelCount();
+ const auto sampleRate = (uint32_t)lround(double(sound->getSampleRate()) * rate);
+ size_t frameCount = 0;
+
+ if (loop) {
+ const audio_format_t format = sound->getFormat();
+ const size_t frameSize = audio_is_linear_pcm(format)
+ ? channelCount * audio_bytes_per_sample(format) : 1;
+ frameCount = sound->getSizeInBytes() / frameSize;
+ }
+
+ // check if the existing track has the same sound id.
+ if (mAudioTrack != nullptr && mSoundID == sound->getSoundID()) {
+ // the sample rate may fail to change if the audio track is a fast track.
+ if (mAudioTrack->setSampleRate(sampleRate) == NO_ERROR) {
+ newTrack = mAudioTrack;
+ ALOGV("%s: reusing track %p for sound %d",
+ __func__, mAudioTrack.get(), sound->getSoundID());
+ }
+ }
+ if (newTrack == nullptr) {
+ // mToggle toggles each time a track is started on a given stream.
+ // The toggle is concatenated with the Stream address and passed to AudioTrack
+ // as callback user data. This enables the detection of callbacks received from the old
+ // audio track while the new one is being started and avoids processing them with
+ // wrong audio buffer size (mAudioBufferSize)
+ auto toggle = mToggle ^ 1;
+ void* userData = (void*)((uintptr_t)this | toggle);
+ audio_channel_mask_t soundChannelMask = sound->getChannelMask();
+ // When sound contains a valid channel mask, use it as is.
+ // Otherwise, derive the channel mask from the channel count.
+ audio_channel_mask_t channelMask = soundChannelMask != AUDIO_CHANNEL_NONE
+ ? soundChannelMask : audio_channel_out_mask_from_count(channelCount);
+
+ // do not create a new audio track if current track is compatible with sound parameters
+
+ newTrack = new AudioTrack(streamType, sampleRate, sound->getFormat(),
+ channelMask, sound->getIMemory(), AUDIO_OUTPUT_FLAG_FAST,
+ staticCallback, userData,
+ 0 /*default notification frames*/, AUDIO_SESSION_ALLOCATE,
+ AudioTrack::TRANSFER_DEFAULT,
+ nullptr /*offloadInfo*/, -1 /*uid*/, -1 /*pid*/,
+ mStreamManager->getAttributes());
+ // Set caller name so it can be logged in destructor.
+ // MediaMetricsConstants.h: AMEDIAMETRICS_PROP_CALLERNAME_VALUE_SOUNDPOOL
+ newTrack->setCallerName("soundpool");
+ oldTrack = mAudioTrack;
+ status = newTrack->initCheck();
+ if (status != NO_ERROR) {
+ ALOGE("%s: error creating AudioTrack", __func__);
+ // newTrack goes out of scope, so reference count drops to zero
+ goto exit;
+ }
+ // From now on, AudioTrack callbacks received with previous toggle value will be ignored.
+ mToggle = toggle;
+ mAudioTrack = newTrack;
+ ALOGV("%s: using new track %p for sound %d",
+ __func__, newTrack.get(), sound->getSoundID());
+ }
+ if (mMuted) {
+ newTrack->setVolume(0.0f, 0.0f);
+ } else {
+ newTrack->setVolume(leftVolume, rightVolume);
+ }
+ newTrack->setLoop(0, frameCount, loop);
+ mAudioTrack->start();
+ mSound = sound;
+ mSoundID = sound->getSoundID();
+ mPriority = priority;
+ mLoop = loop;
+ mLeftVolume = leftVolume;
+ mRightVolume = rightVolume;
+ mRate = rate;
+ mState = PLAYING;
+ mStopTimeNs = 0;
+ mStreamID = nextStreamID; // prefer this to be the last, as it is an atomic sync point
+ }
+
+exit:
+ ALOGV("%s: delete oldTrack %p", __func__, oldTrack.get());
+ if (status != NO_ERROR) {
+ // TODO: should we consider keeping the soundID if the old track is OK?
+ // Do not attempt to restart this track (should we remove the stream id?)
+ mState = IDLE;
+ mSoundID = 0;
+ mSound.reset();
+ mAudioTrack.clear(); // actual release from releaseTracks[]
+ }
+}
+
+/* static */
+void Stream::staticCallback(int event, void* user, void* info)
+{
+ const auto userAsInt = (uintptr_t)user;
+ auto stream = reinterpret_cast<Stream*>(userAsInt & ~1);
+ stream->callback(event, info, int(userAsInt & 1), 0 /* tries */);
+}
+
+void Stream::callback(int event, void* info, int toggle, int tries)
+{
+ int32_t activeStreamIDToRestart = 0;
+ {
+ std::unique_lock lock(mLock);
+ ALOGV("%s track(%p) streamID %d", __func__, mAudioTrack.get(), (int)mStreamID);
+
+ if (mAudioTrack == nullptr) {
+ // The AudioTrack is either with this stream or its pair.
+ // If it swaps a few times, the toggle is bound to be wrong, so we give up then.
+ //
+ // TODO: Modify AudioTrack callbacks to avoid the hacky toggle and retry
+ // logic here.
+ if (tries < 3) {
+ lock.unlock();
+ ALOGV("%s streamID %d going to pair stream", __func__, (int)mStreamID);
+ getPairStream()->callback(event, info, toggle, tries + 1);
+ } else {
+ ALOGW("%s streamID %d cannot find track", __func__, (int)mStreamID);
+ }
+ return;
+ }
+ if (mToggle != toggle) {
+ ALOGD("%s streamID %d wrong toggle", __func__, (int)mStreamID);
+ return;
+ }
+ switch (event) {
+ case AudioTrack::EVENT_MORE_DATA:
+ ALOGW("%s streamID %d Invalid EVENT_MORE_DATA for static track",
+ __func__, (int)mStreamID);
+ break;
+ case AudioTrack::EVENT_UNDERRUN:
+ ALOGW("%s streamID %d Invalid EVENT_UNDERRUN for static track",
+ __func__, (int)mStreamID);
+ break;
+ case AudioTrack::EVENT_BUFFER_END:
+ ALOGV("%s streamID %d EVENT_BUFFER_END", __func__, (int)mStreamID);
+ if (mState != IDLE) {
+ activeStreamIDToRestart = mStreamID;
+ mStopTimeNs = systemTime();
+ }
+ break;
+ case AudioTrack::EVENT_LOOP_END:
+ ALOGV("%s streamID %d EVENT_LOOP_END", __func__, (int)mStreamID);
+ break;
+ case AudioTrack::EVENT_NEW_IAUDIOTRACK:
+ ALOGV("%s streamID %d NEW_IAUDIOTRACK", __func__, (int)mStreamID);
+ break;
+ default:
+ ALOGW("%s streamID %d Invalid event %d", __func__, (int)mStreamID, event);
+ break;
+ }
+ } // lock ends here. This is on the callback thread, no need to be precise.
+ if (activeStreamIDToRestart > 0) {
+ // Restart only if a particular streamID is still current and active.
+ ALOGV("%s: moveToRestartQueue %d", __func__, activeStreamIDToRestart);
+ mStreamManager->moveToRestartQueue(this, activeStreamIDToRestart);
+ }
+}
+
+void Stream::dump() const
+{
+ // TODO: consider std::try_lock() - ok for now for ALOGV.
+ ALOGV("mPairStream=%p, mState=%d, mStreamID=%d, mSoundID=%d, mPriority=%d, mLoop=%d",
+ getPairStream(), mState, (int)getStreamID(), getSoundID(), mPriority, mLoop);
+}
+
+} // namespace android::soundpool
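
The callback user data built in play_l() and decoded in staticCallback() above packs a
one-bit toggle into the low bit of the Stream pointer, which is guaranteed free because
Stream is cache-line aligned. A standalone sketch of the encode/decode; FakeStream is a
stand-in type, not the real Stream:

    #include <cstdint>

    struct alignas(64) FakeStream { int payload; };  // aligned, so the low address bit is always 0

    void* encodeUserData(FakeStream* stream, int toggle /* 0 or 1 */) {
        return reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(stream) | (toggle & 1));
    }

    FakeStream* decodeUserData(void* user, int* toggle) {
        const auto bits = reinterpret_cast<uintptr_t>(user);
        *toggle = static_cast<int>(bits & 1);                        // recover the toggle
        return reinterpret_cast<FakeStream*>(bits & ~uintptr_t{1});  // mask it off the pointer
    }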
diff --git a/media/jni/soundpool/Stream.h b/media/jni/soundpool/Stream.h
new file mode 100644
index 000000000000..d4e5c9fe7f8a
--- /dev/null
+++ b/media/jni/soundpool/Stream.h
@@ -0,0 +1,159 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "Sound.h"
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/clock.h>
+#include <media/AudioTrack.h>
+
+namespace android::soundpool {
+
+// This is the amount of time to wait after stop is called when stealing an
+// AudioTrack to allow the sound to ramp down. If this is 0, glitches
+// may occur when stealing an AudioTrack.
+inline constexpr int64_t kStopWaitTimeNs = 20 * NANOS_PER_MILLISECOND;
+
+inline constexpr size_t kCacheLineSize = 64; /* std::hardware_constructive_interference_size */
+
+class StreamManager; // forward decl
+
+/**
+ * A Stream is associated with a StreamID exposed to the app to play a Sound.
+ *
+ * The Stream uses a monitor locking strategy on mLock:
+ * https://en.wikipedia.org/wiki/Monitor_(synchronization)
+ *
+ * Public methods are guarded by mLock as needed.
+ *
+ * For Java equivalent APIs, see
+ * https://developer.android.com/reference/android/media/SoundPool
+ *
+ * Streams are paired by the StreamManager, so one stream in a pair may be "stopping"
+ * while the other stream of the pair has been prepared to run
+ * (and its streamID returned to the app), pending the stop of its pair.
+ * The pair of a Stream may be obtained by calling getPairStream(),
+ * where this->getPairStream()->getPairStream() == this (pairing is a symmetric relation).
+ *
+ * playPairStream() and getPairPriority() access the paired stream.
+ * See also StreamManager.h for details of physical layout implications of paired streams.
+ */
+class alignas(kCacheLineSize) Stream {
+public:
+ enum state { IDLE, PAUSED, PLAYING };
+ // The PAUSED, PLAYING state directly corresponds to the AudioTrack state of an active Stream.
+ //
+ // The IDLE state indicates an inactive Stream. An IDLE Stream may have a non-nullptr
+ // AudioTrack, which may be recycled for use if the SoundID matches the next Stream playback.
+ //
+ // PAUSED -> PLAYING through resume() (see also autoResume())
+ // PLAYING -> PAUSED through pause() (see also autoPause())
+ //
+ // IDLE is the initial state of a Stream and also when a stream becomes inactive.
+ // {PAUSED, PLAYING} -> IDLE through stop() (or if the Sound finishes playing)
+ // IDLE -> PLAYING through play(). (there is no way to start a Stream in paused mode).
+
+ ~Stream();
+ void setStreamManager(StreamManager* streamManager) { // non-nullptr
+ mStreamManager = streamManager; // set in StreamManager constructor, not changed
+ }
+
+ // The following methods are monitor locked by mLock.
+ //
+ // For methods taking a streamID:
+ // if the streamID matches the Stream's mStreamID, then method proceeds
+ // else the command is ignored with no effect.
+
+ // returns true if the stream needs to be explicitly stopped.
+ bool requestStop(int32_t streamID);
+ void stop(); // explicit stop(), typically called from the worker thread.
+ void clearAudioTrack();
+ void pause(int32_t streamID);
+ void autoPause(); // see the Java SoundPool.autoPause documentation for details.
+ void resume(int32_t streamID);
+ void autoResume();
+ void mute(bool muting);
+ void dump() const NO_THREAD_SAFETY_ANALYSIS; // disable for ALOGV (see func for details).
+
+ // returns the pair stream if successful, nullptr otherwise
+ Stream* playPairStream();
+
+ // These parameters are explicitly checked in the SoundPool class
+ // so never deviate from the Java API specified values.
+ void setVolume(int32_t streamID, float leftVolume, float rightVolume);
+ void setRate(int32_t streamID, float rate);
+ void setPriority(int32_t streamID, int priority);
+ void setLoop(int32_t streamID, int loop);
+ void setPlay(int32_t streamID, const std::shared_ptr<Sound> &sound, int32_t soundID,
+ float leftVolume, float rightVolume, int32_t priority, int32_t loop, float rate);
+ void setStopTimeNs(int64_t stopTimeNs); // systemTime() clock monotonic.
+
+ // The following getters are not locked and have weak consistency.
+ // These are considered advisory only; a stale value is merely a nuisance.
+ int32_t getPriority() const NO_THREAD_SAFETY_ANALYSIS { return mPriority; }
+ int32_t getPairPriority() const NO_THREAD_SAFETY_ANALYSIS {
+ return getPairStream()->getPriority();
+ }
+ int64_t getStopTimeNs() const NO_THREAD_SAFETY_ANALYSIS { return mStopTimeNs; }
+
+ // Can change with setPlay()
+ int32_t getStreamID() const NO_THREAD_SAFETY_ANALYSIS { return mStreamID; }
+
+ // Can change with play_l()
+ int32_t getSoundID() const NO_THREAD_SAFETY_ANALYSIS { return mSoundID; }
+
+ bool hasSound() const NO_THREAD_SAFETY_ANALYSIS { return mSound.get() != nullptr; }
+
+ // This never changes. See top of header.
+ Stream* getPairStream() const;
+
+private:
+ void play_l(const std::shared_ptr<Sound>& sound, int streamID,
+ float leftVolume, float rightVolume, int priority, int loop, float rate,
+ sp<AudioTrack> releaseTracks[2]) REQUIRES(mLock);
+ void stop_l() REQUIRES(mLock);
+ void setVolume_l(float leftVolume, float rightVolume) REQUIRES(mLock);
+
+ // For use with AudioTrack callback.
+ static void staticCallback(int event, void* user, void* info);
+ void callback(int event, void* info, int toggle, int tries)
+ NO_THREAD_SAFETY_ANALYSIS; // uses unique_lock
+
+ // StreamManager should be set on construction and not changed.
+ // release mLock before calling into StreamManager
+ StreamManager* mStreamManager = nullptr;
+
+ mutable std::mutex mLock;
+ std::atomic_int32_t mStreamID GUARDED_BY(mLock) = 0; // Valid streamIDs are always positive.
+ int mState GUARDED_BY(mLock) = IDLE;
+ std::shared_ptr<Sound> mSound GUARDED_BY(mLock); // Non-null if playing.
+ int32_t mSoundID GUARDED_BY(mLock) = 0; // SoundID associated with AudioTrack.
+ float mLeftVolume GUARDED_BY(mLock) = 0.f;
+ float mRightVolume GUARDED_BY(mLock) = 0.f;
+ int32_t mPriority GUARDED_BY(mLock) = INT32_MIN;
+ int32_t mLoop GUARDED_BY(mLock) = 0;
+ float mRate GUARDED_BY(mLock) = 0.f;
+ bool mAutoPaused GUARDED_BY(mLock) = false;
+ bool mMuted GUARDED_BY(mLock) = false;
+
+ sp<AudioTrack> mAudioTrack GUARDED_BY(mLock);
+ int mToggle GUARDED_BY(mLock) = 0;
+ int64_t mStopTimeNs GUARDED_BY(mLock) = 0; // if nonzero, time to wait for stop.
+};
+
+} // namespace android::soundpool
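
One plausible reading of the pairing described in the header comment above, implied by
streamPosition() and the even/odd test in the StreamManager constructor, is that paired
Streams occupy adjacent slots of one contiguous array, making the pair relation
"index XOR 1". A sketch under that assumption; the real mapping lives in StreamManager,
which this change does not show in full here:

    #include <cstddef>
    #include <memory>

    struct PairedSlot { /* stand-in for a Stream */ };

    struct PairedPool {
        // Size is always even: each requested stream gets two slots that take
        // turns being the "active" and the "stopping" member of the pair.
        std::unique_ptr<PairedSlot[]> pool = std::make_unique<PairedSlot[]>(8);

        size_t positionOf(const PairedSlot* s) const {
            return static_cast<size_t>(s - pool.get());
        }
        // XOR with 1 swaps even/odd neighbours, so pairOf(pairOf(s)) == s.
        PairedSlot* pairOf(const PairedSlot* s) const {
            return &pool[positionOf(s) ^ 1];
        }
    };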
diff --git a/media/jni/soundpool/StreamManager.cpp b/media/jni/soundpool/StreamManager.cpp
new file mode 100644
index 000000000000..5b6494d4947e
--- /dev/null
+++ b/media/jni/soundpool/StreamManager.cpp
@@ -0,0 +1,427 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "SoundPool::StreamManager"
+#include <utils/Log.h>
+
+#include "StreamManager.h"
+
+#include <audio_utils/clock.h>
+#include <audio_utils/roundup.h>
+
+namespace android::soundpool {
+
+// kMaxStreams is the maximum number of AudioTrack resources allowed in the SoundPool.
+// It should be less than the current AudioTrack maximum per UID of 40.
+// We suggest a value of at least 4 so that CTS tests pass.
+static constexpr int32_t kMaxStreams = 32;
+
+// kStealActiveStream_OldestFirst = false historically (Q and earlier)
+// Changing to true could break app expectations but could change behavior beneficially.
+// In R, we change this to true, as it is the correct way per SoundPool documentation.
+static constexpr bool kStealActiveStream_OldestFirst = true;
+
+// kPlayOnCallingThread = true prior to R.
+// Changing it to false makes calls to play() almost instantaneous instead of taking around
+// 10ms to launch the AudioTrack, which is perhaps 100x faster.
+static constexpr bool kPlayOnCallingThread = true;
+
+// Amount of time for a StreamManager thread to wait before closing.
+static constexpr int64_t kWaitTimeBeforeCloseNs = 9 * NANOS_PER_SECOND;
+
+////////////
+
+StreamMap::StreamMap(int32_t streams) {
+ ALOGV("%s(%d)", __func__, streams);
+ if (streams > kMaxStreams) {
+ ALOGW("%s: requested %d streams, clamping to %d", __func__, streams, kMaxStreams);
+ streams = kMaxStreams;
+ } else if (streams < 1) {
+ ALOGW("%s: requested %d streams, clamping to 1", __func__, streams);
+ streams = 1;
+ }
+ mStreamPoolSize = streams * 2;
+ mStreamPool = std::make_unique<Stream[]>(mStreamPoolSize); // create array of streams.
+ // we use a perfect hash table with 2x size to map StreamIDs to Stream pointers.
+ mPerfectHash = std::make_unique<PerfectHash<int32_t, Stream *>>(roundup(mStreamPoolSize * 2));
+}
+
+Stream* StreamMap::findStream(int32_t streamID) const
+{
+ Stream *stream = lookupStreamFromId(streamID);
+ return stream != nullptr && stream->getStreamID() == streamID ? stream : nullptr;
+}
+
+size_t StreamMap::streamPosition(const Stream* stream) const
+{
+ ptrdiff_t index = stream - mStreamPool.get();
+ LOG_ALWAYS_FATAL_IF(index < 0 || (size_t)index >= mStreamPoolSize,
+ "%s: stream position out of range: %td", __func__, index);
+ return (size_t)index;
+}
+
+Stream* StreamMap::lookupStreamFromId(int32_t streamID) const
+{
+ return streamID > 0 ? mPerfectHash->getValue(streamID).load() : nullptr;
+}
+
+int32_t StreamMap::getNextIdForStream(Stream* stream) const {
+ // even though it is const, it mutates the internal hash table.
+ const int32_t id = mPerfectHash->generateKey(
+ stream,
+ [] (Stream *stream) {
+ return stream == nullptr ? 0 : stream->getStreamID();
+ }, /* getKforV() */
+ stream->getStreamID() /* oldID */);
+ return id;
+}
+
+////////////
+
+// Thread safety analysis is supposed to be disabled for constructors and destructors,
+// but clang in R seems to have a bug. We use a pragma to disable it.
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wthread-safety-analysis"
+
+StreamManager::StreamManager(
+ int32_t streams, size_t threads, const audio_attributes_t* attributes)
+ : StreamMap(streams)
+ , mAttributes(*attributes)
+{
+ ALOGV("%s(%d, %zu, ...)", __func__, streams, threads);
+ forEach([this](Stream *stream) {
+ stream->setStreamManager(this);
+        if ((streamPosition(stream) & 1) == 0) { // mark the first stream of each pair available.
+ mAvailableStreams.insert(stream);
+ }
+ });
+
+ mThreadPool = std::make_unique<ThreadPool>(
+ std::min(threads, (size_t)std::thread::hardware_concurrency()),
+ "SoundPool_");
+}
+
+#pragma clang diagnostic pop
+
+StreamManager::~StreamManager()
+{
+ ALOGV("%s", __func__);
+ {
+ std::unique_lock lock(mStreamManagerLock);
+ mQuit = true;
+ mStreamManagerCondition.notify_all();
+ }
+ mThreadPool->quit();
+
+ // call stop on the stream pool
+ forEach([](Stream *stream) { stream->stop(); });
+
+ // This invokes the destructor on the AudioTracks -
+ // we do it here to ensure that AudioTrack callbacks will not occur
+ // afterwards.
+ forEach([](Stream *stream) { stream->clearAudioTrack(); });
+}
+
+
+int32_t StreamManager::queueForPlay(const std::shared_ptr<Sound> &sound,
+ int32_t soundID, float leftVolume, float rightVolume,
+ int32_t priority, int32_t loop, float rate)
+{
+ ALOGV("%s(sound=%p, soundID=%d, leftVolume=%f, rightVolume=%f, priority=%d, loop=%d, rate=%f)",
+ __func__, sound.get(), soundID, leftVolume, rightVolume, priority, loop, rate);
+ bool launchThread = false;
+ int32_t streamID = 0;
+
+ { // for lock
+ std::unique_lock lock(mStreamManagerLock);
+ Stream *newStream = nullptr;
+ bool fromAvailableQueue = false;
+ ALOGV("%s: mStreamManagerLock lock acquired", __func__);
+
+ sanityCheckQueue_l();
+ // find an available stream, prefer one that has matching sound id.
+ if (mAvailableStreams.size() > 0) {
+ for (auto stream : mAvailableStreams) {
+ if (stream->getSoundID() == soundID) {
+ newStream = stream;
+ ALOGV("%s: found soundID %d in available queue", __func__, soundID);
+ break;
+ }
+ }
+ if (newStream == nullptr) {
+ ALOGV("%s: found stream in available queue", __func__);
+ newStream = *mAvailableStreams.begin();
+ }
+ newStream->setStopTimeNs(systemTime());
+ fromAvailableQueue = true;
+ }
+
+ // also look in the streams restarting (if the paired stream doesn't have a pending play)
+ if (newStream == nullptr || newStream->getSoundID() != soundID) {
+            for (auto [unused, stream] : mRestartStreams) {
+ if (!stream->getPairStream()->hasSound()) {
+ if (stream->getSoundID() == soundID) {
+ ALOGV("%s: found soundID %d in restart queue", __func__, soundID);
+ newStream = stream;
+ fromAvailableQueue = false;
+ break;
+ } else if (newStream == nullptr) {
+ ALOGV("%s: found stream in restart queue", __func__);
+ newStream = stream;
+ }
+ }
+ }
+ }
+
+ // no available streams, look for one to steal from the active list
+ if (newStream == nullptr) {
+ for (auto stream : mActiveStreams) {
+ if (stream->getPriority() <= priority) {
+ if (newStream == nullptr
+ || newStream->getPriority() > stream->getPriority()) {
+ newStream = stream;
+ ALOGV("%s: found stream in active queue", __func__);
+ }
+ }
+ }
+ if (newStream != nullptr) { // we need to mute as it is still playing.
+ (void)newStream->requestStop(newStream->getStreamID());
+ }
+ }
+
+ // none found, look for a stream that is restarting, evict one.
+ if (newStream == nullptr) {
+ for (auto [unused, stream] : mRestartStreams) {
+ if (stream->getPairPriority() <= priority) {
+ ALOGV("%s: evict stream from restart queue", __func__);
+ newStream = stream;
+ break;
+ }
+ }
+ }
+
+ // DO NOT LOOK into mProcessingStreams as those are held by the StreamManager threads.
+
+ if (newStream == nullptr) {
+ ALOGD("%s: unable to find stream, returning 0", __func__);
+ return 0; // unable to find available stream
+ }
+
+ Stream *pairStream = newStream->getPairStream();
+ streamID = getNextIdForStream(pairStream);
+ ALOGV("%s: newStream:%p pairStream:%p, streamID:%d",
+ __func__, newStream, pairStream, streamID);
+ pairStream->setPlay(
+ streamID, sound, soundID, leftVolume, rightVolume, priority, loop, rate);
+ if (fromAvailableQueue && kPlayOnCallingThread) {
+ removeFromQueues_l(newStream);
+ mProcessingStreams.emplace(newStream);
+ lock.unlock();
+ if (Stream* nextStream = newStream->playPairStream()) {
+ lock.lock();
+ ALOGV("%s: starting streamID:%d", __func__, nextStream->getStreamID());
+ addToActiveQueue_l(nextStream);
+ } else {
+ lock.lock();
+ mAvailableStreams.insert(newStream);
+ streamID = 0;
+ }
+ mProcessingStreams.erase(newStream);
+ } else {
+ launchThread = moveToRestartQueue_l(newStream) && needMoreThreads_l();
+ }
+ sanityCheckQueue_l();
+ ALOGV("%s: mStreamManagerLock released", __func__);
+ } // lock
+
+ if (launchThread) {
+ const int32_t id = mThreadPool->launch([this](int32_t id) { run(id); });
+ (void)id; // avoid clang warning -Wunused-variable -Wused-but-marked-unused
+ ALOGV_IF(id != 0, "%s: launched thread %d", __func__, id);
+ }
+ ALOGV("%s: returning %d", __func__, streamID);
+ return streamID;
+}
+
+void StreamManager::moveToRestartQueue(
+ Stream* stream, int32_t activeStreamIDToMatch)
+{
+ ALOGV("%s(stream(ID)=%d, activeStreamIDToMatch=%d)",
+ __func__, stream->getStreamID(), activeStreamIDToMatch);
+ bool restart;
+ {
+ std::lock_guard lock(mStreamManagerLock);
+ sanityCheckQueue_l();
+ if (mProcessingStreams.count(stream) > 0 ||
+ mProcessingStreams.count(stream->getPairStream()) > 0) {
+ ALOGD("%s: attempting to restart processing stream(%d)",
+ __func__, stream->getStreamID());
+ restart = false;
+ } else {
+ moveToRestartQueue_l(stream, activeStreamIDToMatch);
+ restart = needMoreThreads_l();
+ }
+ sanityCheckQueue_l();
+ }
+ if (restart) {
+ const int32_t id = mThreadPool->launch([this](int32_t id) { run(id); });
+ (void)id; // avoid clang warning -Wunused-variable -Wused-but-marked-unused
+ ALOGV_IF(id != 0, "%s: launched thread %d", __func__, id);
+ }
+}
+
+bool StreamManager::moveToRestartQueue_l(
+ Stream* stream, int32_t activeStreamIDToMatch)
+{
+ ALOGV("%s(stream(ID)=%d, activeStreamIDToMatch=%d)",
+ __func__, stream->getStreamID(), activeStreamIDToMatch);
+ if (activeStreamIDToMatch > 0 && stream->getStreamID() != activeStreamIDToMatch) {
+ return false;
+ }
+ const ssize_t found = removeFromQueues_l(stream, activeStreamIDToMatch);
+ if (found < 0) return false;
+
+ LOG_ALWAYS_FATAL_IF(found > 1, "stream on %zd > 1 stream lists", found);
+
+ addToRestartQueue_l(stream);
+ mStreamManagerCondition.notify_one();
+ return true;
+}
+
+ssize_t StreamManager::removeFromQueues_l(
+ Stream* stream, int32_t activeStreamIDToMatch) {
+ size_t found = 0;
+ for (auto it = mActiveStreams.begin(); it != mActiveStreams.end(); ++it) {
+ if (*it == stream) {
+            mActiveStreams.erase(it); // erase the iterator and break (otherwise it is not safe).
+ ++found;
+ break;
+ }
+ }
+    // A nonzero activeStreamIDToMatch indicates we proceed only if the stream was found.
+ if (found == 0 && activeStreamIDToMatch > 0) {
+ return -1; // special code: not present on active streams, ignore restart request
+ }
+
+ for (auto it = mRestartStreams.begin(); it != mRestartStreams.end(); ++it) {
+ if (it->second == stream) {
+ mRestartStreams.erase(it);
+ ++found;
+ break;
+ }
+ }
+ found += mAvailableStreams.erase(stream);
+
+ // streams on mProcessingStreams are undergoing processing by the StreamManager thread
+ // and do not participate in normal stream migration.
+ return found;
+}
+
+void StreamManager::addToRestartQueue_l(Stream *stream) {
+ mRestartStreams.emplace(stream->getStopTimeNs(), stream);
+}
+
+void StreamManager::addToActiveQueue_l(Stream *stream) {
+ if (kStealActiveStream_OldestFirst) {
+ mActiveStreams.push_back(stream); // oldest to newest
+ } else {
+ mActiveStreams.push_front(stream); // newest to oldest
+ }
+}
+
+void StreamManager::run(int32_t id)
+{
+ ALOGV("%s(%d) entering", __func__, id);
+ int64_t waitTimeNs = kWaitTimeBeforeCloseNs;
+ std::unique_lock lock(mStreamManagerLock);
+ while (!mQuit) {
+ if (mRestartStreams.empty()) { // on thread start, mRestartStreams can be non-empty.
+ mStreamManagerCondition.wait_for(
+ lock, std::chrono::duration<int64_t, std::nano>(waitTimeNs));
+ }
+ ALOGV("%s(%d) awake", __func__, id);
+
+ sanityCheckQueue_l();
+
+ if (mQuit || (mRestartStreams.empty() && waitTimeNs == kWaitTimeBeforeCloseNs)) {
+ break; // end the thread
+ }
+
+ waitTimeNs = kWaitTimeBeforeCloseNs;
+ while (!mQuit && !mRestartStreams.empty()) {
+ const nsecs_t nowNs = systemTime();
+ auto it = mRestartStreams.begin();
+ Stream* const stream = it->second;
+ const int64_t diffNs = stream->getStopTimeNs() - nowNs;
+ if (diffNs > 0) {
+ waitTimeNs = std::min(waitTimeNs, diffNs);
+ break;
+ }
+ mRestartStreams.erase(it);
+ mProcessingStreams.emplace(stream);
+ lock.unlock();
+ stream->stop();
+ ALOGV("%s(%d) stopping streamID:%d", __func__, id, stream->getStreamID());
+ if (Stream* nextStream = stream->playPairStream()) {
+ ALOGV("%s(%d) starting streamID:%d", __func__, id, nextStream->getStreamID());
+ lock.lock();
+ if (nextStream->getStopTimeNs() > 0) {
+ // the next stream was stopped before we can move it to the active queue.
+ ALOGV("%s(%d) stopping started streamID:%d",
+ __func__, id, nextStream->getStreamID());
+ moveToRestartQueue_l(nextStream);
+ } else {
+ addToActiveQueue_l(nextStream);
+ }
+ } else {
+ lock.lock();
+ mAvailableStreams.insert(stream);
+ }
+ mProcessingStreams.erase(stream);
+ sanityCheckQueue_l();
+ }
+ }
+ ALOGV("%s(%d) exiting", __func__, id);
+}
+
+void StreamManager::dump() const
+{
+ forEach([](const Stream *stream) { stream->dump(); });
+}
+
+void StreamManager::sanityCheckQueue_l() const
+{
+    // We want to preserve the invariant that each stream pair has exactly one stream
+    // on exactly one of the queues.
+ const size_t availableStreams = mAvailableStreams.size();
+ const size_t restartStreams = mRestartStreams.size();
+ const size_t activeStreams = mActiveStreams.size();
+ const size_t processingStreams = mProcessingStreams.size();
+ const size_t managedStreams = availableStreams + restartStreams + activeStreams
+ + processingStreams;
+ const size_t totalStreams = getStreamMapSize() >> 1;
+ LOG_ALWAYS_FATAL_IF(managedStreams != totalStreams,
+ "%s: mAvailableStreams:%zu + mRestartStreams:%zu + "
+ "mActiveStreams:%zu + mProcessingStreams:%zu = %zu != total streams %zu",
+ __func__, availableStreams, restartStreams, activeStreams, processingStreams,
+ managedStreams, totalStreams);
+ ALOGV("%s: mAvailableStreams:%zu + mRestartStreams:%zu + "
+ "mActiveStreams:%zu + mProcessingStreams:%zu = %zu (total streams: %zu)",
+ __func__, availableStreams, restartStreams, activeStreams, processingStreams,
+ managedStreams, totalStreams);
+}
+
+} // namespace android::soundpool
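StreamManager::run() above follows a common timed-wait worker pattern: sleep on the condition variable until either woken or the earliest pending stop time expires, service everything that is due, and drop the lock around the blocking stop/start work. Below is a minimal, self-contained sketch of that pattern only; DeadlineWorker and its members are illustrative names, not part of this patch.

#include <algorithm>
#include <chrono>
#include <condition_variable>
#include <cstdint>
#include <map>
#include <mutex>

// Sketch of the timed-wait worker loop used by StreamManager::run().
class DeadlineWorker {
public:
    void post(int64_t deadlineNs, int task) {
        std::lock_guard guard(mLock);
        mPending.emplace(deadlineNs, task);
        mCond.notify_one();
    }
    void quit() {
        std::lock_guard guard(mLock);
        mQuit = true;
        mCond.notify_all();
    }
    // nowNs returns the current time; process does the (blocking) per-task work.
    void run(int64_t (*nowNs)(), void (*process)(int)) {
        constexpr int64_t kIdleTimeoutNs = 9'000'000'000LL;  // like kWaitTimeBeforeCloseNs
        int64_t waitNs = kIdleTimeoutNs;
        std::unique_lock lock(mLock);
        while (!mQuit) {
            if (mPending.empty()) {
                mCond.wait_for(lock, std::chrono::nanoseconds(waitNs));
            }
            if (mQuit || (mPending.empty() && waitNs == kIdleTimeoutNs)) break;  // idle, exit
            waitNs = kIdleTimeoutNs;
            while (!mQuit && !mPending.empty()) {
                auto it = mPending.begin();                   // earliest deadline first
                const int64_t diffNs = it->first - nowNs();
                if (diffNs > 0) {                             // nothing due yet
                    waitNs = std::min(waitNs, diffNs);
                    break;
                }
                const int task = it->second;
                mPending.erase(it);
                lock.unlock();                                // never block while holding the lock
                process(task);
                lock.lock();
            }
        }
    }
private:
    std::mutex mLock;
    std::condition_variable mCond;
    std::multimap<int64_t /* deadlineNs */, int /* task */> mPending;
    bool mQuit = false;
};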
diff --git a/media/jni/soundpool/StreamManager.h b/media/jni/soundpool/StreamManager.h
new file mode 100644
index 000000000000..59ae2f9d108b
--- /dev/null
+++ b/media/jni/soundpool/StreamManager.h
@@ -0,0 +1,478 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "Stream.h"
+
+#include <condition_variable>
+#include <future>
+#include <list>
+#include <map>
+#include <memory>
+#include <mutex>
+#include <unordered_set>
+#include <vector>
+
+#include <utils/AndroidThreads.h>
+
+namespace android::soundpool {
+
+// TODO: Move helper classes to a utility file, with separate test.
+
+/**
+ * JavaThread is used like std::thread but for threads that may call the JVM.
+ *
+ * std::thread does not easily attach to the JVM. We need JVM capable threads
+ * from createThreadEtc() since android binder call optimization may attempt to
+ * call back into Java if the SoundPool runs in system server.
+ *
+ *
+ * No locking is required - the member variables are inherently thread-safe.
+ */
+class JavaThread {
+public:
+ JavaThread(std::function<void()> f, const char *name)
+ : mF{std::move(f)} {
+ createThreadEtc(staticFunction, this, name);
+ }
+
+ JavaThread(JavaThread &&) = delete; // uses "this" ptr, not moveable.
+
+ ~JavaThread() {
+ join(); // manually block until the future is ready as std::future
+ // destructor doesn't block unless it comes from std::async
+ // and it is the last reference to shared state.
+ }
+
+ void join() const {
+ mFuture.wait();
+ }
+
+ bool isClosed() const {
+ return mIsClosed;
+ }
+
+private:
+ static int staticFunction(void *data) {
+ JavaThread *jt = static_cast<JavaThread *>(data);
+ jt->mF();
+ jt->mIsClosed = true; // set the flag that we are closed
+ // now before we allow the destructor to execute;
+ // otherwise there may be a use after free.
+ jt->mPromise.set_value();
+ return 0;
+ }
+
+ // No locking is provided as these variables are initialized in the constructor
+ // and the members referenced are thread-safe objects.
+ // (mFuture.wait() can block multiple threads.)
+ // Note the order of member variables is reversed for destructor.
+ const std::function<void()> mF;
+ // Used in join() to block until the thread completes.
+ // See https://en.cppreference.com/w/cpp/thread/promise for the void specialization of
+ // promise.
+ std::promise<void> mPromise;
+ std::future<void> mFuture{mPromise.get_future()};
+ std::atomic_bool mIsClosed = false;
+};
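A minimal sketch of the join-on-future idiom JavaThread relies on, with std::thread standing in for createThreadEtc() so it compiles outside the Android tree (names here are illustrative, not part of the patch):

#include <cstdio>
#include <future>
#include <thread>

int main() {
    std::promise<void> done;
    std::future<void> doneFuture = done.get_future();

    std::thread worker([&done] {
        std::printf("work\n");
        done.set_value();       // signal completion, as staticFunction() does
    });

    doneFuture.wait();          // the equivalent of JavaThread::join()
    worker.join();              // std::thread still needs an explicit join
    return 0;
}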
+
+/**
+ * The ThreadPool manages thread lifetimes of SoundPool worker threads.
+ *
+ * TODO: the (eventual) goal of ThreadPool is to transparently and cooperatively
+ * maximize CPU utilization while avoiding starvation of other applications.
+ * Some possibilities:
+ *
+ * We should create worker threads when we have SoundPool work and the system is idle.
+ * CPU cycles are "use-it-or-lose-it" when the system is idle.
+ *
+ * We should adjust the priority of worker threads so that the second (and subsequent) worker
+ * threads have lower priority (should we try to promote priority also?).
+ *
+ * We should throttle the spawning of new worker threads, spacing over time, to avoid
+ * creating too many new threads all at once, on initialization.
+ */
+class ThreadPool {
+public:
+ ThreadPool(size_t maxThreadCount, std::string name)
+ : mMaxThreadCount(maxThreadCount)
+ , mName{std::move(name)} { }
+
+ ~ThreadPool() { quit(); }
+
+ size_t getActiveThreadCount() const { return mActiveThreadCount; }
+ size_t getMaxThreadCount() const { return mMaxThreadCount; }
+
+ void quit() {
+ std::list<std::unique_ptr<JavaThread>> threads;
+ {
+ std::lock_guard lock(mThreadLock);
+ if (mQuit) return; // already joined.
+ mQuit = true;
+ threads = std::move(mThreads);
+ mThreads.clear();
+ }
+ // mQuit set under lock, no more threads will be created.
+ for (auto &thread : threads) {
+ thread->join();
+ thread.reset();
+ }
+ LOG_ALWAYS_FATAL_IF(mActiveThreadCount != 0,
+ "Invalid Active Threads: %zu", (size_t)mActiveThreadCount);
+ }
+
+ // returns a non-zero id if successful, the id is to help logging messages.
+ int32_t launch(std::function<void(int32_t /* id */)> f) {
+ std::list<std::unique_ptr<JavaThread>> threadsToRelease; // release outside of lock.
+ std::lock_guard lock(mThreadLock);
+ if (mQuit) return 0; // ignore if we have quit
+
+ // clean up threads.
+ for (auto it = mThreads.begin(); it != mThreads.end(); ) {
+ if ((*it)->isClosed()) {
+ threadsToRelease.emplace_back(std::move(*it));
+ it = mThreads.erase(it);
+ } else {
+ ++it;
+ }
+ }
+
+ const size_t threadCount = mThreads.size();
+ if (threadCount < mMaxThreadCount) {
+ // if the id wraps, we don't care about collisions. it's just for logging.
+ mNextThreadId = mNextThreadId == INT32_MAX ? 1 : ++mNextThreadId;
+ const int32_t id = mNextThreadId;
+ mThreads.emplace_back(std::make_unique<JavaThread>(
+ [this, id, mf = std::move(f)] { mf(id); --mActiveThreadCount; },
+ (mName + std::to_string(id)).c_str()));
+ ++mActiveThreadCount;
+ return id;
+ }
+ return 0;
+ }
+
+ // TODO: launch only if load average is low.
+ // This gets the load average
+ // See also std::thread::hardware_concurrency() for the concurrent capability.
+ static double getLoadAvg() {
+ double loadAvg[1];
+ if (getloadavg(loadAvg, std::size(loadAvg)) > 0) {
+ return loadAvg[0];
+ }
+ return -1.;
+ }
+
+private:
+ const size_t mMaxThreadCount;
+ const std::string mName;
+
+ std::atomic_size_t mActiveThreadCount = 0;
+
+ std::mutex mThreadLock;
+ bool mQuit GUARDED_BY(mThreadLock) = false;
+ int32_t mNextThreadId GUARDED_BY(mThreadLock) = 0;
+ std::list<std::unique_ptr<JavaThread>> mThreads GUARDED_BY(mThreadLock);
+};
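A hedged usage sketch for the ThreadPool above; it assumes this header is on the caller's include path, the id handed to the worker is only a logging handle, and launch() returning 0 means no thread was started:

#include "StreamManager.h"  // assumption: available to the caller

void exampleThreadPoolUse() {
    android::soundpool::ThreadPool pool(2 /* maxThreadCount */, "Example_");
    const int32_t id = pool.launch([](int32_t workerId) {
        (void)workerId;  // only useful for logging
        // ... do some work ...
    });
    if (id == 0) {
        // quit() was already called, or maxThreadCount workers are still running.
    }
    pool.quit();  // joins every worker; later launch() calls return 0.
}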
+
+/**
+ * A Perfect HashTable for IDs (key) to pointers (value).
+ *
+ * There are no collisions. Why? Because we generate the IDs for you to look up :-).
+ *
+ * The goal of this hash table is to map an integer ID handle > 0 to a pointer.
+ * We give these IDs in monotonic order (though we may skip an ID if it would cause a collision).
+ *
+ * The size of the hashtable must be large enough to accommodate the max number of keys.
+ * We suggest 2x.
+ *
+ * Readers are lockless.
+ * A single writer could be lockless, but we allow multiple writers through an internal lock.
+ *
+ * For the Key type K, valid keys generated are > 0 (signed or unsigned)
+ * For the Value type V, values are pointers - nullptr means empty.
+ */
+template <typename K, typename V>
+class PerfectHash {
+public:
+ PerfectHash(size_t hashCapacity)
+ : mHashCapacity(hashCapacity)
+ , mK2V{new std::atomic<V>[hashCapacity]()} {
+ }
+
+ // Generate a key for a value V.
+ // There is a testing function getKforV() which checks what the value reports as its key.
+ //
+ // Calls back into getKforV under lock.
+ //
+ // We expect that the hashCapacity is 2x the number of stored keys in order
+ // to have one or two tries to find an empty slot
+ K generateKey(V value, std::function<K(V)> getKforV, K oldKey = 0) {
+ std::lock_guard lock(mHashLock);
+ // try to remove the old key.
+ if (oldKey > 0) { // key valid
+ const V v = getValue(oldKey);
+ if (v != nullptr) { // value still valid
+ const K atPosition = getKforV(v);
+ if (atPosition < 0 || // invalid value
+ atPosition == oldKey || // value's key still valid and matches old key
+ ((atPosition ^ oldKey) & (mHashCapacity - 1)) != 0) { // stale key entry
+ getValue(oldKey) = nullptr; // invalidate
+ }
+ } // else if value is invalid, no need to invalidate.
+ }
+ // check if we are invalidating only.
+ if (value == nullptr) return 0;
+ // now insert the new value and return the key.
+ size_t tries = 0;
+ for (; tries < mHashCapacity; ++tries) {
+ mNextKey = mNextKey == std::numeric_limits<K>::max() ? 1 : mNextKey + 1;
+ const V v = getValue(mNextKey);
+ //ALOGD("tries: %zu, key:%d value:%p", tries, (int)mNextKey, v);
+ if (v == nullptr) break; // empty
+ const K atPosition = getKforV(v);
+ //ALOGD("tries: %zu key atPosition:%d", tries, (int)atPosition);
+ if (atPosition < 0 || // invalid value
+ ((atPosition ^ mNextKey) & (mHashCapacity - 1)) != 0) { // stale key entry
+ break;
+ }
+ }
+ LOG_ALWAYS_FATAL_IF(tries == mHashCapacity, "hash table overflow!");
+ //ALOGD("%s: found after %zu tries", __func__, tries);
+ getValue(mNextKey) = value;
+ return mNextKey;
+ }
+
+ std::atomic<V> &getValue(K key) { return mK2V[key & (mHashCapacity - 1)]; }
+    const std::atomic<V> &getValue(K key) const { return mK2V[key & (mHashCapacity - 1)]; }
+
+private:
+ mutable std::mutex mHashLock;
+ const size_t mHashCapacity; // size of mK2V no lock needed.
+ std::unique_ptr<std::atomic<V>[]> mK2V; // no lock needed for read access.
+ K mNextKey GUARDED_BY(mHashLock) {};
+};
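A usage sketch for PerfectHash; Thing stands in for Stream, the lambda plays the role of getKforV() by reporting the key a value believes it owns, and the sketch assumes this header is available to the caller:

#include "StreamManager.h"  // assumption: available to the caller

struct Thing { int32_t id = 0; };

void examplePerfectHashUse() {
    // Capacity is a power of two, roughly 2x the number of stored keys.
    android::soundpool::PerfectHash<int32_t, Thing*> hash(8);
    const auto getKforV = [](Thing* t) { return t == nullptr ? 0 : t->id; };

    Thing a;
    const int32_t key = hash.generateKey(&a, getKforV);  // stores &a in slot key & 7
    a.id = key;                                          // the value remembers its key
    Thing* found = hash.getValue(key).load();            // lockless read; found == &a
    (void)found;

    // Passing the old key invalidates the stale slot before the new key is issued.
    a.id = hash.generateKey(&a, getKforV, key /* oldKey */);
}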
+
+/**
+ * StreamMap contains all the valid streams available to SoundPool.
+ *
+ * There is no Lock required for this class because the streams are
+ * allocated in the constructor, the lookup is lockless, and the Streams
+ * returned are locked internally.
+ *
+ * The lookup uses a perfect hash.
+ * It is possible to use a lockless hash table or to use a stripe-locked concurrent
+ * hashmap for essentially lock-free lookup.
+ *
+ * This follows Map-Reduce parallelism model.
+ * https://en.wikipedia.org/wiki/MapReduce
+ *
+ * Conceivably the forEach could be parallelized using std::for_each with a
+ * std::execution::par policy.
+ *
+ * https://en.cppreference.com/w/cpp/algorithm/for_each
+ */
+class StreamMap {
+public:
+ explicit StreamMap(int32_t streams);
+
+ // Returns the stream associated with streamID or nullptr if not found.
+ // This need not be locked.
+ // The stream ID will never migrate to another Stream, but it may change
+ // underneath you. The Stream operations that take a streamID will confirm
+ // that the streamID matches under the Stream lock before executing otherwise
+ // it ignores the command as stale.
+ Stream* findStream(int32_t streamID) const;
+
+ // Iterates through the stream pool applying the function f.
+ // Since this enumerates over every single stream, it is unlocked.
+ //
+ // See related: https://en.cppreference.com/w/cpp/algorithm/for_each
+    void forEach(std::function<void(const Stream *)> f) const {
+ for (size_t i = 0; i < mStreamPoolSize; ++i) {
+ f(&mStreamPool[i]);
+ }
+ }
+
+    void forEach(std::function<void(Stream *)> f) {
+ for (size_t i = 0; i < mStreamPoolSize; ++i) {
+ f(&mStreamPool[i]);
+ }
+ }
+
+ // Returns the pair stream for a given Stream.
+ // This need not be locked as it is a property of the pointer address.
+ Stream* getPairStream(const Stream* stream) const {
+ const size_t index = streamPosition(stream);
+ return &mStreamPool[index ^ 1];
+ }
+
+ // find the position of the stream in mStreamPool array.
+ size_t streamPosition(const Stream* stream) const; // no lock needed
+
+ size_t getStreamMapSize() const {
+ return mStreamPoolSize;
+ }
+
+ // find the next valid ID for a stream and store in hash table.
+ int32_t getNextIdForStream(Stream* stream) const;
+
+private:
+
+ // use the hash table to attempt to find the stream.
+ // nullptr is returned if the lookup fails.
+ Stream* lookupStreamFromId(int32_t streamID) const;
+
+ // The stream pool is initialized in the constructor, effectively const.
+ // no locking required for access.
+ //
+    // The constructor parameter "streams" results in "streams" pairs of streams.
+ // We have twice as many streams because we wish to return a streamID "handle"
+ // back to the app immediately, while we may be stopping the other stream in the
+ // pair to get its AudioTrack :-).
+ //
+ // Of the stream pair, only one of the streams may have an AudioTrack.
+ // The fixed association of a stream pair allows callbacks from the AudioTrack
+ // to be associated properly to either one or the other of the stream pair.
+ //
+ // TODO: The stream pair arrangement can be removed if we have better AudioTrack
+ // callback handling (being able to remove and change the callback after construction).
+ //
+ // Streams may be accessed anytime off of the stream pool
+ // as there is internal locking on each stream.
+ std::unique_ptr<Stream[]> mStreamPool; // no lock needed for access.
+ size_t mStreamPoolSize; // no lock needed for access.
+
+ // In order to find the Stream from a StreamID, we could do a linear lookup in mStreamPool.
+ // As an alternative, one could use stripe-locked or lock-free concurrent hashtables.
+ //
+ // When considering linear search vs hashmap, verify the typical use-case size.
+ // Linear search is faster than std::unordered_map (circa 2018) for less than 40 elements.
+ // [ Skarupke, M. (2018), "You Can Do Better than std::unordered_map: New and Recent
+ // Improvements to Hash Table Performance." C++Now 2018. cppnow.org, see
+ // https://www.youtube.com/watch?v=M2fKMP47slQ ]
+ //
+ // Here, we use a PerfectHash of Id to Stream *, since we can control the
+ // StreamID returned to the user. This allows O(1) read access to mStreamPool lock-free.
+ //
+ // We prefer that the next stream ID is monotonic for aesthetic reasons
+ // (if we didn't care about monotonicity, a simple method is to apply a generation count
+ // to each stream in the unused upper bits of its index in mStreamPool for the id).
+ //
+ std::unique_ptr<PerfectHash<int32_t, Stream *>> mPerfectHash;
+};
+
+/**
+ * StreamManager is used to manage the streams (accessed by StreamID from Java).
+ *
+ * Locking order (proceeds from application to component).
+ * SoundPool mApiLock (if needed) -> StreamManager mStreamManagerLock
+ * -> pair Stream mLock -> queued Stream mLock
+ */
+class StreamManager : public StreamMap {
+public:
+ // Note: the SoundPool pointer is only used for stream initialization.
+ // It is not stored in StreamManager.
+ StreamManager(int32_t streams, size_t threads, const audio_attributes_t* attributes);
+ ~StreamManager();
+
+ // Returns positive streamID on success, 0 on failure. This is locked.
+ int32_t queueForPlay(const std::shared_ptr<Sound> &sound,
+ int32_t soundID, float leftVolume, float rightVolume,
+ int32_t priority, int32_t loop, float rate)
+ NO_THREAD_SAFETY_ANALYSIS; // uses unique_lock
+
+ ///////////////////////////////////////////////////////////////////////
+ // Called from soundpool::Stream
+
+ const audio_attributes_t* getAttributes() const { return &mAttributes; }
+
+ // Moves the stream to the restart queue (called upon BUFFER_END of the static track)
+ // this is locked internally.
+ // If activeStreamIDToMatch is nonzero, it will only move to the restart queue
+ // if the streamIDToMatch is found on the active queue.
+ void moveToRestartQueue(Stream* stream, int32_t activeStreamIDToMatch = 0);
+
+private:
+
+ void run(int32_t id) NO_THREAD_SAFETY_ANALYSIS; // worker thread, takes unique_lock.
+ void dump() const; // no lock needed
+
+ // returns true if more worker threads are needed.
+ bool needMoreThreads_l() REQUIRES(mStreamManagerLock) {
+ return mRestartStreams.size() > 0 &&
+ (mThreadPool->getActiveThreadCount() == 0
+ || std::distance(mRestartStreams.begin(),
+ mRestartStreams.upper_bound(systemTime()))
+ > (ptrdiff_t)mThreadPool->getActiveThreadCount());
+ }
+
+ // returns true if the stream was added.
+ bool moveToRestartQueue_l(
+ Stream* stream, int32_t activeStreamIDToMatch = 0) REQUIRES(mStreamManagerLock);
+ // returns number of queues the stream was removed from (should be 0 or 1);
+ // a special code of -1 is returned if activeStreamIDToMatch is > 0 and
+ // the stream wasn't found on the active queue.
+ ssize_t removeFromQueues_l(
+ Stream* stream, int32_t activeStreamIDToMatch = 0) REQUIRES(mStreamManagerLock);
+ void addToRestartQueue_l(Stream *stream) REQUIRES(mStreamManagerLock);
+ void addToActiveQueue_l(Stream *stream) REQUIRES(mStreamManagerLock);
+ void sanityCheckQueue_l() const REQUIRES(mStreamManagerLock);
+
+ const audio_attributes_t mAttributes;
+ std::unique_ptr<ThreadPool> mThreadPool; // locked internally
+
+ // mStreamManagerLock is used to lock access for transitions between the
+ // 4 stream queues by the Manager Thread or by the user initiated play().
+ // A stream pair has exactly one stream on exactly one of the queues.
+ std::mutex mStreamManagerLock;
+ std::condition_variable mStreamManagerCondition GUARDED_BY(mStreamManagerLock);
+
+ bool mQuit GUARDED_BY(mStreamManagerLock) = false;
+
+ // There are constructor arg "streams" pairs of streams, only one of each
+ // pair on the 4 stream queues below. The other stream in the pair serves as
+ // placeholder to accumulate user changes, pending actual availability of the
+ // AudioTrack, as it may be in use, requiring stop-then-restart.
+ //
+ // The 4 queues are implemented in the appropriate STL container based on perceived
+ // optimality.
+
+ // 1) mRestartStreams: Streams awaiting stop.
+ // The paired stream may be active (but with no AudioTrack), and will be restarted
+ // with an active AudioTrack when the current stream is stopped.
+ std::multimap<int64_t /* stopTimeNs */, Stream*>
+ mRestartStreams GUARDED_BY(mStreamManagerLock);
+
+ // 2) mActiveStreams: Streams that are active.
+ // The paired stream will be inactive.
+    // This is in the order specified by kStealActiveStream_OldestFirst.
+ std::list<Stream*> mActiveStreams GUARDED_BY(mStreamManagerLock);
+
+ // 3) mAvailableStreams: Streams that are inactive.
+ // The paired stream will also be inactive.
+ // No particular order.
+ std::unordered_set<Stream*> mAvailableStreams GUARDED_BY(mStreamManagerLock);
+
+ // 4) mProcessingStreams: Streams that are being processed by the ManagerThreads
+ // When on this queue, the stream and its pair are not available for stealing.
+ // Each ManagerThread will have at most one stream on the mProcessingStreams queue.
+ // The paired stream may be active or restarting.
+ // No particular order.
+ std::unordered_set<Stream*> mProcessingStreams GUARDED_BY(mStreamManagerLock);
+};
+
+} // namespace android::soundpool
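The worker-sizing test in needMoreThreads_l() above keys the restart queue by stop time, so the number of streams that are already due is the distance from begin() to upper_bound(now); another worker is wanted when that count exceeds the active worker count. A self-contained sketch of that calculation, with names local to the sketch:

#include <cstddef>
#include <cstdint>
#include <iterator>
#include <map>

// Sketch: decide whether one more worker thread should be launched.
bool needMoreWorkers(const std::multimap<int64_t /* stopTimeNs */, int /* stream */>& restartQueue,
                     size_t activeWorkers, int64_t nowNs) {
    if (restartQueue.empty()) return false;    // nothing to restart
    if (activeWorkers == 0) return true;       // no one to service the queue
    // Entries with stopTimeNs <= nowNs are due right now.
    const auto due = std::distance(restartQueue.begin(), restartQueue.upper_bound(nowNs));
    return due > static_cast<std::ptrdiff_t>(activeWorkers);
}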
diff --git a/media/jni/soundpool/android_media_SoundPool.cpp b/media/jni/soundpool/android_media_SoundPool.cpp
index bfecd6b30179..ca3cc8552990 100644
--- a/media/jni/soundpool/android_media_SoundPool.cpp
+++ b/media/jni/soundpool/android_media_SoundPool.cpp
@@ -52,7 +52,7 @@ android_media_SoundPool_load_FD(JNIEnv *env, jobject thiz, jobject fileDescripto
{
ALOGV("android_media_SoundPool_load_FD");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return 0;
+ if (ap == nullptr) return 0;
return (jint) ap->load(jniGetFDFromFileDescriptor(env, fileDescriptor),
int64_t(offset), int64_t(length), int(priority));
}
@@ -61,7 +61,7 @@ static jboolean
android_media_SoundPool_unload(JNIEnv *env, jobject thiz, jint sampleID) {
ALOGV("android_media_SoundPool_unload\n");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return JNI_FALSE;
+ if (ap == nullptr) return JNI_FALSE;
return ap->unload(sampleID) ? JNI_TRUE : JNI_FALSE;
}
@@ -72,7 +72,7 @@ android_media_SoundPool_play(JNIEnv *env, jobject thiz, jint sampleID,
{
ALOGV("android_media_SoundPool_play\n");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return 0;
+ if (ap == nullptr) return 0;
return (jint) ap->play(sampleID, leftVolume, rightVolume, priority, loop, rate);
}
@@ -81,7 +81,7 @@ android_media_SoundPool_pause(JNIEnv *env, jobject thiz, jint channelID)
{
ALOGV("android_media_SoundPool_pause");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->pause(channelID);
}
@@ -90,7 +90,7 @@ android_media_SoundPool_resume(JNIEnv *env, jobject thiz, jint channelID)
{
ALOGV("android_media_SoundPool_resume");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->resume(channelID);
}
@@ -99,7 +99,7 @@ android_media_SoundPool_autoPause(JNIEnv *env, jobject thiz)
{
ALOGV("android_media_SoundPool_autoPause");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->autoPause();
}
@@ -108,7 +108,7 @@ android_media_SoundPool_autoResume(JNIEnv *env, jobject thiz)
{
ALOGV("android_media_SoundPool_autoResume");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->autoResume();
}
@@ -117,7 +117,7 @@ android_media_SoundPool_stop(JNIEnv *env, jobject thiz, jint channelID)
{
ALOGV("android_media_SoundPool_stop");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->stop(channelID);
}
@@ -127,7 +127,7 @@ android_media_SoundPool_setVolume(JNIEnv *env, jobject thiz, jint channelID,
{
ALOGV("android_media_SoundPool_setVolume");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->setVolume(channelID, (float) leftVolume, (float) rightVolume);
}
@@ -136,7 +136,7 @@ android_media_SoundPool_mute(JNIEnv *env, jobject thiz, jboolean muting)
{
ALOGV("android_media_SoundPool_mute(%d)", muting);
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->mute(muting == JNI_TRUE);
}
@@ -146,7 +146,7 @@ android_media_SoundPool_setPriority(JNIEnv *env, jobject thiz, jint channelID,
{
ALOGV("android_media_SoundPool_setPriority");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->setPriority(channelID, (int) priority);
}
@@ -156,7 +156,7 @@ android_media_SoundPool_setLoop(JNIEnv *env, jobject thiz, jint channelID,
{
ALOGV("android_media_SoundPool_setLoop");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->setLoop(channelID, loop);
}
@@ -166,7 +166,7 @@ android_media_SoundPool_setRate(JNIEnv *env, jobject thiz, jint channelID,
{
ALOGV("android_media_SoundPool_setRate");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap == NULL) return;
+ if (ap == nullptr) return;
ap->setRate(channelID, (float) rate);
}
@@ -174,24 +174,26 @@ static void android_media_callback(SoundPoolEvent event, SoundPool* soundPool, v
{
ALOGV("callback: (%d, %d, %d, %p, %p)", event.mMsg, event.mArg1, event.mArg2, soundPool, user);
JNIEnv *env = AndroidRuntime::getJNIEnv();
- env->CallStaticVoidMethod(fields.mSoundPoolClass, fields.mPostEvent, user, event.mMsg, event.mArg1, event.mArg2, NULL);
+ env->CallStaticVoidMethod(
+ fields.mSoundPoolClass, fields.mPostEvent, user, event.mMsg, event.mArg1, event.mArg2,
+ nullptr /* object */);
}
static jint
android_media_SoundPool_native_setup(JNIEnv *env, jobject thiz, jobject weakRef,
jint maxChannels, jobject jaa)
{
- if (jaa == 0) {
+ if (jaa == nullptr) {
ALOGE("Error creating SoundPool: invalid audio attributes");
return -1;
}
- audio_attributes_t *paa = NULL;
+ audio_attributes_t *paa = nullptr;
// read the AudioAttributes values
paa = (audio_attributes_t *) calloc(1, sizeof(audio_attributes_t));
- const jstring jtags =
+ const auto jtags =
(jstring) env->GetObjectField(jaa, javaAudioAttrFields.fieldFormattedTags);
- const char* tags = env->GetStringUTFChars(jtags, NULL);
+ const char* tags = env->GetStringUTFChars(jtags, nullptr);
// copying array size -1, char array for tags was calloc'd, no need to NULL-terminate it
strncpy(paa->tags, tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE - 1);
env->ReleaseStringUTFChars(jtags, tags);
@@ -201,8 +203,8 @@ android_media_SoundPool_native_setup(JNIEnv *env, jobject thiz, jobject weakRef,
paa->flags = env->GetIntField(jaa, javaAudioAttrFields.fieldFlags);
ALOGV("android_media_SoundPool_native_setup");
- SoundPool *ap = new SoundPool(maxChannels, paa);
- if (ap == NULL) {
+ auto *ap = new SoundPool(maxChannels, paa);
+ if (ap == nullptr) {
return -1;
}
@@ -224,12 +226,12 @@ android_media_SoundPool_release(JNIEnv *env, jobject thiz)
{
ALOGV("android_media_SoundPool_release");
SoundPool *ap = MusterSoundPool(env, thiz);
- if (ap != NULL) {
+ if (ap != nullptr) {
// release weak reference and clear callback
- jobject weakRef = (jobject) ap->getUserData();
- ap->setCallback(NULL, NULL);
- if (weakRef != NULL) {
+ auto weakRef = (jobject) ap->getUserData();
+ ap->setCallback(nullptr /* callback */, nullptr /* user */);
+ if (weakRef != nullptr) {
env->DeleteGlobalRef(weakRef);
}
@@ -309,7 +311,7 @@ static const char* const kClassPathName = "android/media/SoundPool";
jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
{
- JNIEnv* env = NULL;
+ JNIEnv* env = nullptr;
jint result = -1;
jclass clazz;
@@ -317,23 +319,23 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
ALOGE("ERROR: GetEnv failed\n");
return result;
}
- assert(env != NULL);
+ assert(env != nullptr);
clazz = env->FindClass(kClassPathName);
- if (clazz == NULL) {
+ if (clazz == nullptr) {
ALOGE("Can't find %s", kClassPathName);
return result;
}
fields.mNativeContext = env->GetFieldID(clazz, "mNativeContext", "J");
- if (fields.mNativeContext == NULL) {
+ if (fields.mNativeContext == nullptr) {
ALOGE("Can't find SoundPool.mNativeContext");
return result;
}
fields.mPostEvent = env->GetStaticMethodID(clazz, "postEventFromNative",
"(Ljava/lang/Object;IIILjava/lang/Object;)V");
- if (fields.mPostEvent == NULL) {
+ if (fields.mPostEvent == nullptr) {
ALOGE("Can't find android/media/SoundPool.postEventFromNative");
return result;
}
@@ -342,16 +344,18 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
// since it's a static object.
fields.mSoundPoolClass = (jclass) env->NewGlobalRef(clazz);
- if (AndroidRuntime::registerNativeMethods(env, kClassPathName, gMethods, NELEM(gMethods)) < 0)
+ if (AndroidRuntime::registerNativeMethods(
+ env, kClassPathName, gMethods, NELEM(gMethods)) < 0) {
return result;
+ }
// Get the AudioAttributes class and fields
jclass audioAttrClass = env->FindClass(kAudioAttributesClassPathName);
- if (audioAttrClass == NULL) {
+ if (audioAttrClass == nullptr) {
ALOGE("Can't find %s", kAudioAttributesClassPathName);
return result;
}
- jclass audioAttributesClassRef = (jclass)env->NewGlobalRef(audioAttrClass);
+ auto audioAttributesClassRef = (jclass)env->NewGlobalRef(audioAttrClass);
javaAudioAttrFields.fieldUsage = env->GetFieldID(audioAttributesClassRef, "mUsage", "I");
javaAudioAttrFields.fieldContentType
= env->GetFieldID(audioAttributesClassRef, "mContentType", "I");
@@ -359,9 +363,10 @@ jint JNI_OnLoad(JavaVM* vm, void* /* reserved */)
javaAudioAttrFields.fieldFormattedTags =
env->GetFieldID(audioAttributesClassRef, "mFormattedTags", "Ljava/lang/String;");
env->DeleteGlobalRef(audioAttributesClassRef);
- if (javaAudioAttrFields.fieldUsage == NULL || javaAudioAttrFields.fieldContentType == NULL
- || javaAudioAttrFields.fieldFlags == NULL
- || javaAudioAttrFields.fieldFormattedTags == NULL) {
+ if (javaAudioAttrFields.fieldUsage == nullptr
+ || javaAudioAttrFields.fieldContentType == nullptr
+ || javaAudioAttrFields.fieldFlags == nullptr
+ || javaAudioAttrFields.fieldFormattedTags == nullptr) {
ALOGE("Can't initialize AudioAttributes fields");
return result;
}
diff --git a/media/jni/soundpool/tests/Android.bp b/media/jni/soundpool/tests/Android.bp
new file mode 100644
index 000000000000..52f59ed69503
--- /dev/null
+++ b/media/jni/soundpool/tests/Android.bp
@@ -0,0 +1,32 @@
+cc_binary {
+ name: "soundpool_stress",
+ host_supported: false,
+
+ include_dirs: [
+ "frameworks/base/media/jni/"
+ ],
+
+ shared_libs: [
+ "libaudioutils",
+ "libbinder",
+ "liblog",
+ "libmedia",
+ "libsoundpool",
+ "libstagefright",
+ "libutils",
+ ],
+
+ header_libs: [
+ "libmediametrics_headers",
+ ],
+
+ srcs: [
+ "soundpool_stress.cpp"
+ ],
+
+ cflags: [
+ "-Wall",
+ "-Werror",
+ "-Wextra",
+ ],
+}
diff --git a/media/jni/soundpool/tests/build_and_run.sh b/media/jni/soundpool/tests/build_and_run.sh
new file mode 100755
index 000000000000..72fd52814202
--- /dev/null
+++ b/media/jni/soundpool/tests/build_and_run.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+# Run samples from this directory
+#
+
+if [ -z "$ANDROID_BUILD_TOP" ]; then
+ echo "Android build environment not set"
+ exit -1
+fi
+
+# ensure we have mm
+. $ANDROID_BUILD_TOP/build/envsetup.sh
+
+mm
+
+echo "waiting for device"
+
+adb root && adb wait-for-device remount
+
+echo "========================================"
+echo "testing soundpool_stress"
+uidir="/product/media/audio/notifications"
+adb push $OUT/system/bin/soundpool_stress /system/bin
+
+# test SoundPool playback of all the UI sound samples (loaded twice) looping 10s 1 thread.
+adb shell /system/bin/soundpool_stress -l -1 $uidir/*.ogg $uidir/*.ogg
+
+# test SoundPool playback of all the UI sound samples (repeating 3 times) looping 10s 1 thread.
+adb shell /system/bin/soundpool_stress -l 1 -r 3 $uidir/*.ogg
+
+# performance test SoundPool playback of all the UI sound samples (x2)
+# 1 iterations, looping, 1 second playback, 4 threads.
+adb shell /system/bin/soundpool_stress -i 1 -l -1 -p 1 -t 4 $uidir/*.ogg $uidir/*.ogg
diff --git a/media/jni/soundpool/tests/soundpool_stress.cpp b/media/jni/soundpool/tests/soundpool_stress.cpp
new file mode 100644
index 000000000000..7d9b6a21b5c4
--- /dev/null
+++ b/media/jni/soundpool/tests/soundpool_stress.cpp
@@ -0,0 +1,319 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "soundpool"
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
+#include <atomic>
+#include <future>
+#include <map>
+#include <mutex>
+#include <set>
+#include <vector>
+
+#include <audio_utils/clock.h>
+#include <binder/ProcessState.h>
+#include <media/stagefright/MediaExtractorFactory.h>
+#include <soundpool/SoundPool.h> // direct include, this is not an NDK feature.
+#include <system/audio.h>
+#include <utils/Log.h>
+
+using namespace android;
+
+// Errors and diagnostic messages all go to stdout.
+
+namespace {
+
+void usage(const char *name)
+{
+ printf("Usage: %s "
+ "[-i #iterations] [-l #loop] [-p #playback_seconds] [-s #streams] [-t #threads] "
+ "[-z #snoozeSec] <input-file>+\n", name);
+ printf("Uses soundpool to load and play a file (the first 10 seconds)\n");
+ printf(" -i #iterations, default 1\n");
+ printf(" -l #loop looping mode, -1 forever\n");
+ printf(" -p #playback_seconds, default 10\n");
+ printf(" -r #repeat soundIDs (0 or more times), default 0\n");
+ printf(" -s #streams for concurrent sound playback, default 20\n");
+ printf(" -t #threads, default 1\n");
+ printf(" -z #snoozeSec after stopping, -1 forever, default 0\n");
+ printf(" <input-file>+ files to be played\n");
+}
+
+std::atomic_int32_t gErrors{};
+std::atomic_int32_t gWarnings{};
+
+void printEvent(const SoundPoolEvent *event) {
+ printf("{ msg:%d id:%d status:%d }\n", event->mMsg, event->mArg1, event->mArg2);
+}
+
+class CallbackManager {
+public:
+ int32_t getNumberEvents(int32_t soundID) {
+ std::lock_guard lock(mLock);
+        return mEvents[soundID];
+ }
+
+ void setSoundPool(SoundPool* soundPool) {
+ std::lock_guard lock(mLock);
+ mSoundPool = soundPool;
+ }
+
+ void callback(SoundPoolEvent event, const SoundPool *soundPool) {
+ std::lock_guard lock(mLock);
+ printEvent(&event);
+ if (soundPool != mSoundPool) {
+ printf("ERROR: mismatched soundpool: %p\n", soundPool);
+ ++gErrors;
+ return;
+ }
+ if (event.mMsg != 1 /* SoundPoolEvent::SOUND_LOADED */) {
+ printf("ERROR: invalid event msg: %d\n", event.mMsg);
+ ++gErrors;
+ return;
+ }
+ if (event.mArg2 != 0) {
+ printf("ERROR: event status(%d) != 0\n", event.mArg2);
+ ++gErrors;
+ return;
+ }
+ if (event.mArg1 <= 0) {
+ printf("ERROR: event soundID(%d) < 0\n", event.mArg1);
+ ++gErrors;
+ return;
+ }
+ ++mEvents[event.mArg1];
+ }
+
+private:
+ std::mutex mLock;
+ SoundPool *mSoundPool = nullptr;
+ std::map<int32_t /* soundID */, int32_t /* count */> mEvents;
+} gCallbackManager;
+
+
+void StaticCallbackManager(SoundPoolEvent event, SoundPool* soundPool, void* user) {
+ ((CallbackManager *)user)->callback(event, soundPool);
+}
+
+void testStreams(SoundPool *soundPool, const std::vector<const char *> &filenames,
+ int loop, int repeat, int playSec)
+{
+ const int64_t startTimeNs = systemTime();
+ std::vector<int32_t> soundIDs;
+ for (auto filename : filenames) {
+ struct stat st;
+ if (stat(filename, &st) < 0) {
+ printf("ERROR: cannot stat %s\n", filename);
+ return;
+ }
+ const uint64_t length = uint64_t(st.st_size);
+ const int inp = open(filename, O_RDONLY);
+ if (inp < 0) {
+ printf("ERROR: cannot open %s\n", filename);
+ return;
+ }
+ printf("loading (%s) size (%llu)\n", filename, (unsigned long long)length);
+ const int32_t soundID = soundPool->load(
+ inp, 0 /*offset*/, length, 0 /*priority - unused*/);
+ if (soundID == 0) {
+ printf("ERROR: cannot load %s\n", filename);
+ return;
+ }
+ close(inp);
+ soundIDs.emplace_back(soundID);
+ printf("loaded %s soundID(%d)\n", filename, soundID);
+ }
+ const int64_t requestLoadTimeNs = systemTime();
+ printf("\nrequestLoadTimeMs: %d\n",
+ (int)((requestLoadTimeNs - startTimeNs) / NANOS_PER_MILLISECOND));
+
+ // create stream & get Id (playing)
+ const float maxVol = 1.f;
+ const float silentVol = 0.f;
+ const int priority = 0; // lowest
+ const float rate = 1.f; // normal
+
+ // Loading is done by a SoundPool Worker thread.
+ // TODO: Use SoundPool::setCallback() for wait
+
+ for (int32_t soundID : soundIDs) {
+ for (int i = 0; i <= repeat; ++i) {
+ while (true) {
+ const int32_t streamID =
+ soundPool->play(soundID, silentVol, silentVol, priority, 0 /*loop*/, rate);
+ if (streamID != 0) {
+ const int32_t events = gCallbackManager.getNumberEvents(soundID);
+ if (events != 1) {
+ printf("WARNING: successful play for streamID:%d soundID:%d"
+ " but callback events(%d) != 1\n", streamID, soundID, events);
+ ++gWarnings;
+ }
+ soundPool->stop(streamID);
+ break;
+ }
+ usleep(1000);
+ }
+ printf("[%d]", soundID);
+ fflush(stdout);
+ }
+ }
+
+ const int64_t loadTimeNs = systemTime();
+ printf("\nloadTimeMs: %d\n", (int)((loadTimeNs - startTimeNs) / NANOS_PER_MILLISECOND));
+
+ // check and play (overlap with above).
+ std::vector<int32_t> streamIDs;
+ for (int32_t soundID : soundIDs) {
+ for (int i = 0; i <= repeat; ++i) {
+ printf("\nplaying soundID=%d", soundID);
+ const int32_t streamID =
+ soundPool->play(soundID, maxVol, maxVol, priority, loop, rate);
+ if (streamID == 0) {
+ printf(" failed! ERROR");
+ ++gErrors;
+ } else {
+ printf(" streamID=%d", streamID);
+ streamIDs.emplace_back(streamID);
+ }
+ }
+ }
+ const int64_t playTimeNs = systemTime();
+ printf("\nplayTimeMs: %d\n", (int)((playTimeNs - loadTimeNs) / NANOS_PER_MILLISECOND));
+
+ for (int i = 0; i < playSec; ++i) {
+ sleep(1);
+ printf(".");
+ fflush(stdout);
+ }
+
+ for (int32_t streamID : streamIDs) {
+ soundPool->stop(streamID);
+ }
+
+ for (int32_t soundID : soundIDs) {
+ soundPool->unload(soundID);
+ }
+ printf("\nDone!\n");
+}
+
+} // namespace
+
+int main(int argc, char *argv[])
+{
+ const char * const me = argv[0];
+
+ int iterations = 1;
+ int loop = 0; // disable looping
+ int maxStreams = 40; // change to have more concurrent playback streams
+ int playSec = 10;
+ int repeat = 0;
+ int snoozeSec = 0;
+ int threadCount = 1;
+ for (int ch; (ch = getopt(argc, argv, "i:l:p:r:s:t:z:")) != -1; ) {
+ switch (ch) {
+ case 'i':
+ iterations = atoi(optarg);
+ break;
+ case 'l':
+ loop = atoi(optarg);
+ break;
+ case 'p':
+ playSec = atoi(optarg);
+ break;
+ case 'r':
+ repeat = atoi(optarg);
+ break;
+ case 's':
+ maxStreams = atoi(optarg);
+ break;
+ case 't':
+ threadCount = atoi(optarg);
+ break;
+ case 'z':
+ snoozeSec = atoi(optarg);
+ break;
+ default:
+ usage(me);
+ return EXIT_FAILURE;
+ }
+ }
+
+ argc -= optind;
+ argv += optind;
+ if (argc <= 0) {
+ usage(me);
+ return EXIT_FAILURE;
+ }
+
+ std::vector<const char *> filenames(argv, argv + argc);
+
+ android::ProcessState::self()->startThreadPool();
+
+ // O and later requires data sniffer registration for proper file type detection
+ MediaExtractorFactory::LoadExtractors();
+
+ // create soundpool
+ audio_attributes_t aa = {
+ .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+ .usage = AUDIO_USAGE_MEDIA,
+ };
+ auto soundPool = std::make_unique<SoundPool>(maxStreams, &aa);
+
+ gCallbackManager.setSoundPool(soundPool.get());
+ soundPool->setCallback(StaticCallbackManager, &gCallbackManager);
+
+ const int64_t startTimeNs = systemTime();
+
+ for (int it = 0; it < iterations; ++it) {
+ // One instance:
+        // testStreams(soundPool.get(), filenames, loop, repeat, playSec);
+
+ // Test multiple instances
+ std::vector<std::future<void>> threads(threadCount);
+ printf("testing %zu threads\n", threads.size());
+ for (auto &thread : threads) {
+ thread = std::async(std::launch::async,
+ [&]{ testStreams(soundPool.get(), filenames, loop, repeat, playSec);});
+ }
+ // automatically joins.
+ }
+
+ const int64_t endTimeNs = systemTime();
+
+ // snooze before cleaning up to examine soundpool dumpsys state after stop
+ for (int i = 0; snoozeSec < 0 || i < snoozeSec; ++i) {
+ printf("z");
+ fflush(stdout);
+ sleep(1);
+    }
+
+ gCallbackManager.setSoundPool(nullptr);
+ soundPool.reset();
+
+ printf("total time in ms: %lld\n", (endTimeNs - startTimeNs) / NANOS_PER_MILLISECOND);
+ if (gWarnings != 0) {
+ printf("%d warnings!\n", gWarnings.load());
+ }
+ if (gErrors != 0) {
+ printf("%d errors!\n", gErrors.load());
+ return EXIT_FAILURE;
+ }
+ return EXIT_SUCCESS;
+}