Diffstat (limited to 'media/java/android')
-rw-r--r--  media/java/android/media/AudioAttributes.java                     |  50
-rw-r--r--  media/java/android/media/AudioDeviceInfo.java                     |   8
-rw-r--r--  media/java/android/media/AudioFocusRequest.java                   |   6
-rw-r--r--  media/java/android/media/AudioManager.java                        |  40
-rw-r--r--  media/java/android/media/AudioPortEventHandler.java               |  26
-rw-r--r--  media/java/android/media/AudioRecord.java                         |  55
-rw-r--r--  media/java/android/media/AudioTrack.java                          |  55
-rw-r--r--  media/java/android/media/BufferingParams.java                     | 384
-rw-r--r--  media/java/android/media/ExifInterface.java                       |  86
-rw-r--r--  media/java/android/media/IPlayer.aidl                             |  33
-rw-r--r--  media/java/android/media/ImageReader.java                         |   1
-rw-r--r--  media/java/android/media/MediaDrm.java                            |  74
-rw-r--r--  media/java/android/media/MediaFormat.java                         |  98
-rw-r--r--  media/java/android/media/MediaMetadata.java                       |  47
-rw-r--r--  media/java/android/media/MediaMetadataRetriever.java              | 164
-rw-r--r--  media/java/android/media/MediaMuxer.java                          |  16
-rw-r--r--  media/java/android/media/MediaPlayer.java                         | 275
-rw-r--r--  media/java/android/media/MediaRecorder.java                       | 171
-rw-r--r--  media/java/android/media/MediaRouter.java                         |   6
-rw-r--r--  media/java/android/media/NativeRoutingEventHandlerDelegate.java   |  51
-rw-r--r--  media/java/android/media/VolumeShaper.aidl                        |  21
-rw-r--r--  media/java/android/media/browse/MediaBrowser.java                 |  15
-rw-r--r--  media/java/android/media/midi/package.html                        |  13
-rw-r--r--  media/java/android/media/projection/MediaProjectionManager.java   |   9
-rw-r--r--  media/java/android/media/session/MediaSession.java                |  22
-rw-r--r--  media/java/android/media/session/PlaybackState.java               |   3
-rw-r--r--  media/java/android/media/tv/ITvInputHardware.aidl                 |   6
-rw-r--r--  media/java/android/media/tv/TvContract.java                       |  28
-rw-r--r--  media/java/android/media/tv/TvInputManager.java                   |  11
29 files changed, 1032 insertions, 742 deletions
diff --git a/media/java/android/media/AudioAttributes.java b/media/java/android/media/AudioAttributes.java
index 3b9a5de00707..e0289f0bf336 100644
--- a/media/java/android/media/AudioAttributes.java
+++ b/media/java/android/media/AudioAttributes.java
@@ -19,12 +19,14 @@ package android.media;
import android.annotation.IntDef;
import android.annotation.NonNull;
import android.annotation.SystemApi;
+import android.media.AudioAttributesProto;
import android.os.Bundle;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.TextUtils;
import android.util.Log;
import android.util.SparseIntArray;
+import android.util.proto.ProtoOutputStream;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
@@ -177,7 +179,7 @@ public final class AudioAttributes implements Parcelable {
/**
* IMPORTANT: when adding new usage types, add them to SDK_USAGES and update SUPPRESSIBLE_USAGES
- * if applicable.
+ * if applicable, as well as audioattributes.proto.
*/
/**
@@ -200,6 +202,22 @@ public final class AudioAttributes implements Parcelable {
* @see #SUPPRESSIBLE_USAGES
*/
public final static int SUPPRESSIBLE_NEVER = 3;
+ /**
+ * @hide
+ * Denotes a usage for alarms,
+ * will be muted when the Zen mode doesn't allow alarms
+ * @see #SUPPRESSIBLE_USAGES
+ */
+ public final static int SUPPRESSIBLE_ALARM = 4;
+ /**
+ * @hide
+ * Denotes a usage for all other sounds not caught in SUPPRESSIBLE_NOTIFICATION,
+ * SUPPRESSIBLE_CALL, SUPPRESSIBLE_NEVER or SUPPRESSIBLE_ALARM.
+ * This includes media, system, game, navigation, the assistant, and more.
+ * These will be muted when the Zen mode doesn't allow media/system/other.
+ * @see #SUPPRESSIBLE_USAGES
+ */
+ public final static int SUPPRESSIBLE_MEDIA_SYSTEM_OTHER = 5;
/**
* @hide
@@ -219,6 +237,14 @@ public final class AudioAttributes implements Parcelable {
SUPPRESSIBLE_USAGES.put(USAGE_NOTIFICATION_EVENT, SUPPRESSIBLE_NOTIFICATION);
SUPPRESSIBLE_USAGES.put(USAGE_ASSISTANCE_ACCESSIBILITY, SUPPRESSIBLE_NEVER);
SUPPRESSIBLE_USAGES.put(USAGE_VOICE_COMMUNICATION, SUPPRESSIBLE_NEVER);
+ SUPPRESSIBLE_USAGES.put(USAGE_ALARM, SUPPRESSIBLE_ALARM);
+ SUPPRESSIBLE_USAGES.put(USAGE_MEDIA, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
+ SUPPRESSIBLE_USAGES.put(USAGE_ASSISTANCE_SONIFICATION, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
+ SUPPRESSIBLE_USAGES.put(USAGE_ASSISTANCE_NAVIGATION_GUIDANCE, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
+ SUPPRESSIBLE_USAGES.put(USAGE_GAME, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
+ SUPPRESSIBLE_USAGES.put(USAGE_VOICE_COMMUNICATION_SIGNALLING, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
+ SUPPRESSIBLE_USAGES.put(USAGE_ASSISTANT, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
+ SUPPRESSIBLE_USAGES.put(USAGE_UNKNOWN, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
}
/**
@@ -715,7 +741,7 @@ public final class AudioAttributes implements Parcelable {
/**
* @hide
* Same as {@link #setCapturePreset(int)} but authorizes the use of HOTWORD,
- * REMOTE_SUBMIX and RADIO_TUNER.
+ * REMOTE_SUBMIX, RADIO_TUNER, VOICE_DOWNLINK, VOICE_UPLINK and VOICE_CALL.
* @param preset
* @return the same Builder instance.
*/
@@ -723,7 +749,10 @@ public final class AudioAttributes implements Parcelable {
public Builder setInternalCapturePreset(int preset) {
if ((preset == MediaRecorder.AudioSource.HOTWORD)
|| (preset == MediaRecorder.AudioSource.REMOTE_SUBMIX)
- || (preset == MediaRecorder.AudioSource.RADIO_TUNER)) {
+ || (preset == MediaRecorder.AudioSource.RADIO_TUNER)
+ || (preset == MediaRecorder.AudioSource.VOICE_DOWNLINK)
+ || (preset == MediaRecorder.AudioSource.VOICE_UPLINK)
+ || (preset == MediaRecorder.AudioSource.VOICE_CALL)) {
mSource = preset;
} else {
setCapturePreset(preset);
@@ -850,6 +879,21 @@ public final class AudioAttributes implements Parcelable {
}
/** @hide */
+ public void toProto(ProtoOutputStream proto) {
+ proto.write(AudioAttributesProto.USAGE, mUsage);
+ proto.write(AudioAttributesProto.CONTENT_TYPE, mContentType);
+ proto.write(AudioAttributesProto.FLAGS, mFlags);
+ // mFormattedTags is never null due to assignment in Builder or unmarshalling.
+ for (String t : mFormattedTags.split(";")) {
+ t = t.trim();
+ if (!t.isEmpty()) {
+ proto.write(AudioAttributesProto.TAGS, t);
+ }
+ }
+ // TODO: is the data in mBundle useful for debugging?
+ }
+
+ /** @hide */
public String usageToString() {
return usageToString(mUsage);
}
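
The new SUPPRESSIBLE_ALARM and SUPPRESSIBLE_MEDIA_SYSTEM_OTHER buckets extend the usage-to-suppression mapping that Do Not Disturb consults. A minimal sketch of the lookup pattern follows; the mirror constants are hypothetical (the real SUPPRESSIBLE_* values are @hide) and the default bucket is a simplification of the full table above:

    import android.media.AudioAttributes;
    import android.util.SparseIntArray;

    final class SuppressionLookup {
        // Hypothetical mirrors of the @hide SUPPRESSIBLE_* constants, for illustration only.
        static final int SUPPRESSIBLE_NEVER = 3;
        static final int SUPPRESSIBLE_ALARM = 4;
        static final int SUPPRESSIBLE_MEDIA_SYSTEM_OTHER = 5;

        private static final SparseIntArray USAGE_TO_GROUP = new SparseIntArray();
        static {
            USAGE_TO_GROUP.put(AudioAttributes.USAGE_VOICE_COMMUNICATION, SUPPRESSIBLE_NEVER);
            USAGE_TO_GROUP.put(AudioAttributes.USAGE_ALARM, SUPPRESSIBLE_ALARM);
            USAGE_TO_GROUP.put(AudioAttributes.USAGE_MEDIA, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
            USAGE_TO_GROUP.put(AudioAttributes.USAGE_GAME, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
        }

        /** Unmapped usages fall into the media/system/other bucket in this simplified sketch. */
        static int groupFor(int usage) {
            return USAGE_TO_GROUP.get(usage, SUPPRESSIBLE_MEDIA_SYSTEM_OTHER);
        }
    }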
diff --git a/media/java/android/media/AudioDeviceInfo.java b/media/java/android/media/AudioDeviceInfo.java
index 1b89c96602d3..19d467a69c29 100644
--- a/media/java/android/media/AudioDeviceInfo.java
+++ b/media/java/android/media/AudioDeviceInfo.java
@@ -127,6 +127,14 @@ public final class AudioDeviceInfo {
}
/**
+ * @hide
+ * @return The underlying {@link AudioDevicePort} instance.
+ */
+ public AudioDevicePort getPort() {
+ return mPort;
+ }
+
+ /**
* @return The internal device ID.
*/
public int getId() {
diff --git a/media/java/android/media/AudioFocusRequest.java b/media/java/android/media/AudioFocusRequest.java
index 9841815a52d1..de59ac39abf6 100644
--- a/media/java/android/media/AudioFocusRequest.java
+++ b/media/java/android/media/AudioFocusRequest.java
@@ -163,12 +163,12 @@ import android.os.Looper;
* // requesting audio focus
* int res = mAudioManager.requestAudioFocus(mFocusRequest);
* synchronized (mFocusLock) {
- * if (res == AUDIOFOCUS_REQUEST_FAILED) {
+ * if (res == AudioManager.AUDIOFOCUS_REQUEST_FAILED) {
* mPlaybackDelayed = false;
- * } else if (res == AUDIOFOCUS_REQUEST_GRANTED) {
+ * } else if (res == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
* mPlaybackDelayed = false;
* playbackNow();
- * } else if (res == AUDIOFOCUS_REQUEST_DELAYED) {
+ * } else if (res == AudioManager.AUDIOFOCUS_REQUEST_DELAYED) {
* mPlaybackDelayed = true;
* }
* }
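
The snippet above is the class javadoc example; the fix only qualifies the result constants with their defining class, AudioManager. A compilable sketch of the same flow using the public focus API (assumed usage, not part of this change):

    import android.media.AudioAttributes;
    import android.media.AudioFocusRequest;
    import android.media.AudioManager;

    final class FocusHelper {
        /** Requests focus and distinguishes the three results named in the javadoc above. */
        static boolean requestPlaybackFocus(AudioManager am,
                AudioManager.OnAudioFocusChangeListener listener) {
            AudioFocusRequest request =
                    new AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN)
                            .setAudioAttributes(new AudioAttributes.Builder()
                                    .setUsage(AudioAttributes.USAGE_MEDIA)
                                    .build())
                            .setAcceptsDelayedFocusGain(true)
                            .setOnAudioFocusChangeListener(listener)
                            .build();
            int res = am.requestAudioFocus(request);
            // AUDIOFOCUS_REQUEST_DELAYED: focus (and playback) arrives later via the
            // listener; the constants live on AudioManager, hence the doc correction.
            return res == AudioManager.AUDIOFOCUS_REQUEST_GRANTED;
        }
    }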
diff --git a/media/java/android/media/AudioManager.java b/media/java/android/media/AudioManager.java
index 3df17068725e..58976ca0d5cc 100644
--- a/media/java/android/media/AudioManager.java
+++ b/media/java/android/media/AudioManager.java
@@ -43,17 +43,16 @@ import android.os.Looper;
import android.os.Message;
import android.os.Process;
import android.os.RemoteException;
-import android.os.SystemClock;
import android.os.ServiceManager;
+import android.os.SystemClock;
import android.os.UserHandle;
import android.provider.Settings;
import android.util.ArrayMap;
import android.util.Log;
-import android.util.Pair;
+import android.util.Slog;
import android.view.KeyEvent;
import java.util.ArrayList;
-import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
@@ -1248,7 +1247,7 @@ public class AudioManager {
//====================================================================
// Bluetooth SCO control
/**
- * Sticky broadcast intent action indicating that the bluetoooth SCO audio
+ * Sticky broadcast intent action indicating that the Bluetooth SCO audio
* connection state has changed. The intent contains on extra {@link #EXTRA_SCO_AUDIO_STATE}
* indicating the new state which is either {@link #SCO_AUDIO_STATE_DISCONNECTED}
* or {@link #SCO_AUDIO_STATE_CONNECTED}
@@ -1262,7 +1261,7 @@ public class AudioManager {
"android.media.SCO_AUDIO_STATE_CHANGED";
/**
- * Sticky broadcast intent action indicating that the bluetoooth SCO audio
+ * Sticky broadcast intent action indicating that the Bluetooth SCO audio
* connection state has been updated.
* <p>This intent has two extras:
* <ul>
@@ -1966,9 +1965,28 @@ public class AudioManager {
*/
private boolean querySoundEffectsEnabled(int user) {
return Settings.System.getIntForUser(getContext().getContentResolver(),
- Settings.System.SOUND_EFFECTS_ENABLED, 0, user) != 0;
+ Settings.System.SOUND_EFFECTS_ENABLED, 0, user) != 0
+ && !areSystemSoundsZenModeBlocked(getContext());
}
+ private boolean areSystemSoundsZenModeBlocked(Context context) {
+ int zenMode = Settings.Global.getInt(context.getContentResolver(),
+ Settings.Global.ZEN_MODE, 0);
+
+ switch (zenMode) {
+ case Settings.Global.ZEN_MODE_NO_INTERRUPTIONS:
+ case Settings.Global.ZEN_MODE_ALARMS:
+ return true;
+ case Settings.Global.ZEN_MODE_IMPORTANT_INTERRUPTIONS:
+ final NotificationManager noMan = (NotificationManager) context
+ .getSystemService(Context.NOTIFICATION_SERVICE);
+ return (noMan.getNotificationPolicy().priorityCategories
+ & NotificationManager.Policy.PRIORITY_CATEGORY_MEDIA_SYSTEM_OTHER) == 0;
+ case Settings.Global.ZEN_MODE_OFF:
+ default:
+ return false;
+ }
+ }
/**
* Load Sound effects.
@@ -4119,7 +4137,15 @@ public class AudioManager {
Log.w(TAG, "updateAudioPortCache: listAudioPatches failed");
return status;
}
- } while (patchGeneration[0] != portGeneration[0]);
+ // Loop until patch generation is the same as port generation unless audio ports
+ // and audio patches are not null.
+ } while (patchGeneration[0] != portGeneration[0]
+ && (ports == null || patches == null));
+ // If the patch generation doesn't equal port generation, return ERROR here in case
+ // of mismatch between audio ports and audio patches.
+ if (patchGeneration[0] != portGeneration[0]) {
+ return ERROR;
+ }
for (int i = 0; i < newPatches.size(); i++) {
for (int j = 0; j < newPatches.get(i).sources().length; j++) {
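
querySoundEffectsEnabled() now also consults the current Zen mode, so UI touch sounds are muted under total-silence and alarms-only modes, and under priority mode when the policy blocks the media/system/other category. A rough app-side analogue using only public APIs (the framework itself reads Settings.Global.ZEN_MODE as shown above; this sketch is illustrative):

    import android.app.NotificationManager;
    import android.content.Context;

    final class ZenSoundGate {
        /** Rough public-API analogue of areSystemSoundsZenModeBlocked() above. */
        static boolean systemSoundsLikelyBlocked(Context context) {
            NotificationManager nm = context.getSystemService(NotificationManager.class);
            switch (nm.getCurrentInterruptionFilter()) {
                case NotificationManager.INTERRUPTION_FILTER_NONE:    // total silence
                case NotificationManager.INTERRUPTION_FILTER_ALARMS:  // alarms only
                    return true;
                case NotificationManager.INTERRUPTION_FILTER_PRIORITY:
                    // Inspecting priorityCategories needs notification policy access;
                    // assume not blocked when that cannot be checked.
                    return false;
                default:
                    return false;
            }
        }
    }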
diff --git a/media/java/android/media/AudioPortEventHandler.java b/media/java/android/media/AudioPortEventHandler.java
index c152245d4f9a..ac3904a22882 100644
--- a/media/java/android/media/AudioPortEventHandler.java
+++ b/media/java/android/media/AudioPortEventHandler.java
@@ -17,6 +17,7 @@
package android.media;
import android.os.Handler;
+import android.os.HandlerThread;
import android.os.Looper;
import android.os.Message;
import java.util.ArrayList;
@@ -30,6 +31,7 @@ import java.lang.ref.WeakReference;
class AudioPortEventHandler {
private Handler mHandler;
+ private HandlerThread mHandlerThread;
private final ArrayList<AudioManager.OnAudioPortUpdateListener> mListeners =
new ArrayList<AudioManager.OnAudioPortUpdateListener>();
@@ -40,6 +42,8 @@ class AudioPortEventHandler {
private static final int AUDIOPORT_EVENT_SERVICE_DIED = 3;
private static final int AUDIOPORT_EVENT_NEW_LISTENER = 4;
+ private static final long RESCHEDULE_MESSAGE_DELAY_MS = 100;
+
/**
* Accessed by native methods: JNI Callback context.
*/
@@ -51,11 +55,12 @@ class AudioPortEventHandler {
if (mHandler != null) {
return;
}
- // find the looper for our new event handler
- Looper looper = Looper.getMainLooper();
+ // create a new thread for our new event handler
+ mHandlerThread = new HandlerThread(TAG);
+ mHandlerThread.start();
- if (looper != null) {
- mHandler = new Handler(looper) {
+ if (mHandlerThread.getLooper() != null) {
+ mHandler = new Handler(mHandlerThread.getLooper()) {
@Override
public void handleMessage(Message msg) {
ArrayList<AudioManager.OnAudioPortUpdateListener> listeners;
@@ -86,6 +91,12 @@ class AudioPortEventHandler {
if (msg.what != AUDIOPORT_EVENT_SERVICE_DIED) {
int status = AudioManager.updateAudioPortCache(ports, patches, null);
if (status != AudioManager.SUCCESS) {
+ // Since audio ports and audio patches are not null, the return
+ // value could be ERROR due to inconsistency between port generation
+ // and patch generation. In this case, we need to reschedule the
+ // message to make sure the native callback is done.
+ sendMessageDelayed(obtainMessage(msg.what, msg.obj),
+ RESCHEDULE_MESSAGE_DELAY_MS);
return;
}
}
@@ -132,6 +143,9 @@ class AudioPortEventHandler {
@Override
protected void finalize() {
native_finalize();
+ if (mHandlerThread.isAlive()) {
+ mHandlerThread.quit();
+ }
}
private native void native_finalize();
@@ -168,6 +182,10 @@ class AudioPortEventHandler {
Handler handler = eventHandler.handler();
if (handler != null) {
Message m = handler.obtainMessage(what, arg1, arg2, obj);
+ if (what != AUDIOPORT_EVENT_NEW_LISTENER) {
+ // Except AUDIOPORT_EVENT_NEW_LISTENER, we can only respect the last message.
+ handler.removeMessages(what);
+ }
handler.sendMessage(m);
}
}
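
The handler now runs on its own HandlerThread, coalesces pending events of the same type, and retries when updateAudioPortCache() reports an inconsistent port/patch snapshot. A standalone sketch of that retry-and-coalesce pattern (hypothetical class, not framework code):

    import android.os.Handler;
    import android.os.HandlerThread;
    import android.os.Message;

    final class CoalescingRetryHandler {
        private static final int EVENT_REFRESH = 1;
        private static final long RETRY_DELAY_MS = 100; // mirrors RESCHEDULE_MESSAGE_DELAY_MS

        private final HandlerThread mThread = new HandlerThread("port-events");
        private final Handler mHandler;

        CoalescingRetryHandler() {
            mThread.start();
            mHandler = new Handler(mThread.getLooper()) {
                @Override
                public void handleMessage(Message msg) {
                    if (!refreshCache()) {
                        // Snapshot was inconsistent; retry later, one message in flight.
                        sendMessageDelayed(obtainMessage(msg.what, msg.obj), RETRY_DELAY_MS);
                    }
                }
            };
        }

        void post(Object payload) {
            // Only the latest refresh request matters; drop queued duplicates first.
            mHandler.removeMessages(EVENT_REFRESH);
            mHandler.sendMessage(mHandler.obtainMessage(EVENT_REFRESH, payload));
        }

        private boolean refreshCache() {
            // Stand-in for AudioManager.updateAudioPortCache(...) == AudioManager.SUCCESS.
            return true;
        }
    }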
diff --git a/media/java/android/media/AudioRecord.java b/media/java/android/media/AudioRecord.java
index 0906ba50f7df..27784e96d9f9 100644
--- a/media/java/android/media/AudioRecord.java
+++ b/media/java/android/media/AudioRecord.java
@@ -1516,66 +1516,13 @@ public class AudioRecord implements AudioRouting
}
/**
- * Helper class to handle the forwarding of native events to the appropriate listener
- * (potentially) handled in a different thread
- */
- private class NativeRoutingEventHandlerDelegate {
- private final Handler mHandler;
-
- NativeRoutingEventHandlerDelegate(final AudioRecord record,
- final AudioRouting.OnRoutingChangedListener listener,
- Handler handler) {
- // find the looper for our new event handler
- Looper looper;
- if (handler != null) {
- looper = handler.getLooper();
- } else {
- // no given handler, use the looper the AudioRecord was created in
- looper = mInitializationLooper;
- }
-
- // construct the event handler with this looper
- if (looper != null) {
- // implement the event handler delegate
- mHandler = new Handler(looper) {
- @Override
- public void handleMessage(Message msg) {
- if (record == null) {
- return;
- }
- switch(msg.what) {
- case AudioSystem.NATIVE_EVENT_ROUTING_CHANGE:
- if (listener != null) {
- listener.onRoutingChanged(record);
- }
- break;
- default:
- loge("Unknown native event type: " + msg.what);
- break;
- }
- }
- };
- } else {
- mHandler = null;
- }
- }
-
- Handler getHandler() {
- return mHandler;
- }
- }
-
- /**
* Sends device list change notification to all listeners.
*/
private void broadcastRoutingChange() {
AudioManager.resetAudioPortGeneration();
synchronized (mRoutingChangeListeners) {
for (NativeRoutingEventHandlerDelegate delegate : mRoutingChangeListeners.values()) {
- Handler handler = delegate.getHandler();
- if (handler != null) {
- handler.sendEmptyMessage(AudioSystem.NATIVE_EVENT_ROUTING_CHANGE);
- }
+ delegate.notifyClient();
}
}
}
diff --git a/media/java/android/media/AudioTrack.java b/media/java/android/media/AudioTrack.java
index 50145f8a9886..e535fdf53d01 100644
--- a/media/java/android/media/AudioTrack.java
+++ b/media/java/android/media/AudioTrack.java
@@ -2856,10 +2856,7 @@ public class AudioTrack extends PlayerBase
AudioManager.resetAudioPortGeneration();
synchronized (mRoutingChangeListeners) {
for (NativeRoutingEventHandlerDelegate delegate : mRoutingChangeListeners.values()) {
- Handler handler = delegate.getHandler();
- if (handler != null) {
- handler.sendEmptyMessage(AudioSystem.NATIVE_EVENT_ROUTING_CHANGE);
- }
+ delegate.notifyClient();
}
}
}
@@ -2943,56 +2940,6 @@ public class AudioTrack extends PlayerBase
}
}
- /**
- * Helper class to handle the forwarding of native events to the appropriate listener
- * (potentially) handled in a different thread
- */
- private class NativeRoutingEventHandlerDelegate {
- private final Handler mHandler;
-
- NativeRoutingEventHandlerDelegate(final AudioTrack track,
- final AudioRouting.OnRoutingChangedListener listener,
- Handler handler) {
- // find the looper for our new event handler
- Looper looper;
- if (handler != null) {
- looper = handler.getLooper();
- } else {
- // no given handler, use the looper the AudioTrack was created in
- looper = mInitializationLooper;
- }
-
- // construct the event handler with this looper
- if (looper != null) {
- // implement the event handler delegate
- mHandler = new Handler(looper) {
- @Override
- public void handleMessage(Message msg) {
- if (track == null) {
- return;
- }
- switch(msg.what) {
- case AudioSystem.NATIVE_EVENT_ROUTING_CHANGE:
- if (listener != null) {
- listener.onRoutingChanged(track);
- }
- break;
- default:
- loge("Unknown native event type: " + msg.what);
- break;
- }
- }
- };
- } else {
- mHandler = null;
- }
- }
-
- Handler getHandler() {
- return mHandler;
- }
- }
-
//---------------------------------------------------------
// Methods for IPlayer interface
//--------------------
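
Both AudioRecord and AudioTrack now delegate routing-change dispatch to the shared NativeRoutingEventHandlerDelegate (listed in the diffstat but not shown in this section). Based on the two deleted inner classes and the new notifyClient() call, its shape is presumably close to the following sketch; this is an assumption, not the actual new file:

    import android.media.AudioRouting;
    import android.os.Handler;
    import android.os.Looper;
    import android.os.Message;

    /** Illustrative sketch only; the real shared class added by this change is not shown here. */
    class RoutingDelegateSketch {
        private final Handler mHandler;

        RoutingDelegateSketch(final AudioRouting router,
                final AudioRouting.OnRoutingChangedListener listener, Handler handler) {
            // Simplification: the framework falls back to the creation-time looper,
            // not necessarily the main looper.
            Looper looper = (handler != null) ? handler.getLooper() : Looper.getMainLooper();
            mHandler = (looper == null) ? null : new Handler(looper) {
                @Override
                public void handleMessage(Message msg) {
                    if (listener != null) {
                        listener.onRoutingChanged(router);
                    }
                }
            };
        }

        /** Replaces the old getHandler()/sendEmptyMessage() sequence in the callers above. */
        void notifyClient() {
            if (mHandler != null) {
                mHandler.sendEmptyMessage(0);
            }
        }
    }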
diff --git a/media/java/android/media/BufferingParams.java b/media/java/android/media/BufferingParams.java
index 681271b1a6f0..521e89756f5d 100644
--- a/media/java/android/media/BufferingParams.java
+++ b/media/java/android/media/BufferingParams.java
@@ -26,170 +26,68 @@ import java.lang.annotation.RetentionPolicy;
/**
* Structure for source buffering management params.
*
- * Used by {@link MediaPlayer#getDefaultBufferingParams()},
- * {@link MediaPlayer#getBufferingParams()} and
+ * Used by {@link MediaPlayer#getBufferingParams()} and
* {@link MediaPlayer#setBufferingParams(BufferingParams)}
* to control source buffering behavior.
*
* <p>There are two stages of source buffering in {@link MediaPlayer}: initial buffering
* (when {@link MediaPlayer} is being prepared) and rebuffering (when {@link MediaPlayer}
- * is playing back source). {@link BufferingParams} includes mode and corresponding
- * watermarks for each stage of source buffering. The watermarks could be either size
- * based (in milliseconds), or time based (in kilobytes) or both, depending on the mode.
+ * is playing back source). {@link BufferingParams} includes corresponding marks for each
+ * stage of source buffering. The marks are time based (in milliseconds).
*
- * <p>There are 4 buffering modes: {@link #BUFFERING_MODE_NONE},
- * {@link #BUFFERING_MODE_TIME_ONLY}, {@link #BUFFERING_MODE_SIZE_ONLY} and
- * {@link #BUFFERING_MODE_TIME_THEN_SIZE}.
- * {@link MediaPlayer} source component has default buffering modes which can be queried
- * by calling {@link MediaPlayer#getDefaultBufferingParams()}.
- * Users should always use those default modes or their downsized version when trying to
- * change buffering params. For example, {@link #BUFFERING_MODE_TIME_THEN_SIZE} can be
- * downsized to {@link #BUFFERING_MODE_NONE}, {@link #BUFFERING_MODE_TIME_ONLY} or
- * {@link #BUFFERING_MODE_SIZE_ONLY}. But {@link #BUFFERING_MODE_TIME_ONLY} can not be
- * downsized to {@link #BUFFERING_MODE_SIZE_ONLY}.
+ * <p>{@link MediaPlayer} source component has default marks which can be queried by
+ * calling {@link MediaPlayer#getBufferingParams()} before any change is made by
+ * {@link MediaPlayer#setBufferingParams()}.
* <ul>
- * <li><strong>initial buffering stage:</strong> has one watermark which is used when
- * {@link MediaPlayer} is being prepared. When cached data amount exceeds this watermark,
- * {@link MediaPlayer} is prepared.</li>
- * <li><strong>rebuffering stage:</strong> has two watermarks, low and high, which are
- * used when {@link MediaPlayer} is playing back content.
+ * <li><strong>initial buffering:</strong> initialMarkMs is used when
+ * {@link MediaPlayer} is being prepared. When cached data amount exceeds this mark
+ * {@link MediaPlayer} is prepared. </li>
+ * <li><strong>rebuffering during playback:</strong> resumePlaybackMarkMs is used when
+ * {@link MediaPlayer} is playing back content.
* <ul>
- * <li> When cached data amount exceeds high watermark, {@link MediaPlayer} will pause
- * buffering. Buffering will resume when cache runs below some limit which could be low
- * watermark or some intermediate value decided by the source component.</li>
- * <li> When cached data amount runs below low watermark, {@link MediaPlayer} will paused
- * playback. Playback will resume when cached data amount exceeds high watermark
- * or reaches end of stream.</li>
- * </ul>
+ * <li> {@link MediaPlayer} has internal mark, namely pausePlaybackMarkMs, to decide when
+ * to pause playback if cached data amount runs low. This internal mark varies based on
+ * type of data source. </li>
+ * <li> When cached data amount exceeds resumePlaybackMarkMs, {@link MediaPlayer} will
+ * resume playback if it has been paused due to low cached data amount. The internal mark
+ * pausePlaybackMarkMs shall be less than resumePlaybackMarkMs. </li>
+ * <li> {@link MediaPlayer} has internal mark, namely pauseRebufferingMarkMs, to decide
+ * when to pause rebuffering. Apparently, this internal mark shall be no less than
+ * resumePlaybackMarkMs. </li>
+ * <li> {@link MediaPlayer} has internal mark, namely resumeRebufferingMarkMs, to decide
+ * when to resume buffering. This internal mark varies based on type of data source. This
+ * mark shall be larger than pausePlaybackMarkMs, and less than pauseRebufferingMarkMs.
+ * </li>
+ * </ul> </li>
* </ul>
* <p>Users should use {@link Builder} to change {@link BufferingParams}.
* @hide
*/
public final class BufferingParams implements Parcelable {
- /**
- * This mode indicates that source buffering is not supported.
- */
- public static final int BUFFERING_MODE_NONE = 0;
- /**
- * This mode indicates that only time based source buffering is supported. This means
- * the watermark(s) are time based.
- */
- public static final int BUFFERING_MODE_TIME_ONLY = 1;
- /**
- * This mode indicates that only size based source buffering is supported. This means
- * the watermark(s) are size based.
- */
- public static final int BUFFERING_MODE_SIZE_ONLY = 2;
- /**
- * This mode indicates that both time and size based source buffering are supported,
- * and time based calculation precedes size based. Size based calculation will be used
- * only when time information is not available from the source.
- */
- public static final int BUFFERING_MODE_TIME_THEN_SIZE = 3;
-
- /** @hide */
- @IntDef(
- value = {
- BUFFERING_MODE_NONE,
- BUFFERING_MODE_TIME_ONLY,
- BUFFERING_MODE_SIZE_ONLY,
- BUFFERING_MODE_TIME_THEN_SIZE,
- }
- )
- @Retention(RetentionPolicy.SOURCE)
- public @interface BufferingMode {}
-
- private static final int BUFFERING_NO_WATERMARK = -1;
+ private static final int BUFFERING_NO_MARK = -1;
// params
- private int mInitialBufferingMode = BUFFERING_MODE_NONE;
- private int mRebufferingMode = BUFFERING_MODE_NONE;
-
- private int mInitialWatermarkMs = BUFFERING_NO_WATERMARK;
- private int mInitialWatermarkKB = BUFFERING_NO_WATERMARK;
+ private int mInitialMarkMs = BUFFERING_NO_MARK;
- private int mRebufferingWatermarkLowMs = BUFFERING_NO_WATERMARK;
- private int mRebufferingWatermarkHighMs = BUFFERING_NO_WATERMARK;
- private int mRebufferingWatermarkLowKB = BUFFERING_NO_WATERMARK;
- private int mRebufferingWatermarkHighKB = BUFFERING_NO_WATERMARK;
+ private int mResumePlaybackMarkMs = BUFFERING_NO_MARK;
private BufferingParams() {
}
/**
- * Return the initial buffering mode used when {@link MediaPlayer} is being prepared.
- * @return one of the values that can be set in {@link Builder#setInitialBufferingMode(int)}
- */
- public int getInitialBufferingMode() {
- return mInitialBufferingMode;
- }
-
- /**
- * Return the rebuffering mode used when {@link MediaPlayer} is playing back source.
- * @return one of the values that can be set in {@link Builder#setRebufferingMode(int)}
- */
- public int getRebufferingMode() {
- return mRebufferingMode;
- }
-
- /**
- * Return the time based initial buffering watermark in milliseconds.
- * It is meaningful only when initial buffering mode obatined from
- * {@link #getInitialBufferingMode()} is time based.
- * @return time based initial buffering watermark in milliseconds
- */
- public int getInitialBufferingWatermarkMs() {
- return mInitialWatermarkMs;
- }
-
- /**
- * Return the size based initial buffering watermark in kilobytes.
- * It is meaningful only when initial buffering mode obatined from
- * {@link #getInitialBufferingMode()} is size based.
- * @return size based initial buffering watermark in kilobytes
+ * Return initial buffering mark in milliseconds.
+ * @return initial buffering mark in milliseconds
*/
- public int getInitialBufferingWatermarkKB() {
- return mInitialWatermarkKB;
+ public int getInitialMarkMs() {
+ return mInitialMarkMs;
}
/**
- * Return the time based low watermark in milliseconds for rebuffering.
- * It is meaningful only when rebuffering mode obatined from
- * {@link #getRebufferingMode()} is time based.
- * @return time based low watermark for rebuffering in milliseconds
+ * Return the mark in milliseconds for resuming playback.
+ * @return the mark for resuming playback in milliseconds
*/
- public int getRebufferingWatermarkLowMs() {
- return mRebufferingWatermarkLowMs;
- }
-
- /**
- * Return the time based high watermark in milliseconds for rebuffering.
- * It is meaningful only when rebuffering mode obatined from
- * {@link #getRebufferingMode()} is time based.
- * @return time based high watermark for rebuffering in milliseconds
- */
- public int getRebufferingWatermarkHighMs() {
- return mRebufferingWatermarkHighMs;
- }
-
- /**
- * Return the size based low watermark in kilobytes for rebuffering.
- * It is meaningful only when rebuffering mode obatined from
- * {@link #getRebufferingMode()} is size based.
- * @return size based low watermark for rebuffering in kilobytes
- */
- public int getRebufferingWatermarkLowKB() {
- return mRebufferingWatermarkLowKB;
- }
-
- /**
- * Return the size based high watermark in kilobytes for rebuffering.
- * It is meaningful only when rebuffering mode obatined from
- * {@link #getRebufferingMode()} is size based.
- * @return size based high watermark for rebuffering in kilobytes
- */
- public int getRebufferingWatermarkHighKB() {
- return mRebufferingWatermarkHighKB;
+ public int getResumePlaybackMarkMs() {
+ return mResumePlaybackMarkMs;
}
/**
@@ -200,27 +98,19 @@ public final class BufferingParams implements Parcelable {
* <pre class="prettyprint">
* BufferingParams myParams = mediaplayer.getDefaultBufferingParams();
* myParams = new BufferingParams.Builder(myParams)
- * .setInitialBufferingWatermarkMs(10000)
- * .build();
+ * .setInitialMarkMs(10000)
+ * .setResumePlaybackMarkMs(15000)
+ * .build();
* mediaplayer.setBufferingParams(myParams);
* </pre>
*/
public static class Builder {
- private int mInitialBufferingMode = BUFFERING_MODE_NONE;
- private int mRebufferingMode = BUFFERING_MODE_NONE;
-
- private int mInitialWatermarkMs = BUFFERING_NO_WATERMARK;
- private int mInitialWatermarkKB = BUFFERING_NO_WATERMARK;
-
- private int mRebufferingWatermarkLowMs = BUFFERING_NO_WATERMARK;
- private int mRebufferingWatermarkHighMs = BUFFERING_NO_WATERMARK;
- private int mRebufferingWatermarkLowKB = BUFFERING_NO_WATERMARK;
- private int mRebufferingWatermarkHighKB = BUFFERING_NO_WATERMARK;
+ private int mInitialMarkMs = BUFFERING_NO_MARK;
+ private int mResumePlaybackMarkMs = BUFFERING_NO_MARK;
/**
* Constructs a new Builder with the defaults.
- * By default, both initial buffering mode and rebuffering mode are
- * {@link BufferingParams#BUFFERING_MODE_NONE}, and all watermarks are -1.
+ * By default, all marks are -1.
*/
public Builder() {
}
@@ -231,16 +121,8 @@ public final class BufferingParams implements Parcelable {
* in the new Builder.
*/
public Builder(BufferingParams bp) {
- mInitialBufferingMode = bp.mInitialBufferingMode;
- mRebufferingMode = bp.mRebufferingMode;
-
- mInitialWatermarkMs = bp.mInitialWatermarkMs;
- mInitialWatermarkKB = bp.mInitialWatermarkKB;
-
- mRebufferingWatermarkLowMs = bp.mRebufferingWatermarkLowMs;
- mRebufferingWatermarkHighMs = bp.mRebufferingWatermarkHighMs;
- mRebufferingWatermarkLowKB = bp.mRebufferingWatermarkLowKB;
- mRebufferingWatermarkHighKB = bp.mRebufferingWatermarkHighKB;
+ mInitialMarkMs = bp.mInitialMarkMs;
+ mResumePlaybackMarkMs = bp.mResumePlaybackMarkMs;
}
/**
@@ -250,179 +132,37 @@ public final class BufferingParams implements Parcelable {
* @return a new {@link BufferingParams} object
*/
public BufferingParams build() {
- if (isTimeBasedMode(mRebufferingMode)
- && mRebufferingWatermarkLowMs > mRebufferingWatermarkHighMs) {
- throw new IllegalStateException("Illegal watermark:"
- + mRebufferingWatermarkLowMs + " : " + mRebufferingWatermarkHighMs);
- }
- if (isSizeBasedMode(mRebufferingMode)
- && mRebufferingWatermarkLowKB > mRebufferingWatermarkHighKB) {
- throw new IllegalStateException("Illegal watermark:"
- + mRebufferingWatermarkLowKB + " : " + mRebufferingWatermarkHighKB);
- }
-
BufferingParams bp = new BufferingParams();
- bp.mInitialBufferingMode = mInitialBufferingMode;
- bp.mRebufferingMode = mRebufferingMode;
-
- bp.mInitialWatermarkMs = mInitialWatermarkMs;
- bp.mInitialWatermarkKB = mInitialWatermarkKB;
+ bp.mInitialMarkMs = mInitialMarkMs;
+ bp.mResumePlaybackMarkMs = mResumePlaybackMarkMs;
- bp.mRebufferingWatermarkLowMs = mRebufferingWatermarkLowMs;
- bp.mRebufferingWatermarkHighMs = mRebufferingWatermarkHighMs;
- bp.mRebufferingWatermarkLowKB = mRebufferingWatermarkLowKB;
- bp.mRebufferingWatermarkHighKB = mRebufferingWatermarkHighKB;
return bp;
}
- private boolean isTimeBasedMode(int mode) {
- return (mode == BUFFERING_MODE_TIME_ONLY || mode == BUFFERING_MODE_TIME_THEN_SIZE);
- }
-
- private boolean isSizeBasedMode(int mode) {
- return (mode == BUFFERING_MODE_SIZE_ONLY || mode == BUFFERING_MODE_TIME_THEN_SIZE);
- }
-
/**
- * Sets the initial buffering mode.
- * @param mode one of {@link BufferingParams#BUFFERING_MODE_NONE},
- * {@link BufferingParams#BUFFERING_MODE_TIME_ONLY},
- * {@link BufferingParams#BUFFERING_MODE_SIZE_ONLY},
- * {@link BufferingParams#BUFFERING_MODE_TIME_THEN_SIZE},
+ * Sets the time based mark in milliseconds for initial buffering.
+ * @param markMs time based mark in milliseconds
* @return the same Builder instance.
*/
- public Builder setInitialBufferingMode(@BufferingMode int mode) {
- switch (mode) {
- case BUFFERING_MODE_NONE:
- case BUFFERING_MODE_TIME_ONLY:
- case BUFFERING_MODE_SIZE_ONLY:
- case BUFFERING_MODE_TIME_THEN_SIZE:
- mInitialBufferingMode = mode;
- break;
- default:
- throw new IllegalArgumentException("Illegal buffering mode " + mode);
- }
+ public Builder setInitialMarkMs(int markMs) {
+ mInitialMarkMs = markMs;
return this;
}
/**
- * Sets the rebuffering mode.
- * @param mode one of {@link BufferingParams#BUFFERING_MODE_NONE},
- * {@link BufferingParams#BUFFERING_MODE_TIME_ONLY},
- * {@link BufferingParams#BUFFERING_MODE_SIZE_ONLY},
- * {@link BufferingParams#BUFFERING_MODE_TIME_THEN_SIZE},
+ * Sets the time based mark in milliseconds for resuming playback.
+ * @param markMs time based mark in milliseconds for resuming playback
* @return the same Builder instance.
*/
- public Builder setRebufferingMode(@BufferingMode int mode) {
- switch (mode) {
- case BUFFERING_MODE_NONE:
- case BUFFERING_MODE_TIME_ONLY:
- case BUFFERING_MODE_SIZE_ONLY:
- case BUFFERING_MODE_TIME_THEN_SIZE:
- mRebufferingMode = mode;
- break;
- default:
- throw new IllegalArgumentException("Illegal buffering mode " + mode);
- }
- return this;
- }
-
- /**
- * Sets the time based watermark in milliseconds for initial buffering.
- * @param watermarkMs time based watermark in milliseconds
- * @return the same Builder instance.
- */
- public Builder setInitialBufferingWatermarkMs(int watermarkMs) {
- mInitialWatermarkMs = watermarkMs;
- return this;
- }
-
- /**
- * Sets the size based watermark in kilobytes for initial buffering.
- * @param watermarkKB size based watermark in kilobytes
- * @return the same Builder instance.
- */
- public Builder setInitialBufferingWatermarkKB(int watermarkKB) {
- mInitialWatermarkKB = watermarkKB;
- return this;
- }
-
- /**
- * Sets the time based low watermark in milliseconds for rebuffering.
- * @param watermarkMs time based low watermark in milliseconds
- * @return the same Builder instance.
- */
- public Builder setRebufferingWatermarkLowMs(int watermarkMs) {
- mRebufferingWatermarkLowMs = watermarkMs;
- return this;
- }
-
- /**
- * Sets the time based high watermark in milliseconds for rebuffering.
- * @param watermarkMs time based high watermark in milliseconds
- * @return the same Builder instance.
- */
- public Builder setRebufferingWatermarkHighMs(int watermarkMs) {
- mRebufferingWatermarkHighMs = watermarkMs;
- return this;
- }
-
- /**
- * Sets the size based low watermark in milliseconds for rebuffering.
- * @param watermarkKB size based low watermark in milliseconds
- * @return the same Builder instance.
- */
- public Builder setRebufferingWatermarkLowKB(int watermarkKB) {
- mRebufferingWatermarkLowKB = watermarkKB;
- return this;
- }
-
- /**
- * Sets the size based high watermark in milliseconds for rebuffering.
- * @param watermarkKB size based high watermark in milliseconds
- * @return the same Builder instance.
- */
- public Builder setRebufferingWatermarkHighKB(int watermarkKB) {
- mRebufferingWatermarkHighKB = watermarkKB;
- return this;
- }
-
- /**
- * Sets the time based low and high watermarks in milliseconds for rebuffering.
- * @param lowWatermarkMs time based low watermark in milliseconds
- * @param highWatermarkMs time based high watermark in milliseconds
- * @return the same Builder instance.
- */
- public Builder setRebufferingWatermarksMs(int lowWatermarkMs, int highWatermarkMs) {
- mRebufferingWatermarkLowMs = lowWatermarkMs;
- mRebufferingWatermarkHighMs = highWatermarkMs;
- return this;
- }
-
- /**
- * Sets the size based low and high watermarks in kilobytes for rebuffering.
- * @param lowWatermarkKB size based low watermark in kilobytes
- * @param highWatermarkKB size based high watermark in kilobytes
- * @return the same Builder instance.
- */
- public Builder setRebufferingWatermarksKB(int lowWatermarkKB, int highWatermarkKB) {
- mRebufferingWatermarkLowKB = lowWatermarkKB;
- mRebufferingWatermarkHighKB = highWatermarkKB;
+ public Builder setResumePlaybackMarkMs(int markMs) {
+ mResumePlaybackMarkMs = markMs;
return this;
}
}
private BufferingParams(Parcel in) {
- mInitialBufferingMode = in.readInt();
- mRebufferingMode = in.readInt();
-
- mInitialWatermarkMs = in.readInt();
- mInitialWatermarkKB = in.readInt();
-
- mRebufferingWatermarkLowMs = in.readInt();
- mRebufferingWatermarkHighMs = in.readInt();
- mRebufferingWatermarkLowKB = in.readInt();
- mRebufferingWatermarkHighKB = in.readInt();
+ mInitialMarkMs = in.readInt();
+ mResumePlaybackMarkMs = in.readInt();
}
public static final Parcelable.Creator<BufferingParams> CREATOR =
@@ -446,15 +186,7 @@ public final class BufferingParams implements Parcelable {
@Override
public void writeToParcel(Parcel dest, int flags) {
- dest.writeInt(mInitialBufferingMode);
- dest.writeInt(mRebufferingMode);
-
- dest.writeInt(mInitialWatermarkMs);
- dest.writeInt(mInitialWatermarkKB);
-
- dest.writeInt(mRebufferingWatermarkLowMs);
- dest.writeInt(mRebufferingWatermarkHighMs);
- dest.writeInt(mRebufferingWatermarkLowKB);
- dest.writeInt(mRebufferingWatermarkHighKB);
+ dest.writeInt(mInitialMarkMs);
+ dest.writeInt(mResumePlaybackMarkMs);
}
}
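
With the buffering modes and size-based watermarks gone, the params reduce to two time-based marks. A usage sketch of the remaining Builder surface (BufferingParams and these MediaPlayer accessors are @hide, so this is illustrative only):

    import android.media.BufferingParams;
    import android.media.MediaPlayer;

    final class BufferingTuning {
        /** Illustrative only: BufferingParams and these MediaPlayer methods are @hide. */
        static void tune(MediaPlayer player) {
            BufferingParams defaults = player.getBufferingParams();
            BufferingParams tuned = new BufferingParams.Builder(defaults)
                    .setInitialMarkMs(10_000)         // prepared once ~10 s is cached
                    .setResumePlaybackMarkMs(15_000)  // resume playback once ~15 s is cached
                    .build();
            player.setBufferingParams(tuned);
        }
    }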
diff --git a/media/java/android/media/ExifInterface.java b/media/java/android/media/ExifInterface.java
index bf3a3b9b07bb..91754162180f 100644
--- a/media/java/android/media/ExifInterface.java
+++ b/media/java/android/media/ExifInterface.java
@@ -2078,7 +2078,8 @@ public class ExifInterface {
}
}
- private static float convertRationalLatLonToFloat(String rationalString, String ref) {
+ /** {@hide} */
+ public static float convertRationalLatLonToFloat(String rationalString, String ref) {
try {
String [] parts = rationalString.split(",");
@@ -2563,51 +2564,66 @@ public class ExifInterface {
});
}
+ String hasImage = retriever.extractMetadata(
+ MediaMetadataRetriever.METADATA_KEY_HAS_IMAGE);
String hasVideo = retriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_HAS_VIDEO);
- final String METADATA_HAS_VIDEO_VALUE_YES = "yes";
- if (METADATA_HAS_VIDEO_VALUE_YES.equals(hasVideo)) {
- String width = retriever.extractMetadata(
+ String width = null;
+ String height = null;
+ String rotation = null;
+ final String METADATA_VALUE_YES = "yes";
+ // If the file has both image and video, prefer image info over video info.
+ // App querying ExifInterface is most likely using the bitmap path which
+ // picks the image first.
+ if (METADATA_VALUE_YES.equals(hasImage)) {
+ width = retriever.extractMetadata(
+ MediaMetadataRetriever.METADATA_KEY_IMAGE_WIDTH);
+ height = retriever.extractMetadata(
+ MediaMetadataRetriever.METADATA_KEY_IMAGE_HEIGHT);
+ rotation = retriever.extractMetadata(
+ MediaMetadataRetriever.METADATA_KEY_IMAGE_ROTATION);
+ } else if (METADATA_VALUE_YES.equals(hasVideo)) {
+ width = retriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
- String height = retriever.extractMetadata(
+ height = retriever.extractMetadata(
MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
+ rotation = retriever.extractMetadata(
+ MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
+ }
- if (width != null) {
- mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_WIDTH,
- ExifAttribute.createUShort(Integer.parseInt(width), mExifByteOrder));
- }
-
- if (height != null) {
- mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_LENGTH,
- ExifAttribute.createUShort(Integer.parseInt(height), mExifByteOrder));
- }
+ if (width != null) {
+ mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_WIDTH,
+ ExifAttribute.createUShort(Integer.parseInt(width), mExifByteOrder));
+ }
- String rotation = retriever.extractMetadata(
- MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
- if (rotation != null) {
- int orientation = ExifInterface.ORIENTATION_NORMAL;
+ if (height != null) {
+ mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_LENGTH,
+ ExifAttribute.createUShort(Integer.parseInt(height), mExifByteOrder));
+ }
- // all rotation angles in CW
- switch (Integer.parseInt(rotation)) {
- case 90:
- orientation = ExifInterface.ORIENTATION_ROTATE_90;
- break;
- case 180:
- orientation = ExifInterface.ORIENTATION_ROTATE_180;
- break;
- case 270:
- orientation = ExifInterface.ORIENTATION_ROTATE_270;
- break;
- }
+ if (rotation != null) {
+ int orientation = ExifInterface.ORIENTATION_NORMAL;
- mAttributes[IFD_TYPE_PRIMARY].put(TAG_ORIENTATION,
- ExifAttribute.createUShort(orientation, mExifByteOrder));
+ // all rotation angles in CW
+ switch (Integer.parseInt(rotation)) {
+ case 90:
+ orientation = ExifInterface.ORIENTATION_ROTATE_90;
+ break;
+ case 180:
+ orientation = ExifInterface.ORIENTATION_ROTATE_180;
+ break;
+ case 270:
+ orientation = ExifInterface.ORIENTATION_ROTATE_270;
+ break;
}
- if (DEBUG) {
- Log.d(TAG, "Heif meta: " + width + "x" + height + ", rotation " + rotation);
- }
+ mAttributes[IFD_TYPE_PRIMARY].put(TAG_ORIENTATION,
+ ExifAttribute.createUShort(orientation, mExifByteOrder));
+ }
+
+ if (DEBUG) {
+ Log.d(TAG, "Heif meta: " + width + "x" + height + ", rotation " + rotation);
}
} finally {
retriever.release();
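
The HEIF branch now prefers image metadata over video metadata and maps the clockwise rotation to an EXIF orientation. For reference, the degree-to-orientation mapping in isolation, using the same public constants as the block above:

    import android.media.ExifInterface;

    final class RotationToOrientation {
        /** Same clockwise-degrees-to-EXIF mapping used in the block above. */
        static int orientationFor(int rotationDegrees) {
            switch (rotationDegrees) {
                case 90:  return ExifInterface.ORIENTATION_ROTATE_90;
                case 180: return ExifInterface.ORIENTATION_ROTATE_180;
                case 270: return ExifInterface.ORIENTATION_ROTATE_270;
                default:  return ExifInterface.ORIENTATION_NORMAL;
            }
        }
    }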
diff --git a/media/java/android/media/IPlayer.aidl b/media/java/android/media/IPlayer.aidl
deleted file mode 100644
index 2d60bf956904..000000000000
--- a/media/java/android/media/IPlayer.aidl
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (C) 2016 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-import android.media.VolumeShaper;
-
-/**
- * @hide
- */
-interface IPlayer {
- oneway void start();
- oneway void pause();
- oneway void stop();
- oneway void setVolume(float vol);
- oneway void setPan(float pan);
- oneway void setStartDelayMs(int delayMs);
- oneway void applyVolumeShaper(in VolumeShaper.Configuration configuration,
- in VolumeShaper.Operation operation);
-}
diff --git a/media/java/android/media/ImageReader.java b/media/java/android/media/ImageReader.java
index c78c99f7a228..1019580589ab 100644
--- a/media/java/android/media/ImageReader.java
+++ b/media/java/android/media/ImageReader.java
@@ -640,7 +640,6 @@ public class ImageReader implements AutoCloseable {
* The ImageReader continues to be usable after this call, but may need to reallocate buffers
* when more buffers are needed for rendering.
* </p>
- * @hide
*/
public void discardFreeBuffers() {
synchronized (mCloseLock) {
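
discardFreeBuffers() is no longer @hide. A plausible use is releasing cached buffers under memory pressure; this is an assumed usage pattern, not something prescribed by the change:

    import android.content.ComponentCallbacks2;
    import android.content.res.Configuration;
    import android.media.ImageReader;

    final class ReaderTrimmer implements ComponentCallbacks2 {
        private final ImageReader mReader;

        ReaderTrimmer(ImageReader reader) {
            mReader = reader;
        }

        @Override
        public void onTrimMemory(int level) {
            if (level >= TRIM_MEMORY_MODERATE) {
                // Frees cached, unused buffers; the reader remains usable afterwards.
                mReader.discardFreeBuffers();
            }
        }

        @Override
        public void onConfigurationChanged(Configuration newConfig) {}

        @Override
        public void onLowMemory() {}
    }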
diff --git a/media/java/android/media/MediaDrm.java b/media/java/android/media/MediaDrm.java
index 88b1c5ffcc7e..e2f9b47ed482 100644
--- a/media/java/android/media/MediaDrm.java
+++ b/media/java/android/media/MediaDrm.java
@@ -91,10 +91,10 @@ import android.util.Log;
* are only decrypted when the samples are delivered to the decoder.
* <p>
* MediaDrm methods throw {@link android.media.MediaDrm.MediaDrmStateException}
- * when a method is called on a MediaDrm object that has had an unrecoverable failure
- * in the DRM plugin or security hardware.
- * {@link android.media.MediaDrm.MediaDrmStateException} extends
- * {@link java.lang.IllegalStateException} with the addition of a developer-readable
+ * when a method is called on a MediaDrm object that has had an unrecoverable failure
+ * in the DRM plugin or security hardware.
+ * {@link android.media.MediaDrm.MediaDrmStateException} extends
+ * {@link java.lang.IllegalStateException} with the addition of a developer-readable
* diagnostic information string associated with the exception.
* <p>
* In the event of a mediaserver process crash or restart while a MediaDrm object
@@ -102,9 +102,9 @@ import android.util.Log;
* To recover, the app must release the MediaDrm object, then create and initialize
* a new one.
* <p>
- * As {@link android.media.MediaDrmResetException} and
- * {@link android.media.MediaDrm.MediaDrmStateException} both extend
- * {@link java.lang.IllegalStateException}, they should be in an earlier catch()
+ * As {@link android.media.MediaDrmResetException} and
+ * {@link android.media.MediaDrm.MediaDrmStateException} both extend
+ * {@link java.lang.IllegalStateException}, they should be in an earlier catch()
* block than {@link java.lang.IllegalStateException} if handled separately.
* <p>
* <a name="Callbacks"></a>
@@ -165,7 +165,7 @@ public final class MediaDrm {
/**
* Query if the given scheme identified by its UUID is supported on
- * this device, and whether the drm plugin is able to handle the
+ * this device, and whether the DRM plugin is able to handle the
* media container format specified by mimeType.
* @param uuid The UUID of the crypto scheme.
* @param mimeType The MIME type of the media container, e.g. "video/mp4"
@@ -745,7 +745,7 @@ public final class MediaDrm {
* returned in KeyRequest.defaultUrl.
* <p>
* After the app has received the key request response from the server,
- * it should deliver to the response to the DRM engine plugin using the method
+ * it should deliver the response to the MediaDrm instance using the method
* {@link #provideKeyResponse}.
*
* @param scope may be a sessionId or a keySetId, depending on the specified keyType.
@@ -781,7 +781,7 @@ public final class MediaDrm {
/**
* A key response is received from the license server by the app, then it is
- * provided to the DRM engine plugin using provideKeyResponse. When the
+ * provided to the MediaDrm instance using provideKeyResponse. When the
* response is for an offline key request, a keySetId is returned that can be
* used to later restore the keys to a new session with the method
* {@link #restoreKeys}.
@@ -829,7 +829,7 @@ public final class MediaDrm {
* in the form of {name, value} pairs. Since DRM license policies vary by vendor,
* the specific status field names are determined by each DRM vendor. Refer to your
* DRM provider documentation for definitions of the field names for a particular
- * DRM engine plugin.
+ * DRM plugin.
*
* @param sessionId the session ID for the DRM session
*/
@@ -897,11 +897,11 @@ public final class MediaDrm {
@NonNull String certAuthority);
/**
- * After a provision response is received by the app, it is provided to the DRM
- * engine plugin using this method.
+ * After a provision response is received by the app, it is provided to the
+ * MediaDrm instance using this method.
*
* @param response the opaque provisioning response byte array to provide to the
- * DRM engine plugin.
+ * MediaDrm instance.
*
* @throws DeniedByServerException if the response indicates that the
* server rejected the request
@@ -912,7 +912,6 @@ public final class MediaDrm {
}
@NonNull
- /* could there be a valid response with 0-sized certificate or key? */
private native Certificate provideProvisionResponseNative(@NonNull byte[] response)
throws DeniedByServerException;
@@ -953,32 +952,32 @@ public final class MediaDrm {
/**
* Remove all secure stops without requiring interaction with the server.
*/
- public native void releaseAllSecureStops();
+ public native void releaseAllSecureStops();
/**
- * String property name: identifies the maker of the DRM engine plugin
+ * String property name: identifies the maker of the DRM plugin
*/
public static final String PROPERTY_VENDOR = "vendor";
/**
- * String property name: identifies the version of the DRM engine plugin
+ * String property name: identifies the version of the DRM plugin
*/
public static final String PROPERTY_VERSION = "version";
/**
- * String property name: describes the DRM engine plugin
+ * String property name: describes the DRM plugin
*/
public static final String PROPERTY_DESCRIPTION = "description";
/**
* String property name: a comma-separated list of cipher and mac algorithms
- * supported by CryptoSession. The list may be empty if the DRM engine
+ * supported by CryptoSession. The list may be empty if the DRM
* plugin does not support CryptoSession operations.
*/
public static final String PROPERTY_ALGORITHMS = "algorithms";
/** @hide */
- @StringDef({
+ @StringDef(prefix = { "PROPERTY_" }, value = {
PROPERTY_VENDOR,
PROPERTY_VERSION,
PROPERTY_DESCRIPTION,
@@ -988,32 +987,37 @@ public final class MediaDrm {
public @interface StringProperty {}
/**
- * Read a DRM engine plugin String property value, given the property name string.
+ * Read a MediaDrm String property value, given the property name string.
* <p>
* Standard fields names are:
* {@link #PROPERTY_VENDOR}, {@link #PROPERTY_VERSION},
* {@link #PROPERTY_DESCRIPTION}, {@link #PROPERTY_ALGORITHMS}
*/
- /* FIXME this throws IllegalStateException for invalid property names */
@NonNull
public native String getPropertyString(@NonNull @StringProperty String propertyName);
/**
+ * Set a MediaDrm String property value, given the property name string
+ * and new value for the property.
+ */
+ public native void setPropertyString(@NonNull @StringProperty String propertyName,
+ @NonNull String value);
+
+ /**
* Byte array property name: the device unique identifier is established during
* device provisioning and provides a means of uniquely identifying each device.
*/
- /* FIXME this throws IllegalStateException for invalid property names */
public static final String PROPERTY_DEVICE_UNIQUE_ID = "deviceUniqueId";
/** @hide */
- @StringDef({
+ @StringDef(prefix = { "PROPERTY_" }, value = {
PROPERTY_DEVICE_UNIQUE_ID,
})
@Retention(RetentionPolicy.SOURCE)
public @interface ArrayProperty {}
/**
- * Read a DRM engine plugin byte array property value, given the property name string.
+ * Read a MediaDrm byte array property value, given the property name string.
* <p>
* Standard fields names are {@link #PROPERTY_DEVICE_UNIQUE_ID}
*/
@@ -1021,17 +1025,13 @@ public final class MediaDrm {
public native byte[] getPropertyByteArray(@ArrayProperty String propertyName);
/**
- * Set a DRM engine plugin String property value.
- */
- public native void setPropertyString(
- String propertyName, @NonNull String value);
-
- /**
- * Set a DRM engine plugin byte array property value.
- */
- public native void setPropertyByteArray(
+ * Set a MediaDrm byte array property value, given the property name string
+ * and new value for the property.
+ */
+ public native void setPropertyByteArray(@NonNull @ArrayProperty
String propertyName, @NonNull byte[] value);
+
private static final native void setCipherAlgorithmNative(
@NonNull MediaDrm drm, @NonNull byte[] sessionId, @NonNull String algorithm);
@@ -1160,7 +1160,7 @@ public final class MediaDrm {
* The algorithm string conforms to JCA Standard Names for Mac
* Algorithms and is case insensitive. For example "HmacSHA256".
* <p>
- * The list of supported algorithms for a DRM engine plugin can be obtained
+ * The list of supported algorithms for a DRM plugin can be obtained
* using the method {@link #getPropertyString} with the property name
* "algorithms".
*/
@@ -1274,7 +1274,7 @@ public final class MediaDrm {
* storage, and used when invoking the signRSA method.
*
* @param response the opaque certificate response byte array to provide to the
- * DRM engine plugin.
+ * MediaDrm instance.
*
* @throws DeniedByServerException if the response indicates that the
* server rejected the request
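
setPropertyString()/setPropertyByteArray() now carry annotations and documentation symmetric with the getters. A small usage sketch; the Widevine UUID and the vendor-specific property name are examples only, and which names are writable is entirely plugin-specific:

    import android.media.MediaDrm;
    import android.media.UnsupportedSchemeException;
    import java.util.UUID;

    final class DrmProps {
        // Widevine scheme UUID, used here only as an example.
        private static final UUID WIDEVINE =
                UUID.fromString("edef8ba9-79d6-4ace-a3c8-27dcd51d21ed");

        static void readAndWriteProperties() throws UnsupportedSchemeException {
            MediaDrm drm = new MediaDrm(WIDEVINE);
            String vendor = drm.getPropertyString(MediaDrm.PROPERTY_VENDOR);
            String version = drm.getPropertyString(MediaDrm.PROPERTY_VERSION);
            // "exampleVendorSetting" is a hypothetical plugin-defined property name.
            drm.setPropertyString("exampleVendorSetting", vendor + "/" + version);
            drm.release();
        }
    }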
diff --git a/media/java/android/media/MediaFormat.java b/media/java/android/media/MediaFormat.java
index ed5f7d848663..306ed83c426b 100644
--- a/media/java/android/media/MediaFormat.java
+++ b/media/java/android/media/MediaFormat.java
@@ -96,6 +96,19 @@ import java.util.Map;
* <tr><td>{@link #KEY_MIME}</td><td>String</td><td>The type of the format.</td></tr>
* <tr><td>{@link #KEY_LANGUAGE}</td><td>String</td><td>The language of the content.</td></tr>
* </table>
+ *
+ * Image formats have the following keys:
+ * <table>
+ * <tr><td>{@link #KEY_MIME}</td><td>String</td><td>The type of the format.</td></tr>
+ * <tr><td>{@link #KEY_WIDTH}</td><td>Integer</td><td></td></tr>
+ * <tr><td>{@link #KEY_HEIGHT}</td><td>Integer</td><td></td></tr>
+ * <tr><td>{@link #KEY_COLOR_FORMAT}</td><td>Integer</td><td>set by the user
+ * for encoders, readable in the output format of decoders</td></tr>
+ * <tr><td>{@link #KEY_GRID_WIDTH}</td><td>Integer</td><td>required if the image has grid</td></tr>
+ * <tr><td>{@link #KEY_GRID_HEIGHT}</td><td>Integer</td><td>required if the image has grid</td></tr>
+ * <tr><td>{@link #KEY_GRID_ROWS}</td><td>Integer</td><td>required if the image has grid</td></tr>
+ * <tr><td>{@link #KEY_GRID_COLS}</td><td>Integer</td><td>required if the image has grid</td></tr>
+ * </table>
*/
public final class MediaFormat {
public static final String MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
@@ -126,6 +139,35 @@ public final class MediaFormat {
public static final String MIMETYPE_AUDIO_SCRAMBLED = "audio/scrambled";
/**
+ * MIME type for HEIF still image data encoded in HEVC.
+ *
+ * To decode such an image, a {@link MediaCodec} decoder for
+ * {@link #MIMETYPE_VIDEO_HEVC} shall be used. The client needs to form
+ * the correct {@link MediaFormat} based on additional information in
+ * the track format, and send it to {@link MediaCodec#configure}.
+ *
+ * The track's MediaFormat will come with {@link #KEY_WIDTH} and
+ * {@link #KEY_HEIGHT} keys, which describe the width and height
+ * of the image. If the image doesn't contain a grid (i.e. none of
+ * {@link #KEY_GRID_WIDTH}, {@link #KEY_GRID_HEIGHT},
+ * {@link #KEY_GRID_ROWS}, {@link #KEY_GRID_COLS} are present), the
+ * track will contain a single sample of coded data for the entire image,
+ * and the image width and height should be used to set up the decoder.
+ *
+ * If the image does come with grid, each sample from the track will
+ * contain one tile in the grid, of which the size is described by
+ * {@link #KEY_GRID_WIDTH} and {@link #KEY_GRID_HEIGHT}. This size
+ * (instead of {@link #KEY_WIDTH} and {@link #KEY_HEIGHT}) should be
+ * used to set up the decoder. The track contains {@link #KEY_GRID_ROWS}
+ * by {@link #KEY_GRID_COLS} samples in row-major, top-row first,
+ * left-to-right order. The output image should be reconstructed by
+ * first tiling the decoding results of the tiles in the correct order,
+ * then trimming (before rotation is applied) on the bottom and right
+ * side, if the tiled area is larger than the image width and height.
+ */
+ public static final String MIMETYPE_IMAGE_ANDROID_HEIC = "image/vnd.android.heic";
+
+ /**
* MIME type for WebVTT subtitle data.
*/
public static final String MIMETYPE_TEXT_VTT = "text/vtt";
@@ -232,6 +274,54 @@ public final class MediaFormat {
public static final String KEY_FRAME_RATE = "frame-rate";
/**
+ * A key describing the grid width of the content in a {@link #MIMETYPE_IMAGE_ANDROID_HEIC}
+ * track. The associated value is an integer.
+ *
+ * Refer to {@link #MIMETYPE_IMAGE_ANDROID_HEIC} on decoding instructions of such tracks.
+ *
+ * @see #KEY_GRID_HEIGHT
+ * @see #KEY_GRID_ROWS
+ * @see #KEY_GRID_COLS
+ */
+ public static final String KEY_GRID_WIDTH = "grid-width";
+
+ /**
+ * A key describing the grid height of the content in a {@link #MIMETYPE_IMAGE_ANDROID_HEIC}
+ * track. The associated value is an integer.
+ *
+ * Refer to {@link #MIMETYPE_IMAGE_ANDROID_HEIC} on decoding instructions of such tracks.
+ *
+ * @see #KEY_GRID_WIDTH
+ * @see #KEY_GRID_ROWS
+ * @see #KEY_GRID_COLS
+ */
+ public static final String KEY_GRID_HEIGHT = "grid-height";
+
+ /**
+ * A key describing the number of grid rows in the content in a
+ * {@link #MIMETYPE_IMAGE_ANDROID_HEIC} track. The associated value is an integer.
+ *
+ * Refer to {@link #MIMETYPE_IMAGE_ANDROID_HEIC} on decoding instructions of such tracks.
+ *
+ * @see #KEY_GRID_WIDTH
+ * @see #KEY_GRID_HEIGHT
+ * @see #KEY_GRID_COLS
+ */
+ public static final String KEY_GRID_ROWS = "grid-rows";
+
+ /**
+ * A key describing the number of grid columns in the content in a
+ * {@link #MIMETYPE_IMAGE_ANDROID_HEIC} track. The associated value is an integer.
+ *
+ * Refer to {@link #MIMETYPE_IMAGE_ANDROID_HEIC} on decoding instructions of such tracks.
+ *
+ * @see #KEY_GRID_WIDTH
+ * @see #KEY_GRID_HEIGHT
+ * @see #KEY_GRID_ROWS
+ */
+ public static final String KEY_GRID_COLS = "grid-cols";
+
+ /**
* A key describing the raw audio sample encoding/format.
*
* <p>The associated value is an integer, using one of the
@@ -631,14 +721,16 @@ public final class MediaFormat {
/**
* A key for boolean DEFAULT behavior for the track. The track with DEFAULT=true is
* selected in the absence of a specific user choice.
- * This is currently only used for subtitle tracks, when the user selected
- * 'Default' for the captioning locale.
+ * This is currently used in two scenarios:
+ * 1) for subtitle tracks, when the user selected 'Default' for the captioning locale.
+ * 2) for a {@link #MIMETYPE_IMAGE_ANDROID_HEIC} track, indicating the image is the
+ * primary item in the file.
+ *
* The associated value is an integer, where non-0 means TRUE. This is an optional
* field; if not specified, DEFAULT is considered to be FALSE.
*/
public static final String KEY_IS_DEFAULT = "is-default";
-
/**
* A key for the FORCED field for subtitle tracks. True if it is a
* forced subtitle track. Forced subtitle tracks are essential for the
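
The MIMETYPE_IMAGE_ANDROID_HEIC javadoc above describes how grid metadata drives decoder setup. A sketch that reads those keys from a track format, using the key names introduced in this change (illustrative; the keys may not yet be visible to apps at this point):

    import android.media.MediaExtractor;
    import android.media.MediaFormat;

    final class HeicTrackInfo {
        /** Describes how an image/vnd.android.heic track should be decoded, per the doc above. */
        static String describe(MediaExtractor extractor, int trackIndex) {
            MediaFormat format = extractor.getTrackFormat(trackIndex);
            int imageWidth = format.getInteger(MediaFormat.KEY_WIDTH);
            int imageHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
            if (!format.containsKey(MediaFormat.KEY_GRID_ROWS)) {
                // Single coded sample: configure the HEVC decoder with the full image size.
                return "single sample, decode at " + imageWidth + "x" + imageHeight;
            }
            // Tiled image: configure the decoder with the tile size, decode
            // rows x cols samples in row-major order, then crop to the image size.
            int tileWidth = format.getInteger(MediaFormat.KEY_GRID_WIDTH);
            int tileHeight = format.getInteger(MediaFormat.KEY_GRID_HEIGHT);
            int rows = format.getInteger(MediaFormat.KEY_GRID_ROWS);
            int cols = format.getInteger(MediaFormat.KEY_GRID_COLS);
            return rows * cols + " tiles of " + tileWidth + "x" + tileHeight
                    + ", crop to " + imageWidth + "x" + imageHeight;
        }
    }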
diff --git a/media/java/android/media/MediaMetadata.java b/media/java/android/media/MediaMetadata.java
index 31eb948dcd09..94d4d55639dc 100644
--- a/media/java/android/media/MediaMetadata.java
+++ b/media/java/android/media/MediaMetadata.java
@@ -45,34 +45,61 @@ public final class MediaMetadata implements Parcelable {
/**
* @hide
*/
- @StringDef({METADATA_KEY_TITLE, METADATA_KEY_ARTIST, METADATA_KEY_ALBUM, METADATA_KEY_AUTHOR,
- METADATA_KEY_WRITER, METADATA_KEY_COMPOSER, METADATA_KEY_COMPILATION,
- METADATA_KEY_DATE, METADATA_KEY_GENRE, METADATA_KEY_ALBUM_ARTIST, METADATA_KEY_ART_URI,
- METADATA_KEY_ALBUM_ART_URI, METADATA_KEY_DISPLAY_TITLE, METADATA_KEY_DISPLAY_SUBTITLE,
- METADATA_KEY_DISPLAY_DESCRIPTION, METADATA_KEY_DISPLAY_ICON_URI,
- METADATA_KEY_MEDIA_ID, METADATA_KEY_MEDIA_URI})
+ @StringDef(prefix = { "METADATA_KEY_" }, value = {
+ METADATA_KEY_TITLE,
+ METADATA_KEY_ARTIST,
+ METADATA_KEY_ALBUM,
+ METADATA_KEY_AUTHOR,
+ METADATA_KEY_WRITER,
+ METADATA_KEY_COMPOSER,
+ METADATA_KEY_COMPILATION,
+ METADATA_KEY_DATE,
+ METADATA_KEY_GENRE,
+ METADATA_KEY_ALBUM_ARTIST,
+ METADATA_KEY_ART_URI,
+ METADATA_KEY_ALBUM_ART_URI,
+ METADATA_KEY_DISPLAY_TITLE,
+ METADATA_KEY_DISPLAY_SUBTITLE,
+ METADATA_KEY_DISPLAY_DESCRIPTION,
+ METADATA_KEY_DISPLAY_ICON_URI,
+ METADATA_KEY_MEDIA_ID,
+ METADATA_KEY_MEDIA_URI,
+ })
@Retention(RetentionPolicy.SOURCE)
public @interface TextKey {}
/**
* @hide
*/
- @StringDef({METADATA_KEY_DURATION, METADATA_KEY_YEAR, METADATA_KEY_TRACK_NUMBER,
- METADATA_KEY_NUM_TRACKS, METADATA_KEY_DISC_NUMBER, METADATA_KEY_BT_FOLDER_TYPE})
+ @StringDef(prefix = { "METADATA_KEY_" }, value = {
+ METADATA_KEY_DURATION,
+ METADATA_KEY_YEAR,
+ METADATA_KEY_TRACK_NUMBER,
+ METADATA_KEY_NUM_TRACKS,
+ METADATA_KEY_DISC_NUMBER,
+ METADATA_KEY_BT_FOLDER_TYPE,
+ })
@Retention(RetentionPolicy.SOURCE)
public @interface LongKey {}
/**
* @hide
*/
- @StringDef({METADATA_KEY_ART, METADATA_KEY_ALBUM_ART, METADATA_KEY_DISPLAY_ICON})
+ @StringDef(prefix = { "METADATA_KEY_" }, value = {
+ METADATA_KEY_ART,
+ METADATA_KEY_ALBUM_ART,
+ METADATA_KEY_DISPLAY_ICON,
+ })
@Retention(RetentionPolicy.SOURCE)
public @interface BitmapKey {}
/**
* @hide
*/
- @StringDef({METADATA_KEY_USER_RATING, METADATA_KEY_RATING})
+ @StringDef(prefix = { "METADATA_KEY_" }, value = {
+ METADATA_KEY_USER_RATING,
+ METADATA_KEY_RATING,
+ })
@Retention(RetentionPolicy.SOURCE)
public @interface RatingKey {}
diff --git a/media/java/android/media/MediaMetadataRetriever.java b/media/java/android/media/MediaMetadataRetriever.java
index 571b41b3673c..745eb74d6e20 100644
--- a/media/java/android/media/MediaMetadataRetriever.java
+++ b/media/java/android/media/MediaMetadataRetriever.java
@@ -47,7 +47,7 @@ public class MediaMetadataRetriever
// The field below is accessed by native methods
@SuppressWarnings("unused")
private long mNativeContext;
-
+
private static final int EMBEDDED_PICTURE_TYPE_ANY = 0xFFFF;
public MediaMetadataRetriever() {
@@ -58,7 +58,7 @@ public class MediaMetadataRetriever
* Sets the data source (file pathname) to use. Call this
* method before the rest of the methods in this class. This method may be
* time-consuming.
- *
+ *
* @param path The path of the input media file.
* @throws IllegalArgumentException If the path is invalid.
*/
@@ -113,7 +113,7 @@ public class MediaMetadataRetriever
* responsibility to close the file descriptor. It is safe to do so as soon
* as this call returns. Call this method before the rest of the methods in
* this class. This method may be time-consuming.
- *
+ *
* @param fd the FileDescriptor for the file you want to play
* @param offset the offset into the file where the data to be played starts,
* in bytes. It must be non-negative
@@ -123,13 +123,13 @@ public class MediaMetadataRetriever
*/
public native void setDataSource(FileDescriptor fd, long offset, long length)
throws IllegalArgumentException;
-
+
/**
* Sets the data source (FileDescriptor) to use. It is the caller's
* responsibility to close the file descriptor. It is safe to do so as soon
* as this call returns. Call this method before the rest of the methods in
* this class. This method may be time-consuming.
- *
+ *
* @param fd the FileDescriptor for the file you want to play
* @throws IllegalArgumentException if the FileDescriptor is invalid
*/
@@ -138,11 +138,11 @@ public class MediaMetadataRetriever
// intentionally less than LONG_MAX
setDataSource(fd, 0, 0x7ffffffffffffffL);
}
-
+
/**
- * Sets the data source as a content Uri. Call this method before
+ * Sets the data source as a content Uri. Call this method before
* the rest of the methods in this class. This method may be time-consuming.
- *
+ *
* @param context the Context to use when resolving the Uri
* @param uri the Content URI of the data you want to play
* @throws IllegalArgumentException if the Uri is invalid
@@ -154,7 +154,7 @@ public class MediaMetadataRetriever
if (uri == null) {
throw new IllegalArgumentException();
}
-
+
String scheme = uri.getScheme();
if(scheme == null || scheme.equals("file")) {
setDataSource(uri.getPath());
@@ -213,12 +213,12 @@ public class MediaMetadataRetriever
/**
* Call this method after setDataSource(). This method retrieves the
* meta data value associated with the keyCode.
- *
+ *
* The keyCode currently supported is listed below as METADATA_XXX
* constants. With any other value, it returns a null pointer.
- *
+ *
* @param keyCode One of the constants listed below at the end of the class.
- * @return The meta data value associate with the given keyCode on success;
+ * @return The meta data value associated with the given keyCode on success;
* null on failure.
*/
public native String extractMetadata(int keyCode);
@@ -368,6 +368,109 @@ public class MediaMetadataRetriever
private native Bitmap _getFrameAtTime(long timeUs, int option, int width, int height);
/**
+ * This method retrieves a video frame by its index. It should only be called
+ * after {@link #setDataSource}.
+ *
+ * @param frameIndex 0-based index of the video frame. The frame index must be that of
+ * a valid frame. The total number of frames available for retrieval can be queried
+ * via the {@link #METADATA_KEY_VIDEO_FRAME_COUNT} key.
+ *
+ * @throws IllegalStateException if the container doesn't contain video or image sequences.
+ * @throws IllegalArgumentException if the requested frame index does not exist.
+ *
+ * @return A Bitmap containing the requested video frame, or null if the retrieval fails.
+ *
+ * @see #getFramesAtIndex(int, int)
+ */
+ public Bitmap getFrameAtIndex(int frameIndex) {
+ Bitmap[] bitmaps = getFramesAtIndex(frameIndex, 1);
+ if (bitmaps == null || bitmaps.length < 1) {
+ return null;
+ }
+ return bitmaps[0];
+ }
+
+ /**
+ * This method retrieves a consecutive set of video frames starting at the
+ * specified index. It should only be called after {@link #setDataSource}.
+ *
+ * If the caller intends to retrieve more than one consecutive video frame,
+ * this method is preferred over {@link #getFrameAtIndex(int)} for efficiency.
+ *
+ * @param frameIndex 0-based index of the first video frame to retrieve. The frame index
+ * must be that of a valid frame. The total number of frames available for retrieval
+ * can be queried via the {@link #METADATA_KEY_VIDEO_FRAME_COUNT} key.
+ * @param numFrames number of consecutive video frames to retrieve. Must be a positive
+ * value. The stream must contain at least numFrames frames starting at frameIndex.
+ *
+ * @throws IllegalStateException if the container doesn't contain video or image sequences.
+ * @throws IllegalArgumentException if the frameIndex or numFrames is invalid, or the
+ * stream doesn't contain at least numFrames frames starting at frameIndex.
+ *
+ * @return An array of Bitmaps containing the requested video frames. The returned
+ * array could contain fewer frames than requested if the retrieval fails.
+ *
+ * @see #getFrameAtIndex(int)
+ */
+ public Bitmap[] getFramesAtIndex(int frameIndex, int numFrames) {
+ if (!"yes".equals(extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_VIDEO))) {
+ throw new IllegalStateException("Does not contail video or image sequences");
+ }
+ int frameCount = Integer.parseInt(
+ extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_FRAME_COUNT));
+ if (frameIndex < 0 || numFrames < 1
+ || frameIndex >= frameCount
+ || frameIndex > frameCount - numFrames) {
+ throw new IllegalArgumentException("Invalid frameIndex or numFrames: "
+ + frameIndex + ", " + numFrames);
+ }
+ return _getFrameAtIndex(frameIndex, numFrames);
+ }
+ private native Bitmap[] _getFrameAtIndex(int frameIndex, int numFrames);
+
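A usage sketch (not part of this change) for the index-based retrieval added above; the path is hypothetical, and the frame count is read from METADATA_KEY_VIDEO_FRAME_COUNT as the javadoc requires.

    MediaMetadataRetriever retriever = new MediaMetadataRetriever();
    retriever.setDataSource("/sdcard/clip.mp4");  // hypothetical path
    String countStr =
            retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_FRAME_COUNT);
    int frameCount = (countStr == null) ? 0 : Integer.parseInt(countStr);
    if (frameCount >= 10) {
        Bitmap[] frames = retriever.getFramesAtIndex(0, 10);  // first ten frames in decode order
        // frames.length may be smaller than requested if retrieval fails
    }
    retriever.release();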
+ /**
+ * This method retrieves a still image by its index. It should only be called
+ * after {@link #setDataSource}.
+ *
+ * @param imageIndex 0-based index of the image, with negative value indicating
+ * the primary image.
+ * @throws IllegalStateException if the container doesn't contain still images.
+ * @throws IllegalArgumentException if the requested image does not exist.
+ *
+ * @return the requested still image, or null if the image cannot be retrieved.
+ *
+ * @see #getPrimaryImage
+ */
+ public Bitmap getImageAtIndex(int imageIndex) {
+ if (!"yes".equals(extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_IMAGE))) {
+ throw new IllegalStateException("Does not contail still images");
+ }
+
+ String imageCount = extractMetadata(MediaMetadataRetriever.METADATA_KEY_IMAGE_COUNT);
+ if (imageIndex >= Integer.parseInt(imageCount)) {
+ throw new IllegalArgumentException("Invalid image index: " + imageCount);
+ }
+
+ return _getImageAtIndex(imageIndex);
+ }
+
+ /**
+ * This method retrieves the primary image of the media content. It should only
+ * be called after {@link #setDataSource}.
+ *
+ * @return the primary image, or null if it cannot be retrieved.
+ *
+ * @throws IllegalStateException if the container doesn't contain still images.
+ *
+ * @see #getImageAtIndex(int)
+ */
+ public Bitmap getPrimaryImage() {
+ return getImageAtIndex(-1);
+ }
+
+ private native Bitmap _getImageAtIndex(int imageIndex);
+
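A minimal sketch of retrieving the primary still image (for example from a HEIF file), mirroring the METADATA_KEY_HAS_IMAGE check that getImageAtIndex() performs internally; the path is hypothetical.

    MediaMetadataRetriever retriever = new MediaMetadataRetriever();
    retriever.setDataSource("/sdcard/photo.heic");  // hypothetical path
    Bitmap primary = null;
    if ("yes".equals(retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_HAS_IMAGE))) {
        primary = retriever.getPrimaryImage();  // null if retrieval fails
    }
    retriever.release();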
+ /**
* Call this method after setDataSource(). This method finds the optional
* graphic or album/cover art associated associated with the data source. If
* there are more than one pictures, (any) one of them is returned.
@@ -406,7 +509,7 @@ public class MediaMetadataRetriever
* @see #getFrameAtTime(long, int)
*/
/* Do not change these option values without updating their counterparts
- * in include/media/stagefright/MediaSource.h!
+ * in include/media/MediaSource.h!
*/
/**
* This option is used with {@link #getFrameAtTime(long, int)} to retrieve
@@ -583,5 +686,40 @@ public class MediaMetadataRetriever
* number.
*/
public static final int METADATA_KEY_CAPTURE_FRAMERATE = 25;
+ /**
+ * If this key exists, the media contains still image content.
+ */
+ public static final int METADATA_KEY_HAS_IMAGE = 26;
+ /**
+ * If the media contains still images, this key retrieves the number
+ * of still images.
+ */
+ public static final int METADATA_KEY_IMAGE_COUNT = 27;
+ /**
+ * If the media contains still images, this key retrieves the image
+ * index of the primary image.
+ */
+ public static final int METADATA_KEY_IMAGE_PRIMARY = 28;
+ /**
+ * If the media contains still images, this key retrieves the width
+ * of the primary image.
+ */
+ public static final int METADATA_KEY_IMAGE_WIDTH = 29;
+ /**
+ * If the media contains still images, this key retrieves the height
+ * of the primary image.
+ */
+ public static final int METADATA_KEY_IMAGE_HEIGHT = 30;
+ /**
+ * If the media contains still images, this key retrieves the rotation
+ * of the primary image.
+ */
+ public static final int METADATA_KEY_IMAGE_ROTATION = 31;
+ /**
+ * If the media contains video and this key exists, it retrieves the
+ * total number of frames in the video sequence.
+ */
+ public static final int METADATA_KEY_VIDEO_FRAME_COUNT = 32;
+
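A short sketch of querying the new image keys before deciding whether to decode; extractMetadata() returns the values as strings (retriever as in the sketches above, example values illustrative only).

    String width = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_IMAGE_WIDTH);
    String height = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_IMAGE_HEIGHT);
    String rotation = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_IMAGE_ROTATION);
    // e.g. "4032", "3024", "90"; null if the corresponding key is not present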
// Add more here...
}
diff --git a/media/java/android/media/MediaMuxer.java b/media/java/android/media/MediaMuxer.java
index 91e57ee073b0..02c71b283b21 100644
--- a/media/java/android/media/MediaMuxer.java
+++ b/media/java/android/media/MediaMuxer.java
@@ -258,12 +258,18 @@ final public class MediaMuxer {
* in include/media/stagefright/MediaMuxer.h!
*/
private OutputFormat() {}
+ /** @hide */
+ public static final int MUXER_OUTPUT_FIRST = 0;
/** MPEG4 media file format*/
- public static final int MUXER_OUTPUT_MPEG_4 = 0;
+ public static final int MUXER_OUTPUT_MPEG_4 = MUXER_OUTPUT_FIRST;
/** WEBM media file format*/
- public static final int MUXER_OUTPUT_WEBM = 1;
+ public static final int MUXER_OUTPUT_WEBM = MUXER_OUTPUT_FIRST + 1;
/** 3GPP media file format*/
- public static final int MUXER_OUTPUT_3GPP = 2;
+ public static final int MUXER_OUTPUT_3GPP = MUXER_OUTPUT_FIRST + 2;
+ /** HEIF media file format*/
+ public static final int MUXER_OUTPUT_HEIF = MUXER_OUTPUT_FIRST + 3;
+ /** @hide */
+ public static final int MUXER_OUTPUT_LAST = MUXER_OUTPUT_HEIF;
};
/** @hide */
@@ -271,6 +277,7 @@ final public class MediaMuxer {
OutputFormat.MUXER_OUTPUT_MPEG_4,
OutputFormat.MUXER_OUTPUT_WEBM,
OutputFormat.MUXER_OUTPUT_3GPP,
+ OutputFormat.MUXER_OUTPUT_HEIF,
})
@Retention(RetentionPolicy.SOURCE)
public @interface Format {}
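A usage sketch (not part of this change) for the new HEIF output format; the path is hypothetical, IOException handling is omitted, and the track-writing steps are only indicated.

    MediaMuxer muxer = new MediaMuxer("/sdcard/photo.heic",  // hypothetical path
            MediaMuxer.OutputFormat.MUXER_OUTPUT_HEIF);
    // ... addTrack() with a MediaFormat.MIMETYPE_IMAGE_ANDROID_HEIC format, start(),
    //     writeSampleData() for each sample, stop() ...
    muxer.release();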
@@ -347,8 +354,7 @@ final public class MediaMuxer {
}
private void setUpMediaMuxer(@NonNull FileDescriptor fd, @Format int format) throws IOException {
- if (format != OutputFormat.MUXER_OUTPUT_MPEG_4 && format != OutputFormat.MUXER_OUTPUT_WEBM
- && format != OutputFormat.MUXER_OUTPUT_3GPP) {
+ if (format < OutputFormat.MUXER_OUTPUT_FIRST || format > OutputFormat.MUXER_OUTPUT_LAST) {
throw new IllegalArgumentException("format: " + format + " is invalid");
}
mNativeObject = nativeSetup(fd, format);
diff --git a/media/java/android/media/MediaPlayer.java b/media/java/android/media/MediaPlayer.java
index 31ffc4b59ea8..1bc3dfa4bdbc 100644
--- a/media/java/android/media/MediaPlayer.java
+++ b/media/java/android/media/MediaPlayer.java
@@ -43,6 +43,7 @@ import android.system.Os;
import android.system.OsConstants;
import android.util.Log;
import android.util.Pair;
+import android.util.ArrayMap;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.widget.VideoView;
@@ -58,6 +59,7 @@ import android.media.SubtitleData;
import android.media.SubtitleTrack.RenderingWidget;
import android.media.SyncParams;
+import com.android.internal.annotations.GuardedBy;
import com.android.internal.util.Preconditions;
import libcore.io.IoBridge;
@@ -577,6 +579,7 @@ import java.util.Vector;
public class MediaPlayer extends PlayerBase
implements SubtitleController.Listener
, VolumeAutomation
+ , AudioRouting
{
/**
Constant to retrieve only the new metadata since the last
@@ -1417,6 +1420,126 @@ public class MediaPlayer extends PlayerBase
private native @Nullable VolumeShaper.State native_getVolumeShaperState(int id);
+ //--------------------------------------------------------------------------
+ // Explicit Routing
+ //--------------------
+ private AudioDeviceInfo mPreferredDevice = null;
+
+ /**
+ * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
+ * the output from this MediaPlayer.
+ * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio sink or source.
+ * If deviceInfo is null, default routing is restored.
+ * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
+ * does not correspond to a valid audio device.
+ */
+ @Override
+ public boolean setPreferredDevice(AudioDeviceInfo deviceInfo) {
+ if (deviceInfo != null && !deviceInfo.isSink()) {
+ return false;
+ }
+ int preferredDeviceId = deviceInfo != null ? deviceInfo.getId() : 0;
+ boolean status = native_setOutputDevice(preferredDeviceId);
+ if (status) {
+ synchronized (this) {
+ mPreferredDevice = deviceInfo;
+ }
+ }
+ return status;
+ }
+
+ /**
+ * Returns the selected output specified by {@link #setPreferredDevice}. Note that this
+ * is not guaranteed to correspond to the actual device being used for playback.
+ */
+ @Override
+ public AudioDeviceInfo getPreferredDevice() {
+ synchronized (this) {
+ return mPreferredDevice;
+ }
+ }
+
+ /**
+ * Returns an {@link AudioDeviceInfo} identifying the current routing of this MediaPlayer.
+ * Note: The query is only valid if the MediaPlayer is currently playing.
+ * If the player is not playing, the returned device can be null or correspond to the
+ * previously selected device when the player was last active.
+ */
+ @Override
+ public AudioDeviceInfo getRoutedDevice() {
+ int deviceId = native_getRoutedDeviceId();
+ if (deviceId == 0) {
+ return null;
+ }
+ AudioDeviceInfo[] devices =
+ AudioManager.getDevicesStatic(AudioManager.GET_DEVICES_OUTPUTS);
+ for (int i = 0; i < devices.length; i++) {
+ if (devices[i].getId() == deviceId) {
+ return devices[i];
+ }
+ }
+ return null;
+ }
+
+ /*
+ * Call BEFORE adding a routing callback handler or AFTER removing a routing callback handler.
+ */
+ private void enableNativeRoutingCallbacksLocked(boolean enabled) {
+ if (mRoutingChangeListeners.size() == 0) {
+ native_enableDeviceCallback(enabled);
+ }
+ }
+
+ /**
+ * The list of AudioRouting.OnRoutingChangedListener interfaces added (with
+ * {@link #addOnRoutingChangedListener(android.media.AudioRouting.OnRoutingChangedListener, Handler)}
+ * by an app to receive (re)routing notifications.
+ */
+ @GuardedBy("mRoutingChangeListeners")
+ private ArrayMap<AudioRouting.OnRoutingChangedListener,
+ NativeRoutingEventHandlerDelegate> mRoutingChangeListeners = new ArrayMap<>();
+
+ /**
+ * Adds an {@link AudioRouting.OnRoutingChangedListener} to receive notifications of routing
+ * changes on this MediaPlayer.
+ * @param listener The {@link AudioRouting.OnRoutingChangedListener} interface to receive
+ * notifications of rerouting events.
+ * @param handler Specifies the {@link Handler} object for the thread on which to execute
+ * the callback. If <code>null</code>, the handler on the main looper will be used.
+ */
+ @Override
+ public void addOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener,
+ Handler handler) {
+ synchronized (mRoutingChangeListeners) {
+ if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
+ enableNativeRoutingCallbacksLocked(true);
+ mRoutingChangeListeners.put(
+ listener, new NativeRoutingEventHandlerDelegate(this, listener,
+ handler != null ? handler : mEventHandler));
+ }
+ }
+ }
+
+ /**
+ * Removes an {@link AudioRouting.OnRoutingChangedListener} which has been previously added
+ * to receive rerouting notifications.
+ * @param listener The previously added {@link AudioRouting.OnRoutingChangedListener} interface
+ * to remove.
+ */
+ @Override
+ public void removeOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener) {
+ synchronized (mRoutingChangeListeners) {
+ if (mRoutingChangeListeners.containsKey(listener)) {
+ mRoutingChangeListeners.remove(listener);
+ enableNativeRoutingCallbacksLocked(false);
+ }
+ }
+ }
+
+ private native final boolean native_setOutputDevice(int deviceId);
+ private native final int native_getRoutedDeviceId();
+ private native final void native_enableDeviceCallback(boolean enabled);
+
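A usage sketch of the routing methods added above, assuming a Context and an audio resource that are not part of this change; imports and error handling are omitted.

    MediaPlayer player = MediaPlayer.create(context, R.raw.song);  // hypothetical resource
    AudioManager am = context.getSystemService(AudioManager.class);
    for (AudioDeviceInfo device : am.getDevices(AudioManager.GET_DEVICES_OUTPUTS)) {
        if (device.getType() == AudioDeviceInfo.TYPE_BLUETOOTH_A2DP) {
            player.setPreferredDevice(device);  // false if not a valid output device
            break;
        }
    }
    player.addOnRoutingChangedListener(
            router -> Log.d("Routing", "now routed to " + router.getRoutedDevice()),
            null /* handler: callbacks posted to the main looper */);
    player.start();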
/**
* Set the low-level power management behavior for this MediaPlayer. This
* can be used when the MediaPlayer is not playing through a SurfaceHolder
@@ -1546,21 +1669,9 @@ public class MediaPlayer extends PlayerBase
public native boolean isPlaying();
/**
- * Gets the default buffering management params.
- * Calling it only after {@code setDataSource} has been called.
- * Each type of data source might have different set of default params.
- *
- * @return the default buffering management params supported by the source component.
- * @throws IllegalStateException if the internal player engine has not been
- * initialized, or {@code setDataSource} has not been called.
- * @hide
- */
- @NonNull
- public native BufferingParams getDefaultBufferingParams();
-
- /**
* Gets the current buffering management params used by the source component.
* Calling it only after {@code setDataSource} has been called.
+ * Each type of data source might have a different set of default params.
*
* @return the current buffering management params used by the source component.
* @throws IllegalStateException if the internal player engine has not been
@@ -1575,8 +1686,7 @@ public class MediaPlayer extends PlayerBase
* The object sets its internal BufferingParams to the input, except that the input is
* invalid or not supported.
* Call it only after {@code setDataSource} has been called.
- * Users should only use supported mode returned by {@link #getDefaultBufferingParams()}
- * or its downsized version as described in {@link BufferingParams}.
+ * The input is a hint to MediaPlayer.
*
* @param params the buffering management params.
*
@@ -2072,6 +2182,20 @@ public class MediaPlayer extends PlayerBase
private native void _reset();
/**
+ * Set up a timer for {@link #TimeProvider}. {@link #TimeProvider} will be
+ * notified when the presentation time reaches (becomes greater than or equal to)
+ * the value specified.
+ *
+ * @param mediaTimeUs presentation time to get timed event callback at
+ * @hide
+ */
+ public void notifyAt(long mediaTimeUs) {
+ _notifyAt(mediaTimeUs);
+ }
+
+ private native void _notifyAt(long mediaTimeUs);
+
+ /**
* Sets the audio stream type for this MediaPlayer. See {@link AudioManager}
* for a list of stream types. Must call this method before prepare() or
* prepareAsync() in order for the target stream type to become effective
@@ -3155,12 +3279,14 @@ public class MediaPlayer extends PlayerBase
private static final int MEDIA_PAUSED = 7;
private static final int MEDIA_STOPPED = 8;
private static final int MEDIA_SKIPPED = 9;
+ private static final int MEDIA_NOTIFY_TIME = 98;
private static final int MEDIA_TIMED_TEXT = 99;
private static final int MEDIA_ERROR = 100;
private static final int MEDIA_INFO = 200;
private static final int MEDIA_SUBTITLE_DATA = 201;
private static final int MEDIA_META_DATA = 202;
private static final int MEDIA_DRM_INFO = 210;
+ private static final int MEDIA_AUDIO_ROUTING_CHANGED = 10000;
private TimeProvider mTimeProvider;
@@ -3345,6 +3471,14 @@ public class MediaPlayer extends PlayerBase
}
// No real default action so far.
return;
+
+ case MEDIA_NOTIFY_TIME:
+ TimeProvider timeProvider = mTimeProvider;
+ if (timeProvider != null) {
+ timeProvider.onNotifyTime();
+ }
+ return;
+
case MEDIA_TIMED_TEXT:
OnTimedTextListener onTimedTextListener = mOnTimedTextListener;
if (onTimedTextListener == null)
@@ -3391,6 +3525,16 @@ public class MediaPlayer extends PlayerBase
case MEDIA_NOP: // interface test message - ignore
break;
+ case MEDIA_AUDIO_ROUTING_CHANGED:
+ AudioManager.resetAudioPortGeneration();
+ synchronized (mRoutingChangeListeners) {
+ for (NativeRoutingEventHandlerDelegate delegate
+ : mRoutingChangeListeners.values()) {
+ delegate.notifyClient();
+ }
+ }
+ return;
+
default:
Log.e(TAG, "Unknown message type " + msg.what);
return;
@@ -5144,19 +5288,16 @@ public class MediaPlayer extends PlayerBase
private boolean mStopped = true;
private boolean mBuffering;
private long mLastReportedTime;
- private long mTimeAdjustment;
// since we are expecting only a handful listeners per stream, there is
// no need for log(N) search performance
private MediaTimeProvider.OnMediaTimeListener mListeners[];
private long mTimes[];
- private long mLastNanoTime;
private Handler mEventHandler;
private boolean mRefresh = false;
private boolean mPausing = false;
private boolean mSeeking = false;
private static final int NOTIFY = 1;
private static final int NOTIFY_TIME = 0;
- private static final int REFRESH_AND_NOTIFY_TIME = 1;
private static final int NOTIFY_STOP = 2;
private static final int NOTIFY_SEEK = 3;
private static final int NOTIFY_TRACK_DATA = 4;
@@ -5188,13 +5329,11 @@ public class MediaPlayer extends PlayerBase
mListeners = new MediaTimeProvider.OnMediaTimeListener[0];
mTimes = new long[0];
mLastTimeUs = 0;
- mTimeAdjustment = 0;
}
private void scheduleNotification(int type, long delayUs) {
// ignore time notifications until seek is handled
- if (mSeeking &&
- (type == NOTIFY_TIME || type == REFRESH_AND_NOTIFY_TIME)) {
+ if (mSeeking && type == NOTIFY_TIME) {
return;
}
@@ -5221,6 +5360,14 @@ public class MediaPlayer extends PlayerBase
}
/** @hide */
+ public void onNotifyTime() {
+ synchronized (this) {
+ if (DEBUG) Log.d(TAG, "onNotifyTime: ");
+ scheduleNotification(NOTIFY_TIME, 0 /* delay */);
+ }
+ }
+
+ /** @hide */
public void onPaused(boolean paused) {
synchronized(this) {
if (DEBUG) Log.d(TAG, "onPaused: " + paused);
@@ -5231,7 +5378,7 @@ public class MediaPlayer extends PlayerBase
} else {
mPausing = paused; // special handling if player disappeared
mSeeking = false;
- scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */);
+ scheduleNotification(NOTIFY_TIME, 0 /* delay */);
}
}
}
@@ -5241,7 +5388,7 @@ public class MediaPlayer extends PlayerBase
synchronized (this) {
if (DEBUG) Log.d(TAG, "onBuffering: " + buffering);
mBuffering = buffering;
- scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */);
+ scheduleNotification(NOTIFY_TIME, 0 /* delay */);
}
}
@@ -5438,7 +5585,7 @@ public class MediaPlayer extends PlayerBase
if (nextTimeUs > nowUs && !mPaused) {
// schedule callback at nextTimeUs
if (DEBUG) Log.d(TAG, "scheduling for " + nextTimeUs + " and " + nowUs);
- scheduleNotification(NOTIFY_TIME, nextTimeUs - nowUs);
+ mPlayer.notifyAt(nextTimeUs);
} else {
mEventHandler.removeMessages(NOTIFY);
// no more callbacks
@@ -5449,25 +5596,6 @@ public class MediaPlayer extends PlayerBase
}
}
- private long getEstimatedTime(long nanoTime, boolean monotonic) {
- if (mPaused) {
- mLastReportedTime = mLastTimeUs + mTimeAdjustment;
- } else {
- long timeSinceRead = (nanoTime - mLastNanoTime) / 1000;
- mLastReportedTime = mLastTimeUs + timeSinceRead;
- if (mTimeAdjustment > 0) {
- long adjustment =
- mTimeAdjustment - timeSinceRead / TIME_ADJUSTMENT_RATE;
- if (adjustment <= 0) {
- mTimeAdjustment = 0;
- } else {
- mLastReportedTime += adjustment;
- }
- }
- }
- return mLastReportedTime;
- }
-
public long getCurrentTimeUs(boolean refreshTime, boolean monotonic)
throws IllegalStateException {
synchronized (this) {
@@ -5477,42 +5605,38 @@ public class MediaPlayer extends PlayerBase
return mLastReportedTime;
}
- long nanoTime = System.nanoTime();
- if (refreshTime ||
- nanoTime >= mLastNanoTime + MAX_NS_WITHOUT_POSITION_CHECK) {
- try {
- mLastTimeUs = mPlayer.getCurrentPosition() * 1000L;
- mPaused = !mPlayer.isPlaying() || mBuffering;
- if (DEBUG) Log.v(TAG, (mPaused ? "paused" : "playing") + " at " + mLastTimeUs);
- } catch (IllegalStateException e) {
- if (mPausing) {
- // if we were pausing, get last estimated timestamp
- mPausing = false;
- getEstimatedTime(nanoTime, monotonic);
- mPaused = true;
- if (DEBUG) Log.d(TAG, "illegal state, but pausing: estimating at " + mLastReportedTime);
- return mLastReportedTime;
+ try {
+ mLastTimeUs = mPlayer.getCurrentPosition() * 1000L;
+ mPaused = !mPlayer.isPlaying() || mBuffering;
+ if (DEBUG) Log.v(TAG, (mPaused ? "paused" : "playing") + " at " + mLastTimeUs);
+ } catch (IllegalStateException e) {
+ if (mPausing) {
+ // if we were pausing, get last estimated timestamp
+ mPausing = false;
+ if (!monotonic || mLastReportedTime < mLastTimeUs) {
+ mLastReportedTime = mLastTimeUs;
}
- // TODO get time when prepared
- throw e;
+ mPaused = true;
+ if (DEBUG) Log.d(TAG, "illegal state, but pausing: estimating at " + mLastReportedTime);
+ return mLastReportedTime;
}
- mLastNanoTime = nanoTime;
- if (monotonic && mLastTimeUs < mLastReportedTime) {
- /* have to adjust time */
- mTimeAdjustment = mLastReportedTime - mLastTimeUs;
- if (mTimeAdjustment > 1000000) {
- // schedule seeked event if time jumped significantly
- // TODO: do this properly by introducing an exception
- mStopped = false;
- mSeeking = true;
- scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
- }
- } else {
- mTimeAdjustment = 0;
+ // TODO get time when prepared
+ throw e;
+ }
+ if (monotonic && mLastTimeUs < mLastReportedTime) {
+ /* have to adjust time */
+ if (mLastReportedTime - mLastTimeUs > 1000000) {
+ // schedule seeked event if time jumped significantly
+ // TODO: do this properly by introducing an exception
+ mStopped = false;
+ mSeeking = true;
+ scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
}
+ } else {
+ mLastReportedTime = mLastTimeUs;
}
- return getEstimatedTime(nanoTime, monotonic);
+ return mLastReportedTime;
}
}
@@ -5526,9 +5650,6 @@ public class MediaPlayer extends PlayerBase
if (msg.what == NOTIFY) {
switch (msg.arg1) {
case NOTIFY_TIME:
- notifyTimedEvent(false /* refreshTime */);
- break;
- case REFRESH_AND_NOTIFY_TIME:
notifyTimedEvent(true /* refreshTime */);
break;
case NOTIFY_STOP:
diff --git a/media/java/android/media/MediaRecorder.java b/media/java/android/media/MediaRecorder.java
index 59a124fa434f..3c49b80b4b5e 100644
--- a/media/java/android/media/MediaRecorder.java
+++ b/media/java/android/media/MediaRecorder.java
@@ -25,6 +25,7 @@ import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.PersistableBundle;
+import android.util.ArrayMap;
import android.util.Log;
import android.view.Surface;
@@ -34,6 +35,8 @@ import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.ref.WeakReference;
+import com.android.internal.annotations.GuardedBy;
+
/**
* Used to record audio and video. The recording control is based on a
* simple state machine (see below).
@@ -76,7 +79,7 @@ import java.lang.ref.WeakReference;
* <a href="{@docRoot}guide/topics/media/audio-capture.html">Audio Capture</a> developer guide.</p>
* </div>
*/
-public class MediaRecorder
+public class MediaRecorder implements AudioRouting
{
static {
System.loadLibrary("media_jni");
@@ -917,7 +920,7 @@ public class MediaRecorder
*/
public void setNextOutputFile(File file) throws IOException
{
- RandomAccessFile f = new RandomAccessFile(file, "rws");
+ RandomAccessFile f = new RandomAccessFile(file, "rw");
try {
_setNextOutputFile(f.getFD());
} finally {
@@ -942,7 +945,7 @@ public class MediaRecorder
public void prepare() throws IllegalStateException, IOException
{
if (mPath != null) {
- RandomAccessFile file = new RandomAccessFile(mPath, "rws");
+ RandomAccessFile file = new RandomAccessFile(mPath, "rw");
try {
_setOutputFile(file.getFD());
} finally {
@@ -951,7 +954,7 @@ public class MediaRecorder
} else if (mFd != null) {
_setOutputFile(mFd);
} else if (mFile != null) {
- RandomAccessFile file = new RandomAccessFile(mFile, "rws");
+ RandomAccessFile file = new RandomAccessFile(mFile, "rw");
try {
_setOutputFile(file.getFD());
} finally {
@@ -1243,6 +1246,7 @@ public class MediaRecorder
private static final int MEDIA_RECORDER_TRACK_EVENT_INFO = 101;
private static final int MEDIA_RECORDER_TRACK_EVENT_LIST_END = 1000;
+ private static final int MEDIA_RECORDER_AUDIO_ROUTING_CHANGED = 10000;
@Override
public void handleMessage(Message msg) {
@@ -1265,6 +1269,16 @@ public class MediaRecorder
return;
+ case MEDIA_RECORDER_AUDIO_ROUTING_CHANGED:
+ AudioManager.resetAudioPortGeneration();
+ synchronized (mRoutingChangeListeners) {
+ for (NativeRoutingEventHandlerDelegate delegate
+ : mRoutingChangeListeners.values()) {
+ delegate.notifyClient();
+ }
+ }
+ return;
+
default:
Log.e(TAG, "Unknown message type " + msg.what);
return;
@@ -1272,6 +1286,155 @@ public class MediaRecorder
}
}
+ //--------------------------------------------------------------------------
+ // Explicit Routing
+ //--------------------
+ private AudioDeviceInfo mPreferredDevice = null;
+
+ /**
+ * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
+ * the input from this MediaRecorder.
+ * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio source.
+ * If deviceInfo is null, default routing is restored.
+ * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
+ * does not correspond to a valid audio input device.
+ */
+ @Override
+ public boolean setPreferredDevice(AudioDeviceInfo deviceInfo) {
+ if (deviceInfo != null && !deviceInfo.isSource()) {
+ return false;
+ }
+ int preferredDeviceId = deviceInfo != null ? deviceInfo.getId() : 0;
+ boolean status = native_setInputDevice(preferredDeviceId);
+ if (status) {
+ synchronized (this) {
+ mPreferredDevice = deviceInfo;
+ }
+ }
+ return status;
+ }
+
+ /**
+ * Returns the selected input device specified by {@link #setPreferredDevice}. Note that this
+ * is not guaranteed to correspond to the actual device being used for recording.
+ */
+ @Override
+ public AudioDeviceInfo getPreferredDevice() {
+ synchronized (this) {
+ return mPreferredDevice;
+ }
+ }
+
+ /**
+ * Returns an {@link AudioDeviceInfo} identifying the current routing of this MediaRecorder.
+ * Note: The query is only valid if the MediaRecorder is currently recording.
+ * If the recorder is not recording, the returned device can be null or correspond to the
+ * previously selected device when the recorder was last active.
+ */
+ @Override
+ public AudioDeviceInfo getRoutedDevice() {
+ int deviceId = native_getRoutedDeviceId();
+ if (deviceId == 0) {
+ return null;
+ }
+ AudioDeviceInfo[] devices =
+ AudioManager.getDevicesStatic(AudioManager.GET_DEVICES_INPUTS);
+ for (int i = 0; i < devices.length; i++) {
+ if (devices[i].getId() == deviceId) {
+ return devices[i];
+ }
+ }
+ return null;
+ }
+
+ /*
+ * Call BEFORE adding a routing callback handler or AFTER removing a routing callback handler.
+ */
+ private void enableNativeRoutingCallbacksLocked(boolean enabled) {
+ if (mRoutingChangeListeners.size() == 0) {
+ native_enableDeviceCallback(enabled);
+ }
+ }
+
+ /**
+ * The list of AudioRouting.OnRoutingChangedListener interfaces added (with
+ * {@link #addOnRoutingChangedListener(android.media.AudioRouting.OnRoutingChangedListener, Handler)})
+ * by an app to receive (re)routing notifications.
+ */
+ @GuardedBy("mRoutingChangeListeners")
+ private ArrayMap<AudioRouting.OnRoutingChangedListener,
+ NativeRoutingEventHandlerDelegate> mRoutingChangeListeners = new ArrayMap<>();
+
+ /**
+ * Adds an {@link AudioRouting.OnRoutingChangedListener} to receive notifications of routing
+ * changes on this MediaRecorder.
+ * @param listener The {@link AudioRouting.OnRoutingChangedListener} interface to receive
+ * notifications of rerouting events.
+ * @param handler Specifies the {@link Handler} object for the thread on which to execute
+ * the callback. If <code>null</code>, the handler on the main looper will be used.
+ */
+ @Override
+ public void addOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener,
+ Handler handler) {
+ synchronized (mRoutingChangeListeners) {
+ if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
+ enableNativeRoutingCallbacksLocked(true);
+ mRoutingChangeListeners.put(
+ listener, new NativeRoutingEventHandlerDelegate(this, listener, handler));
+ }
+ }
+ }
+
+ /**
+ * Removes an {@link AudioRouting.OnRoutingChangedListener} which has been previously added
+ * to receive rerouting notifications.
+ * @param listener The previously added {@link AudioRouting.OnRoutingChangedListener} interface
+ * to remove.
+ */
+ @Override
+ public void removeOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener) {
+ synchronized (mRoutingChangeListeners) {
+ if (mRoutingChangeListeners.containsKey(listener)) {
+ mRoutingChangeListeners.remove(listener);
+ enableNativeRoutingCallbacksLocked(false);
+ }
+ }
+ }
+
+ /**
+ * Helper class to handle the forwarding of native events to the appropriate listener
+ * (potentially) handled in a different thread.
+ */
+ private class NativeRoutingEventHandlerDelegate {
+ private MediaRecorder mMediaRecorder;
+ private AudioRouting.OnRoutingChangedListener mOnRoutingChangedListener;
+ private Handler mHandler;
+
+ NativeRoutingEventHandlerDelegate(final MediaRecorder mediaRecorder,
+ final AudioRouting.OnRoutingChangedListener listener, Handler handler) {
+ mMediaRecorder = mediaRecorder;
+ mOnRoutingChangedListener = listener;
+ mHandler = handler != null ? handler : mEventHandler;
+ }
+
+ void notifyClient() {
+ if (mHandler != null) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (mOnRoutingChangedListener != null) {
+ mOnRoutingChangedListener.onRoutingChanged(mMediaRecorder);
+ }
+ }
+ });
+ }
+ }
+ }
+
+ private native final boolean native_setInputDevice(int deviceId);
+ private native final int native_getRoutedDeviceId();
+ private native final void native_enableDeviceCallback(boolean enabled);
+
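A sketch of steering a MediaRecorder to a USB microphone with the routing methods added above, assuming a Context that is not shown here; the output path is hypothetical and IOException handling for prepare() is omitted.

    AudioManager am = context.getSystemService(AudioManager.class);
    MediaRecorder recorder = new MediaRecorder();
    recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
    recorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
    recorder.setOutputFile("/sdcard/take.m4a");  // hypothetical path
    for (AudioDeviceInfo device : am.getDevices(AudioManager.GET_DEVICES_INPUTS)) {
        if (device.getType() == AudioDeviceInfo.TYPE_USB_DEVICE) {
            recorder.setPreferredDevice(device);  // false if not a valid input device
            break;
        }
    }
    recorder.prepare();
    recorder.start();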
/**
* Called from native code when an interesting event happens. This method
* just uses the EventHandler system to post the event back to the main app thread.
diff --git a/media/java/android/media/MediaRouter.java b/media/java/android/media/MediaRouter.java
index b4fff4839e9c..70ab8632a889 100644
--- a/media/java/android/media/MediaRouter.java
+++ b/media/java/android/media/MediaRouter.java
@@ -194,8 +194,10 @@ public class MediaRouter {
name = com.android.internal.R.string.default_audio_route_name_headphones;
} else if ((newRoutes.mainType & AudioRoutesInfo.MAIN_DOCK_SPEAKERS) != 0) {
name = com.android.internal.R.string.default_audio_route_name_dock_speakers;
- } else if ((newRoutes.mainType & AudioRoutesInfo.MAIN_HDMI) != 0) {
- name = com.android.internal.R.string.default_media_route_name_hdmi;
+ } else if ((newRoutes.mainType & AudioRoutesInfo.MAIN_HDMI) != 0) {
+ name = com.android.internal.R.string.default_audio_route_name_hdmi;
+ } else if ((newRoutes.mainType & AudioRoutesInfo.MAIN_USB) != 0) {
+ name = com.android.internal.R.string.default_audio_route_name_usb;
} else {
name = com.android.internal.R.string.default_audio_route_name;
}
diff --git a/media/java/android/media/NativeRoutingEventHandlerDelegate.java b/media/java/android/media/NativeRoutingEventHandlerDelegate.java
new file mode 100644
index 000000000000..9a6baf17e860
--- /dev/null
+++ b/media/java/android/media/NativeRoutingEventHandlerDelegate.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Handler;
+
+/**
+ * Helper class for {@link AudioTrack}, {@link AudioRecord}, {@link MediaPlayer} and {@link MediaRecorder}
+ * to handle the forwarding of native events to the appropriate listener
+ * (potentially) handled in a different thread.
+ * @hide
+ */
+class NativeRoutingEventHandlerDelegate {
+ private AudioRouting mAudioRouting;
+ private AudioRouting.OnRoutingChangedListener mOnRoutingChangedListener;
+ private Handler mHandler;
+
+ NativeRoutingEventHandlerDelegate(final AudioRouting audioRouting,
+ final AudioRouting.OnRoutingChangedListener listener, Handler handler) {
+ mAudioRouting = audioRouting;
+ mOnRoutingChangedListener = listener;
+ mHandler = handler;
+ }
+
+ void notifyClient() {
+ if (mHandler != null) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (mOnRoutingChangedListener != null) {
+ mOnRoutingChangedListener.onRoutingChanged(mAudioRouting);
+ }
+ }
+ });
+ }
+ }
+}
diff --git a/media/java/android/media/VolumeShaper.aidl b/media/java/android/media/VolumeShaper.aidl
deleted file mode 100644
index ecf6a8f5d0f8..000000000000
--- a/media/java/android/media/VolumeShaper.aidl
+++ /dev/null
@@ -1,21 +0,0 @@
-/*
- * Copyright 2017 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.media;
-
-parcelable VolumeShaper.Configuration;
-parcelable VolumeShaper.Operation;
-parcelable VolumeShaper.State; \ No newline at end of file
diff --git a/media/java/android/media/browse/MediaBrowser.java b/media/java/android/media/browse/MediaBrowser.java
index c9b096fb124c..2bccd884bea4 100644
--- a/media/java/android/media/browse/MediaBrowser.java
+++ b/media/java/android/media/browse/MediaBrowser.java
@@ -494,7 +494,7 @@ public final class MediaBrowser {
sub = new Subscription();
mSubscriptions.put(parentId, sub);
}
- sub.putCallback(options, callback);
+ sub.putCallback(mContext, options, callback);
// If we are connected, tell the service that we are watching. If we aren't connected,
// the service will be told when we connect.
@@ -671,7 +671,8 @@ public final class MediaBrowser {
final Subscription subscription = mSubscriptions.get(parentId);
if (subscription != null) {
// Tell the app.
- SubscriptionCallback subscriptionCallback = subscription.getCallback(options);
+ SubscriptionCallback subscriptionCallback =
+ subscription.getCallback(mContext, options);
if (subscriptionCallback != null) {
List<MediaItem> data = list == null ? null : list.getList();
if (options == null) {
@@ -1141,7 +1142,10 @@ public final class MediaBrowser {
return mCallbacks;
}
- public SubscriptionCallback getCallback(Bundle options) {
+ public SubscriptionCallback getCallback(Context context, Bundle options) {
+ if (options != null) {
+ options.setClassLoader(context.getClassLoader());
+ }
for (int i = 0; i < mOptionsList.size(); ++i) {
if (MediaBrowserUtils.areSameOptions(mOptionsList.get(i), options)) {
return mCallbacks.get(i);
@@ -1150,7 +1154,10 @@ public final class MediaBrowser {
return null;
}
- public void putCallback(Bundle options, SubscriptionCallback callback) {
+ public void putCallback(Context context, Bundle options, SubscriptionCallback callback) {
+ if (options != null) {
+ options.setClassLoader(context.getClassLoader());
+ }
for (int i = 0; i < mOptionsList.size(); ++i) {
if (MediaBrowserUtils.areSameOptions(mOptionsList.get(i), options)) {
mCallbacks.set(i, callback);
diff --git a/media/java/android/media/midi/package.html b/media/java/android/media/midi/package.html
index 8c1010d610fe..33c54900cd07 100644
--- a/media/java/android/media/midi/package.html
+++ b/media/java/android/media/midi/package.html
@@ -138,9 +138,10 @@ int numOutputs = info.getOutputPortCount();
</pre>
-<p>Note that &ldquo;input&rdquo; and &ldquo;output&rdquo; are from the standpoint of the device. So a
-synthesizer will have an &ldquo;input&rdquo; port that receives messages. A keyboard will
-have an &ldquo;output&rdquo; port that sends messages.</p>
+<p>Note that &ldquo;input&rdquo; and &ldquo;output&rdquo; directions reflect the point of view
+of the MIDI device itself, not your app.
+For example, to send MIDI notes to a synthesizer, open the synth's INPUT port.
+To receive notes from a keyboard, open the keyboard's OUTPUT port.</p>
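<p>For example, the following sketch (not part of the original text; it assumes the first
available device is the synthesizer and that a Context and Handler are available) opens the
synth's input port and sends a note-on message:</p>

<pre>
MidiManager midiManager = (MidiManager) context.getSystemService(Context.MIDI_SERVICE);
MidiDeviceInfo synthInfo = midiManager.getDevices()[0];  // assumed: the synthesizer
midiManager.openDevice(synthInfo, new MidiManager.OnDeviceOpenedListener() {
    @Override
    public void onDeviceOpened(MidiDevice device) {
        if (device == null) return;
        MidiInputPort inputPort = device.openInputPort(0);
        byte[] noteOn = { (byte) 0x90, (byte) 60, (byte) 100 };  // channel 1, middle C, velocity 100
        try {
            inputPort.send(noteOn, 0, noteOn.length);
        } catch (IOException e) {
            // handle the I/O failure
        }
    }
}, new Handler(Looper.getMainLooper()));
</pre>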
<p>The MidiDeviceInfo has a bundle of properties.</p>
@@ -359,8 +360,10 @@ public class MidiSynthDeviceService extends MidiDeviceService {
<p>MIDI devices can be connected to Android using Bluetooth LE.</p>
<p>Before using the device, the app must scan for available BTLE devices and then allow
-the user to connect. An example program
-will be provided so look for it on the Android developer website.</p>
+the user to connect.
+See the Android developer website for an
+<a href="https://source.android.com/devices/audio/midi_test#apps" target="_blank">example
+program</a>.</p>
<h2 id=btle_location_permissions>Request Location Permission for BTLE</h2>
diff --git a/media/java/android/media/projection/MediaProjectionManager.java b/media/java/android/media/projection/MediaProjectionManager.java
index 9f2c08e5c6ae..aa0d0cc090bc 100644
--- a/media/java/android/media/projection/MediaProjectionManager.java
+++ b/media/java/android/media/projection/MediaProjectionManager.java
@@ -20,8 +20,10 @@ import android.annotation.NonNull;
import android.annotation.Nullable;
import android.annotation.SystemService;
import android.app.Activity;
+import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
+import android.content.pm.PackageManager;
import android.media.projection.IMediaProjection;
import android.os.Handler;
import android.os.IBinder;
@@ -71,8 +73,11 @@ public final class MediaProjectionManager {
*/
public Intent createScreenCaptureIntent() {
Intent i = new Intent();
- i.setClassName("com.android.systemui",
- "com.android.systemui.media.MediaProjectionPermissionActivity");
+ final ComponentName mediaProjectionPermissionDialogComponent =
+ ComponentName.unflattenFromString(mContext.getResources().getString(
+ com.android.internal.R.string
+ .config_mediaProjectionPermissionDialogComponent));
+ i.setComponent(mediaProjectionPermissionDialogComponent);
return i;
}
diff --git a/media/java/android/media/session/MediaSession.java b/media/java/android/media/session/MediaSession.java
index 1291dfb59d2c..b8184a0789b6 100644
--- a/media/java/android/media/session/MediaSession.java
+++ b/media/java/android/media/session/MediaSession.java
@@ -119,7 +119,7 @@ public final class MediaSession {
private final ISession mBinder;
private final CallbackStub mCbStub;
- private CallbackMessageHandler mCallback;
+ private CallbackMessageHandler mCallbackHandler;
private VolumeProvider mVolumeProvider;
private PlaybackState mPlaybackState;
@@ -194,24 +194,22 @@ public final class MediaSession {
*/
public void setCallback(@Nullable Callback callback, @Nullable Handler handler) {
synchronized (mLock) {
+ if (mCallbackHandler != null) {
+ // We're updating the callback, clear the session from the old one.
+ mCallbackHandler.mCallback.mSession = null;
+ mCallbackHandler.removeCallbacksAndMessages(null);
+ }
if (callback == null) {
- if (mCallback != null) {
- mCallback.mCallback.mSession = null;
- }
- mCallback = null;
+ mCallbackHandler = null;
return;
}
- if (mCallback != null) {
- // We're updating the callback, clear the session from the old one.
- mCallback.mCallback.mSession = null;
- }
if (handler == null) {
handler = new Handler();
}
callback.mSession = this;
CallbackMessageHandler msgHandler = new CallbackMessageHandler(handler.getLooper(),
callback);
- mCallback = msgHandler;
+ mCallbackHandler = msgHandler;
}
}
@@ -636,8 +634,8 @@ public final class MediaSession {
private void postToCallback(int what, Object obj, Bundle extras) {
synchronized (mLock) {
- if (mCallback != null) {
- mCallback.post(what, obj, extras);
+ if (mCallbackHandler != null) {
+ mCallbackHandler.post(what, obj, extras);
}
}
}
diff --git a/media/java/android/media/session/PlaybackState.java b/media/java/android/media/session/PlaybackState.java
index 8283c8b967e8..17d16b896679 100644
--- a/media/java/android/media/session/PlaybackState.java
+++ b/media/java/android/media/session/PlaybackState.java
@@ -17,6 +17,7 @@ package android.media.session;
import android.annotation.DrawableRes;
import android.annotation.IntDef;
+import android.annotation.LongDef;
import android.annotation.Nullable;
import android.media.RemoteControlClient;
import android.os.Bundle;
@@ -41,7 +42,7 @@ public final class PlaybackState implements Parcelable {
/**
* @hide
*/
- @IntDef(flag=true, value={ACTION_STOP, ACTION_PAUSE, ACTION_PLAY, ACTION_REWIND,
+ @LongDef(flag=true, value={ACTION_STOP, ACTION_PAUSE, ACTION_PLAY, ACTION_REWIND,
ACTION_SKIP_TO_PREVIOUS, ACTION_SKIP_TO_NEXT, ACTION_FAST_FORWARD, ACTION_SET_RATING,
ACTION_SEEK_TO, ACTION_PLAY_PAUSE, ACTION_PLAY_FROM_MEDIA_ID, ACTION_PLAY_FROM_SEARCH,
ACTION_SKIP_TO_QUEUE_ITEM, ACTION_PLAY_FROM_URI, ACTION_PREPARE,
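A brief sketch (not part of this change) of why @LongDef fits here: the ACTION_* constants are long bit flags that are combined with bitwise OR when building a PlaybackState.

    PlaybackState state = new PlaybackState.Builder()
            .setActions(PlaybackState.ACTION_PLAY | PlaybackState.ACTION_PAUSE
                    | PlaybackState.ACTION_SEEK_TO)
            .setState(PlaybackState.STATE_PAUSED, 0 /* position, ms */, 1.0f /* playback speed */)
            .build();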
diff --git a/media/java/android/media/tv/ITvInputHardware.aidl b/media/java/android/media/tv/ITvInputHardware.aidl
index 96223ba7bac1..94c1013a837e 100644
--- a/media/java/android/media/tv/ITvInputHardware.aidl
+++ b/media/java/android/media/tv/ITvInputHardware.aidl
@@ -40,12 +40,6 @@ interface ITvInputHardware {
void setStreamVolume(float volume);
/**
- * Dispatch key event to HDMI service. The events would be automatically converted to
- * HDMI CEC commands. If the hardware is not representing an HDMI port, this method will fail.
- */
- boolean dispatchKeyEventToHdmi(in KeyEvent event);
-
- /**
* Override default audio sink from audio policy. When override is on, it is
* TvInputService's responsibility to adjust to audio configuration change
* (for example, when the audio sink becomes unavailable or more desirable
diff --git a/media/java/android/media/tv/TvContract.java b/media/java/android/media/tv/TvContract.java
index 48fb5bffffd3..3bbc2c4e2304 100644
--- a/media/java/android/media/tv/TvContract.java
+++ b/media/java/android/media/tv/TvContract.java
@@ -703,37 +703,37 @@ public final class TvContract {
}
/**
- * Returns {@code true}, if {@code uri} is a channel URI.
+ * @return {@code true} if {@code uri} is a channel URI.
*/
- public static boolean isChannelUri(Uri uri) {
+ public static boolean isChannelUri(@NonNull Uri uri) {
return isChannelUriForTunerInput(uri) || isChannelUriForPassthroughInput(uri);
}
/**
- * Returns {@code true}, if {@code uri} is a channel URI for a tuner input.
+ * @return {@code true} if {@code uri} is a channel URI for a tuner input.
*/
- public static boolean isChannelUriForTunerInput(Uri uri) {
+ public static boolean isChannelUriForTunerInput(@NonNull Uri uri) {
return isTvUri(uri) && isTwoSegmentUriStartingWith(uri, PATH_CHANNEL);
}
/**
- * Returns {@code true}, if {@code uri} is a channel URI for a pass-through input.
+ * @return {@code true} if {@code uri} is a channel URI for a pass-through input.
*/
- public static boolean isChannelUriForPassthroughInput(Uri uri) {
+ public static boolean isChannelUriForPassthroughInput(@NonNull Uri uri) {
return isTvUri(uri) && isTwoSegmentUriStartingWith(uri, PATH_PASSTHROUGH);
}
/**
- * Returns {@code true}, if {@code uri} is a program URI.
+ * @return {@code true} if {@code uri} is a program URI.
*/
- public static boolean isProgramUri(Uri uri) {
+ public static boolean isProgramUri(@NonNull Uri uri) {
return isTvUri(uri) && isTwoSegmentUriStartingWith(uri, PATH_PROGRAM);
}
/**
- * Returns {@code true}, if {@code uri} is a recorded program URI.
+ * @return {@code true} if {@code uri} is a recorded program URI.
*/
- public static boolean isRecordedProgramUri(Uri uri) {
+ public static boolean isRecordedProgramUri(@NonNull Uri uri) {
return isTvUri(uri) && isTwoSegmentUriStartingWith(uri, PATH_RECORDED_PROGRAM);
}
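A small sketch (not part of this change) of how the URI checks above classify a tuner channel URI.

    Uri channelUri = TvContract.buildChannelUri(7);            // content://android.media.tv/channel/7
    boolean isChannel = TvContract.isChannelUri(channelUri);   // true
    boolean isProgram = TvContract.isProgramUri(channelUri);   // false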
@@ -1650,7 +1650,7 @@ public final class TvContract {
public static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/channel";
/** @hide */
- @StringDef({
+ @StringDef(prefix = { "TYPE_" }, value = {
TYPE_OTHER,
TYPE_NTSC,
TYPE_PAL,
@@ -1863,7 +1863,7 @@ public final class TvContract {
public static final String TYPE_PREVIEW = "TYPE_PREVIEW";
/** @hide */
- @StringDef({
+ @StringDef(prefix = { "SERVICE_TYPE_" }, value = {
SERVICE_TYPE_OTHER,
SERVICE_TYPE_AUDIO_VIDEO,
SERVICE_TYPE_AUDIO,
@@ -1881,7 +1881,7 @@ public final class TvContract {
public static final String SERVICE_TYPE_AUDIO = "SERVICE_TYPE_AUDIO";
/** @hide */
- @StringDef({
+ @StringDef(prefix = { "VIDEO_FORMAT_" }, value = {
VIDEO_FORMAT_240P,
VIDEO_FORMAT_360P,
VIDEO_FORMAT_480I,
@@ -1930,7 +1930,7 @@ public final class TvContract {
public static final String VIDEO_FORMAT_4320P = "VIDEO_FORMAT_4320P";
/** @hide */
- @StringDef({
+ @StringDef(prefix = { "VIDEO_RESOLUTION_" }, value = {
VIDEO_RESOLUTION_SD,
VIDEO_RESOLUTION_ED,
VIDEO_RESOLUTION_HD,
diff --git a/media/java/android/media/tv/TvInputManager.java b/media/java/android/media/tv/TvInputManager.java
index 2eaea6bf623d..143182f83ace 100644
--- a/media/java/android/media/tv/TvInputManager.java
+++ b/media/java/android/media/tv/TvInputManager.java
@@ -1329,7 +1329,6 @@ public final class TvInputManager {
* Returns the list of blocked content ratings.
*
* @return the list of content ratings blocked by the user.
- * @hide
*/
@SystemApi
public List<TvContentRating> getBlockedRatings() {
@@ -1387,6 +1386,7 @@ public final class TvInputManager {
* @hide
*/
@SystemApi
+ @RequiresPermission(android.Manifest.permission.READ_CONTENT_RATING_SYSTEMS)
public List<TvContentRatingSystemInfo> getTvContentRatingSystemList() {
try {
return mService.getTvContentRatingSystemList(mUserId);
@@ -1551,6 +1551,7 @@ public final class TvInputManager {
* @hide
*/
@SystemApi
+ @RequiresPermission(android.Manifest.permission.CAPTURE_TV_INPUT)
public boolean isSingleSessionActive() {
try {
return mService.isSingleSessionActive(mUserId);
@@ -2592,12 +2593,10 @@ public final class TvInputManager {
}
}
+ /** @removed */
+ @SystemApi
public boolean dispatchKeyEventToHdmi(KeyEvent event) {
- try {
- return mInterface.dispatchKeyEventToHdmi(event);
- } catch (RemoteException e) {
- throw new RuntimeException(e);
- }
+ return false;
}
public void overrideAudioSink(int audioType, String audioAddress, int samplingRate,